Bug 1337111 - Part 2. Add an image decoder flag to request complete frames. r=tnikkel
authorAndrew Osmond <aosmond@mozilla.com>
Mon, 17 Sep 2018 15:06:28 -0400
changeset 436846 e72e07baca921e697a6d79aa8488e2a0aecb1364
parent 436845 81865752a2f77ef0bdc3e79d05df71335a5430b7
child 436847 a826a94ae5dd12bfb01edde50dc9a37e4c1e6e3b
push id34660
push userbtara@mozilla.com
push dateMon, 17 Sep 2018 21:58:52 +0000
treeherdermozilla-central@87a95e1b7ec6 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewerstnikkel
bugs1337111
milestone64.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1337111 - Part 2. Add an image decoder flag to request complete frames. r=tnikkel DecoderFlags::BLEND_ANIMATION will cause the decoder to inject the BlendAnimationFilter from the previous patch into the SurfacePipe filter chain. All frames produced by this decoder will be complete, and should be equivalent to the result outputted by FrameAnimator.
image/Decoder.cpp
image/Decoder.h
image/DecoderFlags.h
image/SurfacePipeFactory.h
image/decoders/GIF2.h
image/decoders/nsGIFDecoder2.cpp
image/decoders/nsPNGDecoder.cpp
image/imgFrame.cpp
image/imgFrame.h
--- a/image/Decoder.cpp
+++ b/image/Decoder.cpp
@@ -286,17 +286,17 @@ nsresult
 Decoder::AllocateFrame(const gfx::IntSize& aOutputSize,
                        const gfx::IntRect& aFrameRect,
                        gfx::SurfaceFormat aFormat,
                        uint8_t aPaletteDepth,
                        const Maybe<AnimationParams>& aAnimParams)
 {
   mCurrentFrame = AllocateFrameInternal(aOutputSize, aFrameRect, aFormat,
                                         aPaletteDepth, aAnimParams,
-                                        mCurrentFrame.get());
+                                        std::move(mCurrentFrame));
 
   if (mCurrentFrame) {
     mHasFrameToTake = true;
 
     // Gather the raw pointers the decoders will use.
     mCurrentFrame->GetImageData(&mImageData, &mImageDataLength);
     mCurrentFrame->GetPaletteData(&mColormap, &mColormapSize);
 
@@ -316,17 +316,17 @@ Decoder::AllocateFrame(const gfx::IntSiz
 }
 
 RawAccessFrameRef
 Decoder::AllocateFrameInternal(const gfx::IntSize& aOutputSize,
                                const gfx::IntRect& aFrameRect,
                                SurfaceFormat aFormat,
                                uint8_t aPaletteDepth,
                                const Maybe<AnimationParams>& aAnimParams,
-                               imgFrame* aPreviousFrame)
+                               RawAccessFrameRef&& aPreviousFrame)
 {
   if (HasError()) {
     return RawAccessFrameRef();
   }
 
   uint32_t frameNum = aAnimParams ? aAnimParams->mFrameNum : 0;
   if (frameNum != mFrameCount) {
     MOZ_ASSERT_UNREACHABLE("Allocating frames out of order");
@@ -369,17 +369,37 @@ Decoder::AllocateFrameInternal(const gfx
     }
   }
 
   if (frameNum > 0) {
     ref->SetRawAccessOnly();
 
     // Some GIFs are huge but only have a small area that they animate. We only
     // need to refresh that small area when frame 0 comes around again.
-    mFirstFrameRefreshArea.UnionRect(mFirstFrameRefreshArea, frame->GetRect());
+    mFirstFrameRefreshArea.UnionRect(mFirstFrameRefreshArea,
+                                     ref->GetBoundedBlendRect());
+
+    if (ShouldBlendAnimation()) {
+      if (aPreviousFrame->GetDisposalMethod() !=
+          DisposalMethod::RESTORE_PREVIOUS) {
+        // If the new restore frame is the direct previous frame, then we know
+        // the dirty rect is composed only of the current frame's blend rect and
+        // the restore frame's clear rect (if applicable) which are handled in
+        // filters.
+        mRestoreFrame = std::move(aPreviousFrame);
+        mRestoreDirtyRect.SetBox(0, 0, 0, 0);
+      } else {
+        // We only need the previous frame's dirty rect, because while there may
+        // have been several frames between us and mRestoreFrame, the only areas
+        // that changed are the restore frame's clear rect, the current frame
+        // blending rect, and the previous frame's blending rect. All else is
+        // forgotten due to us restoring the same frame again.
+        mRestoreDirtyRect = aPreviousFrame->GetBoundedBlendRect();
+      }
+    }
   }
 
   mFrameCount++;
 
   return ref;
 }
 
 /*
--- a/image/Decoder.h
+++ b/image/Decoder.h
@@ -264,16 +264,25 @@ public:
    * Should we stop decoding after the first frame?
    */
   bool IsFirstFrameDecode() const
   {
     return bool(mDecoderFlags & DecoderFlags::FIRST_FRAME_ONLY);
   }
 
   /**
+   * Should we blend the current frame with the previous frames to produce a
+   * complete frame instead of a partial frame for animated images?
+   */
+  bool ShouldBlendAnimation() const
+  {
+    return bool(mDecoderFlags & DecoderFlags::BLEND_ANIMATION);
+  }
+
+  /**
    * @return the number of complete animation frames which have been decoded so
    * far, if it has changed since the last call to TakeCompleteFrameCount();
    * otherwise, returns Nothing().
    */
   Maybe<uint32_t> TakeCompleteFrameCount();
 
   // The number of frames we have, including anything in-progress. Thus, this
   // is only 0 if we haven't begun any frames.
@@ -405,16 +414,42 @@ public:
   RasterImage* GetImageMaybeNull() const { return mImage.get(); }
 
   RawAccessFrameRef GetCurrentFrameRef()
   {
     return mCurrentFrame ? mCurrentFrame->RawAccessRef()
                          : RawAccessFrameRef();
   }
 
+  /**
+   * For use during decoding only. Allows the BlendAnimationFilter to get the
+   * current frame we are producing for its animation parameters.
+   */
+  imgFrame* GetCurrentFrame()
+  {
+    MOZ_ASSERT(ShouldBlendAnimation());
+    return mCurrentFrame.get();
+  }
+
+  /**
+   * For use during decoding only. Allows the BlendAnimationFilter to get the
+   * frame it should be pulling the previous frame data from.
+   */
+  const RawAccessFrameRef& GetRestoreFrameRef() const
+  {
+    MOZ_ASSERT(ShouldBlendAnimation());
+    return mRestoreFrame;
+  }
+
+  const gfx::IntRect& GetRestoreDirtyRect() const
+  {
+    MOZ_ASSERT(ShouldBlendAnimation());
+    return mRestoreDirtyRect;
+  }
+
   bool HasFrameToTake() const { return mHasFrameToTake; }
   void ClearHasFrameToTake() {
     MOZ_ASSERT(mHasFrameToTake);
     mHasFrameToTake = false;
   }
 
 protected:
   friend class AutoRecordDecoderTelemetry;
@@ -539,32 +574,42 @@ private:
     return mInFrame ? mFrameCount - 1 : mFrameCount;
   }
 
   RawAccessFrameRef AllocateFrameInternal(const gfx::IntSize& aOutputSize,
                                           const gfx::IntRect& aFrameRect,
                                           gfx::SurfaceFormat aFormat,
                                           uint8_t aPaletteDepth,
                                           const Maybe<AnimationParams>& aAnimParams,
-                                          imgFrame* aPreviousFrame);
+                                          RawAccessFrameRef&& aPreviousFrame);
 
 protected:
   Maybe<Downscaler> mDownscaler;
 
   uint8_t* mImageData;  // Pointer to image data in either Cairo or 8bit format
   uint32_t mImageDataLength;
   uint32_t* mColormap;  // Current colormap to be used in Cairo format
   uint32_t mColormapSize;
 
 private:
   RefPtr<RasterImage> mImage;
   Maybe<SourceBufferIterator> mIterator;
+
+  // The current frame the decoder is producing.
   RawAccessFrameRef mCurrentFrame;
+
+  // The complete frame to combine with the current partial frame to produce
+  // a complete current frame.
+  RawAccessFrameRef mRestoreFrame;
+
   ImageMetadata mImageMetadata;
-  gfx::IntRect mInvalidRect; // Tracks an invalidation region in the current frame.
+
+  gfx::IntRect mInvalidRect; // Tracks new rows as the current frame is decoded.
+  gfx::IntRect mRestoreDirtyRect; // Tracks an invalidation region between the
+                                  // restore frame and the previous frame.
   Maybe<gfx::IntSize> mOutputSize;  // The size of our output surface.
   Maybe<gfx::IntSize> mExpectedSize; // The expected size of the image.
   Progress mProgress;
 
   uint32_t mFrameCount; // Number of frames, including anything in-progress
   FrameTimeout mLoopLength;  // Length of a single loop of this image.
   gfx::IntRect mFirstFrameRefreshArea;  // The area of the image that needs to
                                         // be invalidated when the animation loops.
--- a/image/DecoderFlags.h
+++ b/image/DecoderFlags.h
@@ -26,17 +26,25 @@ enum class DecoderFlags : uint8_t
   ASYNC_NOTIFY                   = 1 << 3,
 
   /**
    * By default, a surface is considered substitutable. That means callers are
    * willing to accept a less than ideal match to display. If a caller requires
    * a specific size and won't accept alternatives, then this flag should be
    * set.
    */
-  CANNOT_SUBSTITUTE              = 1 << 4
+  CANNOT_SUBSTITUTE              = 1 << 4,
+
+  /**
+   * By default, an animation decoder will produce partial frames that need to
+   * be combined with the previously displayed/composited frame by FrameAnimator
+   * to produce a complete frame. If this flag is set, the decoder will perform
+   * this blending at decode time, and the frames produced are complete.
+   */
+  BLEND_ANIMATION                = 1 << 5
 };
 MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS(DecoderFlags)
 
 /**
  * @return the default set of decode flags.
  */
 inline DecoderFlags
 DefaultDecoderFlags()
--- a/image/SurfacePipeFactory.h
+++ b/image/SurfacePipeFactory.h
@@ -49,21 +49,25 @@ enum class SurfacePipeFlags
   DEINTERLACE         = 1 << 0,  // If set, deinterlace the image.
 
   ADAM7_INTERPOLATE   = 1 << 1,  // If set, the caller is deinterlacing the
                                  // image using ADAM7, and we may want to
                                  // interpolate it for better intermediate results.
 
   FLIP_VERTICALLY     = 1 << 2,  // If set, flip the image vertically.
 
-  PROGRESSIVE_DISPLAY = 1 << 3   // If set, we expect the image to be displayed
+  PROGRESSIVE_DISPLAY = 1 << 3,  // If set, we expect the image to be displayed
                                  // progressively. This enables features that
                                  // result in a better user experience for
                                  // progressive display but which may be more
                                  // computationally expensive.
+
+  BLEND_ANIMATION     = 1 << 4   // If set, produce the next full frame of an
+                                 // animation instead of a partial frame to be
+                                 // blended later.
 };
 MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS(SurfacePipeFlags)
 
 class SurfacePipeFactory
 {
 public:
   /**
    * Creates and initializes a normal (i.e., non-paletted) SurfacePipe.
@@ -96,45 +100,50 @@ public:
                     SurfacePipeFlags aFlags)
   {
     const bool deinterlace = bool(aFlags & SurfacePipeFlags::DEINTERLACE);
     const bool flipVertically = bool(aFlags & SurfacePipeFlags::FLIP_VERTICALLY);
     const bool progressiveDisplay = bool(aFlags & SurfacePipeFlags::PROGRESSIVE_DISPLAY);
     const bool downscale = aInputSize != aOutputSize;
     const bool removeFrameRect =
       !aFrameRect.IsEqualEdges(nsIntRect(0, 0, aInputSize.width, aInputSize.height));
+    const bool blendAnimation = bool(aFlags & SurfacePipeFlags::BLEND_ANIMATION);
 
     // Don't interpolate if we're sure we won't show this surface to the user
     // until it's completely decoded. The final pass of an ADAM7 image doesn't
     // need interpolation, so we only need to interpolate if we'll be displaying
     // the image while it's still being decoded.
     const bool adam7Interpolate = bool(aFlags & SurfacePipeFlags::ADAM7_INTERPOLATE) &&
                                   progressiveDisplay;
 
     if (deinterlace && adam7Interpolate) {
       MOZ_ASSERT_UNREACHABLE("ADAM7 deinterlacing is handled by libpng");
       return Nothing();
     }
 
+    MOZ_ASSERT_IF(blendAnimation, aAnimParams);
+
     // Construct configurations for the SurfaceFilters. Note that the order of
     // these filters is significant. We want to deinterlace or interpolate raw
     // input rows, before any other transformations, and we want to remove the
     // frame rect (which may involve adding blank rows or columns to the image)
     // before any downscaling, so that the new rows and columns are taken into
     // account.
     DeinterlacingConfig<uint32_t> deinterlacingConfig { progressiveDisplay };
     ADAM7InterpolatingConfig interpolatingConfig;
     RemoveFrameRectConfig removeFrameRectConfig { aFrameRect };
+    BlendAnimationConfig blendAnimationConfig { aDecoder };
     DownscalingConfig downscalingConfig { aInputSize, aFormat };
     SurfaceConfig surfaceConfig { aDecoder, aOutputSize, aFormat,
                                   flipVertically, aAnimParams };
 
     Maybe<SurfacePipe> pipe;
 
     if (downscale) {
+      MOZ_ASSERT(!blendAnimation);
       if (removeFrameRect) {
         if (deinterlace) {
           pipe = MakePipe(deinterlacingConfig, removeFrameRectConfig,
                           downscalingConfig, surfaceConfig);
         } else if (adam7Interpolate) {
           pipe = MakePipe(interpolatingConfig, removeFrameRectConfig,
                           downscalingConfig, surfaceConfig);
         } else {  // (deinterlace and adam7Interpolate are false)
@@ -145,25 +154,33 @@ public:
           pipe = MakePipe(deinterlacingConfig, downscalingConfig, surfaceConfig);
         } else if (adam7Interpolate) {
           pipe = MakePipe(interpolatingConfig, downscalingConfig, surfaceConfig);
         } else {  // (deinterlace and adam7Interpolate are false)
           pipe = MakePipe(downscalingConfig, surfaceConfig);
         }
       }
     } else {  // (downscale is false)
-      if (removeFrameRect) {
+      if (blendAnimation) {
+        if (deinterlace) {
+          pipe = MakePipe(deinterlacingConfig, blendAnimationConfig, surfaceConfig);
+        } else if (adam7Interpolate) {
+          pipe = MakePipe(interpolatingConfig, blendAnimationConfig, surfaceConfig);
+        } else {  // (deinterlace and adam7Interpolate are false)
+          pipe = MakePipe(blendAnimationConfig, surfaceConfig);
+        }
+      } else if (removeFrameRect) {
         if (deinterlace) {
           pipe = MakePipe(deinterlacingConfig, removeFrameRectConfig, surfaceConfig);
         } else if (adam7Interpolate) {
           pipe = MakePipe(interpolatingConfig, removeFrameRectConfig, surfaceConfig);
         } else {  // (deinterlace and adam7Interpolate are false)
           pipe = MakePipe(removeFrameRectConfig, surfaceConfig);
         }
-      } else {  // (removeFrameRect is false)
+      } else {  // (blendAnimation and removeFrameRect are false)
         if (deinterlace) {
           pipe = MakePipe(deinterlacingConfig, surfaceConfig);
         } else if (adam7Interpolate) {
           pipe = MakePipe(interpolatingConfig, surfaceConfig);
         } else {  // (deinterlace and adam7Interpolate are false)
           pipe = MakePipe(surfaceConfig);
         }
       }
--- a/image/decoders/GIF2.h
+++ b/image/decoders/GIF2.h
@@ -32,16 +32,18 @@ typedef struct gif_struct {
 
     // Output state machine
     int64_t pixels_remaining;  // Pixels remaining to be output.
 
     // Parameters for image frame currently being decoded
     int tpixel;                 // Index of transparent pixel
     int32_t disposal_method;    // Restore to background, leave in place, etc.
     uint32_t* local_colormap;   // Per-image colormap
+    uint32_t local_colormap_buffer_size; // Size of the buffer containing the
+                                         // local colormap.
     int local_colormap_size;    // Size of local colormap array.
     uint32_t delay_time;        // Display time, in milliseconds,
                                 // for this image in a multi-image GIF
 
     // Global (multi-image) state
     int version;                // Either 89 for GIF89 or 87 for GIF87
     int32_t screen_width;       // Logical screen width & height
     int32_t screen_height;
--- a/image/decoders/nsGIFDecoder2.cpp
+++ b/image/decoders/nsGIFDecoder2.cpp
@@ -174,40 +174,50 @@ nsGIFDecoder2::CheckForTransparency(cons
 nsresult
 nsGIFDecoder2::BeginImageFrame(const IntRect& aFrameRect,
                                uint16_t aDepth,
                                bool aIsInterlaced)
 {
   MOZ_ASSERT(HasSize());
 
   bool hasTransparency = CheckForTransparency(aFrameRect);
+  bool blendAnimation = ShouldBlendAnimation();
 
   // Make sure there's no animation if we're downscaling.
   MOZ_ASSERT_IF(Size() != OutputSize(), !GetImageMetadata().HasAnimation());
 
   AnimationParams animParams {
     aFrameRect,
     FrameTimeout::FromRawMilliseconds(mGIFStruct.delay_time),
     uint32_t(mGIFStruct.images_decoded),
     BlendMethod::OVER,
     DisposalMethod(mGIFStruct.disposal_method)
   };
 
   SurfacePipeFlags pipeFlags = aIsInterlaced
                              ? SurfacePipeFlags::DEINTERLACE
                              : SurfacePipeFlags();
 
-  Maybe<SurfacePipe> pipe;
+  gfx::SurfaceFormat format;
   if (mGIFStruct.images_decoded == 0) {
-    gfx::SurfaceFormat format = hasTransparency ? SurfaceFormat::B8G8R8A8
-                                                : SurfaceFormat::B8G8R8X8;
-
     // The first frame may be displayed progressively.
     pipeFlags |= SurfacePipeFlags::PROGRESSIVE_DISPLAY;
 
+    format = hasTransparency ? SurfaceFormat::B8G8R8A8
+                             : SurfaceFormat::B8G8R8X8;
+  } else {
+    format = SurfaceFormat::B8G8R8A8;
+  }
+
+  if (blendAnimation) {
+    pipeFlags |= SurfacePipeFlags::BLEND_ANIMATION;
+  }
+
+  Maybe<SurfacePipe> pipe;
+  if (mGIFStruct.images_decoded == 0 || blendAnimation) {
     // The first frame is always decoded into an RGB surface.
     pipe =
       SurfacePipeFactory::CreateSurfacePipe(this, Size(), OutputSize(),
                                             aFrameRect, format,
                                             Some(animParams), pipeFlags);
   } else {
     // This is an animation frame (and not the first). To minimize the memory
     // usage of animations, the image data is stored in paletted form.
@@ -215,18 +225,18 @@ nsGIFDecoder2::BeginImageFrame(const Int
     // We should never use paletted surfaces with a draw target directly, so
     // the only practical difference between B8G8R8A8 and B8G8R8X8 is the
     // cleared pixel value if we get truncated. We want 0 in that case to
     // ensure it is an acceptable value for the color map as was the case
     // historically.
     MOZ_ASSERT(Size() == OutputSize());
     pipe =
       SurfacePipeFactory::CreatePalettedSurfacePipe(this, Size(), aFrameRect,
-                                                    SurfaceFormat::B8G8R8A8,
-                                                    aDepth, Some(animParams),
+                                                    format, aDepth,
+                                                    Some(animParams),
                                                     pipeFlags);
   }
 
   mCurrentFrameIndex = mGIFStruct.images_decoded;
 
   if (!pipe) {
     mPipe = SurfacePipe();
     return NS_ERROR_FAILURE;
@@ -903,28 +913,40 @@ nsGIFDecoder2::FinishImageDescriptor(con
   mGIFStruct.pixels_remaining =
     int64_t(frameRect.Width()) * int64_t(frameRect.Height());
 
   if (haveLocalColorTable) {
     // We have a local color table, so prepare to read it into the palette of
     // the current frame.
     mGIFStruct.local_colormap_size = 1 << depth;
 
-    if (mGIFStruct.images_decoded == 0) {
-      // The first frame has a local color table. Allocate space for it as we
-      // use a BGRA or BGRX surface for the first frame; such surfaces don't
-      // have their own palettes internally.
+    if (!mColormap) {
+      // Allocate a buffer to store the local color tables. This happens if the
+      // first frame has a local color table, or for subsequent frames when
+      // blending the animation during decoding.
+      MOZ_ASSERT(mGIFStruct.images_decoded == 0 || ShouldBlendAnimation());
+
+      // Ensure our current colormap buffer is large enough to hold the new one.
       mColormapSize = sizeof(uint32_t) << realDepth;
-      if (!mGIFStruct.local_colormap) {
+      if (mGIFStruct.local_colormap_buffer_size < mColormapSize) {
+        if (mGIFStruct.local_colormap) {
+          free(mGIFStruct.local_colormap);
+        }
+        mGIFStruct.local_colormap_buffer_size = mColormapSize;
         mGIFStruct.local_colormap =
           static_cast<uint32_t*>(moz_xmalloc(mColormapSize));
+      } else {
+        mColormapSize = mGIFStruct.local_colormap_buffer_size;
       }
+
       mColormap = mGIFStruct.local_colormap;
     }
 
+    MOZ_ASSERT(mColormap);
+
     const size_t size = 3 << depth;
     if (mColormapSize > size) {
       // Clear the part of the colormap which will be unused with this palette.
       // If a GIF references an invalid palette entry, ensure the entry is opaque white.
       // This is needed for Skia as if it isn't, RGBX surfaces will cause blending issues
       // with Skia.
       memset(reinterpret_cast<uint8_t*>(mColormap) + size, 0xFF,
              mColormapSize - size);
@@ -936,17 +958,17 @@ nsGIFDecoder2::FinishImageDescriptor(con
     // large and it'd be preferable to avoid unnecessary copies.
     return Transition::ToUnbuffered(State::FINISHED_LOCAL_COLOR_TABLE,
                                     State::LOCAL_COLOR_TABLE,
                                     size);
   }
 
   // There's no local color table; copy the global color table into the palette
   // of the current frame.
-  if (mGIFStruct.images_decoded > 0) {
+  if (mColormap) {
     memcpy(mColormap, mGIFStruct.global_colormap, mColormapSize);
   } else {
     mColormap = mGIFStruct.global_colormap;
   }
 
   return Transition::To(State::IMAGE_DATA_BLOCK, BLOCK_HEADER_LEN);
 }
 
@@ -1046,17 +1068,17 @@ nsGIFDecoder2::ReadLZWData(const char* a
 {
   const uint8_t* data = reinterpret_cast<const uint8_t*>(aData);
   size_t length = aLength;
 
   while (mGIFStruct.pixels_remaining > 0 &&
          (length > 0 || mGIFStruct.bits >= mGIFStruct.codesize)) {
     size_t bytesRead = 0;
 
-    auto result = mGIFStruct.images_decoded == 0
+    auto result = mGIFStruct.images_decoded == 0 || ShouldBlendAnimation()
       ? mPipe.WritePixelBlocks<uint32_t>([&](uint32_t* aPixelBlock, int32_t aBlockSize) {
           return YieldPixels<uint32_t>(data, length, &bytesRead, aPixelBlock, aBlockSize);
         })
       : mPipe.WritePixelBlocks<uint8_t>([&](uint8_t* aPixelBlock, int32_t aBlockSize) {
           return YieldPixels<uint8_t>(data, length, &bytesRead, aPixelBlock, aBlockSize);
         });
 
     if (MOZ_UNLIKELY(bytesRead > length)) {
--- a/image/decoders/nsPNGDecoder.cpp
+++ b/image/decoders/nsPNGDecoder.cpp
@@ -228,16 +228,20 @@ nsPNGDecoder::CreateFrame(const FrameInf
                              ? SurfacePipeFlags::ADAM7_INTERPOLATE
                              : SurfacePipeFlags();
 
   if (mNumFrames == 0) {
     // The first frame may be displayed progressively.
     pipeFlags |= SurfacePipeFlags::PROGRESSIVE_DISPLAY;
   }
 
+  if (ShouldBlendAnimation()) {
+    pipeFlags |= SurfacePipeFlags::BLEND_ANIMATION;
+  }
+
   Maybe<SurfacePipe> pipe =
     SurfacePipeFactory::CreateSurfacePipe(this, Size(), OutputSize(),
                                           aFrameInfo.mFrameRect, mFormat,
                                           animParams, pipeFlags);
 
   if (!pipe) {
     mPipe = SurfacePipe();
     return NS_ERROR_FAILURE;
--- a/image/imgFrame.cpp
+++ b/image/imgFrame.cpp
@@ -243,16 +243,21 @@ imgFrame::InitForDecoder(const nsIntSize
     NS_WARNING("Should have legal image size");
     mAborted = true;
     return NS_ERROR_FAILURE;
   }
 
   mImageSize = aImageSize;
   mFrameRect = aRect;
 
+  // May be updated shortly after InitForDecoder by BlendAnimationFilter
+  // because it needs to take the previous frames into consideration to
+  // properly calculate the dirty rect. We start with the whole frame as dirty.
+  mDirtyRect = aRect;
+
   if (aAnimParams) {
     mBlendRect = aAnimParams->mBlendRect;
     mTimeout = aAnimParams->mTimeout;
     mBlendMethod = aAnimParams->mBlendMethod;
     mDisposalMethod = aAnimParams->mDisposalMethod;
   } else {
     mBlendRect = aRect;
   }
--- a/image/imgFrame.h
+++ b/image/imgFrame.h
@@ -183,16 +183,19 @@ public:
   void GetImageData(uint8_t** aData, uint32_t* length) const;
   uint8_t* GetImageData() const;
 
   bool GetIsPaletted() const;
   void GetPaletteData(uint32_t** aPalette, uint32_t* length) const;
   uint32_t* GetPaletteData() const;
   uint8_t GetPaletteDepth() const { return mPaletteDepth; }
 
+  const IntRect& GetDirtyRect() const { return mDirtyRect; }
+  void SetDirtyRect(const IntRect& aDirtyRect) { mDirtyRect = aDirtyRect; }
+
   bool GetCompositingFailed() const;
   void SetCompositingFailed(bool val);
 
   void SetOptimizable();
 
   void FinalizeSurface();
   already_AddRefed<SourceSurface> GetSourceSurface();
 
@@ -290,20 +293,42 @@ private: // data
   bool mFinished;
   bool mOptimizable;
 
 
   //////////////////////////////////////////////////////////////////////////////
   // Effectively const data, only mutated in the Init methods.
   //////////////////////////////////////////////////////////////////////////////
 
+  //! The size of the buffer we are decoding to.
   IntSize      mImageSize;
+
+  //! XXX(aosmond): This means something different depending on the context. We
+  //!               should correct this.
+  //!
+  //! There are several different contexts for mFrameRect:
+  //! - If for a non-animated image, it will originate at (0, 0) and match
+  //!   the dimensions of mImageSize.
+  //! - If for an APNG, it also matches the above.
+  //! - If for a GIF which is producing full frames, it matches the above.
+  //! - If for a GIF which is producing partial frames, it matches mBlendRect.
   IntRect      mFrameRect;
+
+  //! The contents for the frame, as represented in the encoded image. This may
+  //! differ from mImageSize because it may be a partial frame. For the first
+  //! frame, this means we need to shift the data in place, and for animated
+  //! frames, it likely needs to be combined with a previous frame to get the full
+  //! contents.
   IntRect      mBlendRect;
 
+  //! This is the region that has changed between this frame and the previous
+  //! frame of an animation. For the first frame, this will be the same as
+  //! mFrameRect.
+  IntRect      mDirtyRect;
+
   //! The timeout for this frame.
   FrameTimeout mTimeout;
 
   DisposalMethod mDisposalMethod;
   BlendMethod    mBlendMethod;
   SurfaceFormat  mFormat;
 
   // The palette and image data for images that are paletted, since Cairo