--- a/browser/components/shell/nsWindowsShellService.cpp
+++ b/browser/components/shell/nsWindowsShellService.cpp
@@ -3,17 +3,17 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "nsWindowsShellService.h"
#include "imgIContainer.h"
#include "imgIRequest.h"
#include "mozilla/gfx/2D.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsIDOMElement.h"
#include "nsIDOMHTMLImageElement.h"
#include "nsIImageLoadingContent.h"
#include "nsIPrefService.h"
#include "nsIPrefLocalizedString.h"
#include "nsIServiceManager.h"
#include "nsIStringBundle.h"
#include "nsNetUtil.h"
@@ -1070,29 +1070,29 @@ nsWindowsShellService::SetShouldCheckDef
return prefs->SetBoolPref(PREF_CHECKDEFAULTBROWSER, aShouldCheck);
}
static nsresult
WriteBitmap(nsIFile* aFile, imgIContainer* aImage)
{
nsresult rv;
- nsRefPtr<SourceSurface> surface =
+ RefPtr<SourceSurface> surface =
aImage->GetFrame(imgIContainer::FRAME_FIRST,
imgIContainer::FLAG_SYNC_DECODE);
NS_ENSURE_TRUE(surface, NS_ERROR_FAILURE);
// For either of the following formats we want to set the biBitCount member
// of the BITMAPINFOHEADER struct to 32, below. For that value the bitmap
// format defines that the A8/X8 WORDs in the bitmap byte stream be ignored
// for the BI_RGB value we use for the biCompression member.
MOZ_ASSERT(surface->GetFormat() == SurfaceFormat::B8G8R8A8 ||
surface->GetFormat() == SurfaceFormat::B8G8R8X8);
- nsRefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
+ RefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
NS_ENSURE_TRUE(dataSurface, NS_ERROR_FAILURE);
int32_t width = dataSurface->GetSize().width;
int32_t height = dataSurface->GetSize().height;
int32_t bytesPerPixel = 4 * sizeof(uint8_t);
uint32_t bytesPerRow = bytesPerPixel * width;
// initialize these bitmap structs which we will later
--- a/devtools/shared/heapsnapshot/HeapSnapshot.h
+++ b/devtools/shared/heapsnapshot/HeapSnapshot.h
@@ -8,17 +8,17 @@
#include "js/HashTable.h"
#include "mozilla/ErrorResult.h"
#include "mozilla/devtools/DeserializedNode.h"
#include "mozilla/dom/BindingDeclarations.h"
#include "mozilla/HashFunctions.h"
#include "mozilla/Maybe.h"
#include "mozilla/RefCounted.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/UniquePtr.h"
#include "CoreDump.pb.h"
#include "nsCOMPtr.h"
#include "nsCRTGlue.h"
#include "nsCycleCollectionParticipant.h"
#include "nsISupports.h"
--- a/devtools/shared/security/LocalCertService.cpp
+++ b/devtools/shared/security/LocalCertService.cpp
@@ -1,16 +1,16 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "LocalCertService.h"
#include "mozilla/ModuleUtils.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "cert.h"
#include "CryptoTask.h"
#include "nsIPK11Token.h"
#include "nsIPK11TokenDB.h"
#include "nsIX509Cert.h"
#include "nsIX509CertDB.h"
#include "nsIX509CertValidity.h"
#include "nsLiteralString.h"
@@ -414,17 +414,17 @@ LocalCertService::GetOrCreateCert(const
// Before sending off the task, login to key slot if needed
nsresult rv = LoginToKeySlot();
if (NS_FAILED(rv)) {
aCallback->HandleCert(nullptr, rv);
return NS_OK;
}
- nsRefPtr<LocalCertGetTask> task(new LocalCertGetTask(aNickname, aCallback));
+ RefPtr<LocalCertGetTask> task(new LocalCertGetTask(aNickname, aCallback));
return task->Dispatch("LocalCertGet");
}
NS_IMETHODIMP
LocalCertService::RemoveCert(const nsACString& aNickname,
nsILocalCertCallback* aCallback)
{
if (NS_WARN_IF(aNickname.IsEmpty())) {
@@ -436,17 +436,17 @@ LocalCertService::RemoveCert(const nsACS
// Before sending off the task, login to key slot if needed
nsresult rv = LoginToKeySlot();
if (NS_FAILED(rv)) {
aCallback->HandleResult(rv);
return NS_OK;
}
- nsRefPtr<LocalCertRemoveTask> task(
+ RefPtr<LocalCertRemoveTask> task(
new LocalCertRemoveTask(aNickname, aCallback));
return task->Dispatch("LocalCertRm");
}
NS_IMETHODIMP
LocalCertService::GetLoginPromptRequired(bool* aRequired)
{
nsresult rv;
--- a/dom/base/ImageEncoder.cpp
+++ b/dom/base/ImageEncoder.cpp
@@ -3,17 +3,17 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "ImageEncoder.h"
#include "mozilla/dom/CanvasRenderingContext2D.h"
#include "mozilla/gfx/2D.h"
#include "mozilla/gfx/DataSurfaceHelpers.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/SyncRunnable.h"
#include "mozilla/unused.h"
#include "gfxUtils.h"
#include "nsNetUtil.h"
using namespace mozilla::gfx;
namespace mozilla {
@@ -47,18 +47,18 @@ public:
nsCOMPtr<nsIThread> mainThread = do_GetMainThread();
MOZ_ASSERT(mainThread);
SyncRunnable::DispatchToThread(mainThread, this, false);
return mDataSourceSurface.forget();
}
private:
- nsRefPtr<layers::Image> mImage;
- nsRefPtr<gfx::DataSourceSurface> mDataSourceSurface;
+ RefPtr<layers::Image> mImage;
+ RefPtr<gfx::DataSourceSurface> mDataSourceSurface;
};
// This function returns a DataSourceSurface in B8G8R8A8 format.
// It uses SourceSurface to do format convert. Because most SourceSurface in
// image formats should be referenced or dereferenced on main thread, it uses a
// sync class SurfaceHelper to retrieve SourceSurface and convert to B8G8R8A8 on
// main thread.
already_AddRefed<DataSourceSurface>
@@ -388,18 +388,18 @@ ImageEncoder::ExtractDataInternal(const
rv = aEncoder->InitFromData(data.Elements(),
aSize.width * aSize.height * 4,
aSize.width,
aSize.height,
aSize.width * 4,
imgIEncoder::INPUT_FORMAT_HOSTARGB,
aOptions);
} else {
- nsRefPtr<gfx::DataSourceSurface> dataSurface;
- nsRefPtr<layers::Image> image(aImage);
+ RefPtr<gfx::DataSourceSurface> dataSurface;
+ RefPtr<layers::Image> image(aImage);
dataSurface = GetBRGADataSourceSurfaceSync(image.forget());
DataSourceSurface::MappedSurface map;
if (!dataSurface->Map(gfx::DataSourceSurface::MapType::READ, &map)) {
return NS_ERROR_INVALID_ARG;
}
rv = aEncoder->InitFromData(map.mData,
aSize.width * aSize.height * 4,
@@ -414,17 +414,17 @@ ImageEncoder::ExtractDataInternal(const
if (NS_SUCCEEDED(rv)) {
imgStream = do_QueryInterface(aEncoder);
}
} else {
// no context, so we have to encode an empty image
// note that if we didn't have a current context, the spec says we're
// supposed to just return transparent black pixels of the canvas
// dimensions.
- nsRefPtr<DataSourceSurface> emptyCanvas =
+ RefPtr<DataSourceSurface> emptyCanvas =
Factory::CreateDataSourceSurfaceWithStride(IntSize(aSize.width, aSize.height),
SurfaceFormat::B8G8R8A8,
4 * aSize.width, true);
if (NS_WARN_IF(!emptyCanvas)) {
return NS_ERROR_INVALID_ARG;
}
DataSourceSurface::MappedSurface map;
--- a/dom/base/nsContentUtils.cpp
+++ b/dom/base/nsContentUtils.cpp
@@ -7487,21 +7487,21 @@ nsContentUtils::TransferableToIPCTransfe
NS_ConsumeStream(stream, UINT32_MAX, imageData);
item->data() = imageData;
continue;
}
// Images to be placed on the clipboard are imgIContainers.
nsCOMPtr<imgIContainer> image(do_QueryInterface(data));
if (image) {
- nsRefPtr<mozilla::gfx::SourceSurface> surface =
+ RefPtr<mozilla::gfx::SourceSurface> surface =
image->GetFrame(imgIContainer::FRAME_CURRENT,
imgIContainer::FLAG_SYNC_DECODE);
if (surface) {
- nsRefPtr<mozilla::gfx::DataSourceSurface> dataSurface =
+        RefPtr<mozilla::gfx::DataSourceSurface> dataSurface =
surface->GetDataSurface();
size_t length;
int32_t stride;
mozilla::UniquePtr<char[]> surfaceData =
nsContentUtils::GetSurfaceData(dataSurface, &length, &stride);
IPCDataTransferItem* item = aIPCDataTransfer->items().AppendElement();
item->flavor() = nsCString(flavorStr);
--- a/dom/base/nsDOMWindowUtils.cpp
+++ b/dom/base/nsDOMWindowUtils.cpp
@@ -1420,18 +1420,18 @@ nsDOMWindowUtils::CompareCanvases(nsIDOM
uint32_t* aMaxDifference,
uint32_t* retVal)
{
if (aCanvas1 == nullptr ||
aCanvas2 == nullptr ||
retVal == nullptr)
return NS_ERROR_FAILURE;
- nsRefPtr<DataSourceSurface> img1 = CanvasToDataSourceSurface(aCanvas1);
- nsRefPtr<DataSourceSurface> img2 = CanvasToDataSourceSurface(aCanvas2);
+ RefPtr<DataSourceSurface> img1 = CanvasToDataSourceSurface(aCanvas1);
+ RefPtr<DataSourceSurface> img2 = CanvasToDataSourceSurface(aCanvas2);
DataSourceSurface::ScopedMap map1(img1, DataSourceSurface::READ);
DataSourceSurface::ScopedMap map2(img2, DataSourceSurface::READ);
if (img1 == nullptr || img2 == nullptr ||
!map1.IsMapped() || !map2.IsMapped() ||
img1->GetSize() != img2->GetSize() ||
map1.GetStride() != map2.GetStride()) {
@@ -2263,32 +2263,32 @@ nsDOMWindowUtils::AdvanceTimeAndRefresh(
}
}
nsPresContext* presContext = GetPresContext();
if (presContext) {
nsRefreshDriver* driver = presContext->RefreshDriver();
driver->AdvanceTimeAndRefresh(aMilliseconds);
- nsRefPtr<LayerTransactionChild> transaction = GetLayerTransaction();
+ RefPtr<LayerTransactionChild> transaction = GetLayerTransaction();
if (transaction && transaction->IPCOpen()) {
transaction->SendSetTestSampleTime(driver->MostRecentRefresh());
}
}
return NS_OK;
}
NS_IMETHODIMP
nsDOMWindowUtils::RestoreNormalRefresh()
{
// Kick the compositor out of test mode before the refresh driver, so that
// the refresh driver doesn't send an update that gets ignored by the
// compositor.
- nsRefPtr<LayerTransactionChild> transaction = GetLayerTransaction();
+ RefPtr<LayerTransactionChild> transaction = GetLayerTransaction();
if (transaction && transaction->IPCOpen()) {
transaction->SendLeaveTestMode();
}
if (nsPresContext* pc = GetPresContext()) {
nsRefreshDriver* driver = pc->RefreshDriver();
driver->RestoreNormalRefresh();
}
--- a/dom/bluetooth/bluedroid/BluetoothMapSmsManager.cpp
+++ b/dom/bluetooth/bluedroid/BluetoothMapSmsManager.cpp
@@ -8,17 +8,17 @@
#include "BluetoothMapSmsManager.h"
#include "BluetoothService.h"
#include "BluetoothSocket.h"
#include "BluetoothUtils.h"
#include "BluetoothUuid.h"
#include "ObexBase.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/Services.h"
#include "mozilla/StaticPtr.h"
#include "nsAutoPtr.h"
#include "nsIObserver.h"
#include "nsIObserverService.h"
USING_BLUETOOTH_NAMESPACE
using namespace mozilla;
--- a/dom/bluetooth/bluedroid/BluetoothOppManager.cpp
+++ b/dom/bluetooth/bluedroid/BluetoothOppManager.cpp
@@ -11,17 +11,17 @@
#include "BluetoothSocket.h"
#include "BluetoothUtils.h"
#include "BluetoothUuid.h"
#include "ObexBase.h"
#include "mozilla/dom/bluetooth/BluetoothTypes.h"
#include "mozilla/dom/ipc/BlobParent.h"
#include "mozilla/dom/File.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/Services.h"
#include "mozilla/StaticPtr.h"
#include "nsAutoPtr.h"
#include "nsCExternalHandlerService.h"
#include "nsIObserver.h"
#include "nsIObserverService.h"
#include "nsIFile.h"
#include "nsIInputStream.h"
--- a/dom/bluetooth/bluedroid/BluetoothPbapManager.cpp
+++ b/dom/bluetooth/bluedroid/BluetoothPbapManager.cpp
@@ -11,17 +11,17 @@
#include "BluetoothSocket.h"
#include "BluetoothUtils.h"
#include "BluetoothUuid.h"
#include "ObexBase.h"
#include "mozilla/dom/BluetoothPbapParametersBinding.h"
#include "mozilla/dom/File.h"
#include "mozilla/dom/ipc/BlobParent.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/Services.h"
#include "mozilla/StaticPtr.h"
#include "nsAutoPtr.h"
#include "nsIInputStream.h"
#include "nsIObserver.h"
#include "nsIObserverService.h"
USING_BLUETOOTH_NAMESPACE
--- a/dom/bluetooth/bluedroid/BluetoothSocket.cpp
+++ b/dom/bluetooth/bluedroid/BluetoothSocket.cpp
@@ -7,17 +7,17 @@
#include "BluetoothSocket.h"
#include <fcntl.h>
#include <sys/socket.h>
#include "BluetoothSocketObserver.h"
#include "BluetoothInterface.h"
#include "BluetoothUtils.h"
#include "mozilla/ipc/UnixSocketWatcher.h"
#include "mozilla/FileUtils.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsISupportsImpl.h" // for MOZ_COUNT_CTOR, MOZ_COUNT_DTOR
#include "nsXULAppAPI.h"
using namespace mozilla::ipc;
USING_BLUETOOTH_NAMESPACE
static const size_t MAX_READ_SIZE = 1 << 16;
static BluetoothSocketInterface* sBluetoothSocketInterface;
--- a/dom/bluetooth/bluez/BluetoothOppManager.cpp
+++ b/dom/bluetooth/bluez/BluetoothOppManager.cpp
@@ -11,17 +11,17 @@
#include "BluetoothSocket.h"
#include "BluetoothUtils.h"
#include "BluetoothUuid.h"
#include "ObexBase.h"
#include "mozilla/dom/bluetooth/BluetoothTypes.h"
#include "mozilla/dom/File.h"
#include "mozilla/dom/ipc/BlobParent.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/Services.h"
#include "mozilla/StaticPtr.h"
#include "nsAutoPtr.h"
#include "nsCExternalHandlerService.h"
#include "nsIObserver.h"
#include "nsIObserverService.h"
#include "nsIFile.h"
#include "nsIInputStream.h"
--- a/dom/bluetooth/bluez/BluetoothSocket.cpp
+++ b/dom/bluetooth/bluez/BluetoothSocket.cpp
@@ -3,17 +3,17 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "BluetoothSocket.h"
#include <fcntl.h>
#include "BluetoothSocketObserver.h"
#include "BluetoothUnixSocketConnector.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsISupportsImpl.h" // for MOZ_COUNT_CTOR, MOZ_COUNT_DTOR
#include "nsXULAppAPI.h"
using namespace mozilla::ipc;
BEGIN_BLUETOOTH_NAMESPACE
static const size_t MAX_READ_SIZE = 1 << 16;
--- a/dom/camera/GonkCameraHwMgr.cpp
+++ b/dom/camera/GonkCameraHwMgr.cpp
@@ -22,17 +22,17 @@
#include <sys/system_properties.h>
#include "GonkNativeWindow.h"
#endif
#include "base/basictypes.h"
#include "nsDebug.h"
#include "mozilla/layers/TextureClient.h"
#include "CameraPreferences.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION >= 21
#include "GonkBufferQueueProducer.h"
#endif
#include "GonkCameraControl.h"
#include "CameraCommon.h"
using namespace mozilla;
using namespace mozilla::layers;
@@ -66,17 +66,17 @@ GonkCameraHardware::OnRateLimitPreview(b
#ifdef MOZ_WIDGET_GONK
void
GonkCameraHardware::OnNewFrame()
{
if (mClosing) {
return;
}
- nsRefPtr<TextureClient> buffer = mNativeWindow->getCurrentBuffer();
+ RefPtr<TextureClient> buffer = mNativeWindow->getCurrentBuffer();
if (!buffer) {
DOM_CAMERA_LOGE("received null frame");
return;
}
OnNewPreviewFrame(mTarget, buffer);
}
// Android data callback
--- a/dom/camera/GonkRecorder.h
+++ b/dom/camera/GonkRecorder.h
@@ -24,17 +24,17 @@
#include <camera/CameraParameters.h>
#include <utils/String8.h>
#include <system/audio.h>
#if ANDROID_VERSION >= 21
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#endif
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "GonkCameraHwMgr.h"
namespace android {
class GonkCameraSource;
struct MOZ_EXPORT MediaSource;
struct MediaWriter;
class MOZ_EXPORT MetaData;
--- a/dom/canvas/CanvasGradient.h
+++ b/dom/canvas/CanvasGradient.h
@@ -2,17 +2,17 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_dom_CanvasGradient_h
#define mozilla_dom_CanvasGradient_h
#include "mozilla/Attributes.h"
#include "nsTArray.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/dom/CanvasRenderingContext2DBinding.h"
#include "mozilla/dom/CanvasRenderingContext2D.h"
#include "mozilla/gfx/2D.h"
#include "nsWrapperCache.h"
#include "gfxGradientCache.h"
namespace mozilla {
namespace dom {
@@ -65,17 +65,17 @@ protected:
CanvasGradient(CanvasRenderingContext2D* aContext, Type aType)
: mContext(aContext)
, mType(aType)
{
}
nsRefPtr<CanvasRenderingContext2D> mContext;
nsTArray<mozilla::gfx::GradientStop> mRawStops;
- nsRefPtr<mozilla::gfx::GradientStops> mStops;
+  RefPtr<mozilla::gfx::GradientStops> mStops;
Type mType;
virtual ~CanvasGradient() {}
};
} // namespace dom
} // namespace mozilla
#endif // mozilla_dom_CanvasGradient_h
--- a/dom/canvas/CanvasImageCache.cpp
+++ b/dom/canvas/CanvasImageCache.cpp
@@ -47,17 +47,17 @@ struct ImageCacheEntryData {
size_t SizeInBytes() { return mSize.width * mSize.height * 4; }
// Key
nsRefPtr<Element> mImage;
nsIImageLoadingContent* mILC;
nsRefPtr<HTMLCanvasElement> mCanvas;
// Value
nsCOMPtr<imgIRequest> mRequest;
- nsRefPtr<SourceSurface> mSourceSurface;
+ RefPtr<SourceSurface> mSourceSurface;
IntSize mSize;
nsExpirationState mState;
};
class ImageCacheEntry : public PLDHashEntryHdr {
public:
typedef ImageCacheKey KeyType;
typedef const ImageCacheKey* KeyTypePointer;
@@ -105,17 +105,17 @@ public:
static KeyTypePointer KeyToPointer(KeyType key) { return &key; }
static PLDHashNumber HashKey(KeyTypePointer key)
{
return NS_PTR_TO_UINT32(key) >> 2;
}
enum { ALLOW_MEMMOVE = true };
nsCOMPtr<imgIRequest> mRequest;
- nsRefPtr<SourceSurface> mSourceSurface;
+ RefPtr<SourceSurface> mSourceSurface;
};
static bool sPrefsInitialized = false;
static int32_t sCanvasImageCacheLimit = 0;
class ImageCacheObserver;
class ImageCache final : public nsExpirationTracker<ImageCacheEntryData,4> {
--- a/dom/canvas/CanvasPath.h
+++ b/dom/canvas/CanvasPath.h
@@ -1,17 +1,17 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef CanvasPath_h
#define CanvasPath_h
#include "mozilla/Attributes.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsWrapperCache.h"
#include "mozilla/gfx/2D.h"
#include "mozilla/dom/BindingDeclarations.h"
#include "mozilla/ErrorResult.h"
namespace mozilla {
namespace dom {
@@ -70,18 +70,18 @@ public:
const Optional<NonNull<SVGMatrix>>& aMatrix);
private:
virtual ~CanvasPath() {}
nsCOMPtr<nsISupports> mParent;
static gfx::Float ToFloat(double aValue) { return gfx::Float(aValue); }
- mutable nsRefPtr<gfx::Path> mPath;
- mutable nsRefPtr<gfx::PathBuilder> mPathBuilder;
+ mutable RefPtr<gfx::Path> mPath;
+ mutable RefPtr<gfx::PathBuilder> mPathBuilder;
void EnsurePathBuilder() const;
};
} // namespace dom
} // namespace mozilla
#endif /* CanvasPath_h */
--- a/dom/canvas/CanvasPattern.h
+++ b/dom/canvas/CanvasPattern.h
@@ -3,17 +3,17 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_dom_CanvasPattern_h
#define mozilla_dom_CanvasPattern_h
#include "mozilla/Attributes.h"
#include "mozilla/dom/CanvasRenderingContext2DBinding.h"
#include "mozilla/dom/CanvasRenderingContext2D.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsISupports.h"
#include "nsWrapperCache.h"
class nsIPrincipal;
namespace mozilla {
namespace gfx {
class SourceSurface;
@@ -61,17 +61,17 @@ public:
{
return mContext;
}
// WebIDL
void SetTransform(SVGMatrix& matrix);
nsRefPtr<CanvasRenderingContext2D> mContext;
- nsRefPtr<gfx::SourceSurface> mSurface;
+ RefPtr<gfx::SourceSurface> mSurface;
nsCOMPtr<nsIPrincipal> mPrincipal;
mozilla::gfx::Matrix mTransform;
const bool mForceWriteOnly;
const bool mCORSUsed;
const RepeatMode mRepeat;
};
} // namespace dom
--- a/dom/canvas/CanvasRenderingContext2D.cpp
+++ b/dom/canvas/CanvasRenderingContext2D.cpp
@@ -345,17 +345,17 @@ public:
// Return a SourceSurface that contains the FillPaint or StrokePaint source.
already_AddRefed<SourceSurface>
DoSourcePaint(gfx::IntRect& aRect, CanvasRenderingContext2D::Style aStyle)
{
if (aRect.IsEmpty()) {
return nullptr;
}
- nsRefPtr<DrawTarget> dt =
+ RefPtr<DrawTarget> dt =
mFinalTarget->CreateSimilarDrawTarget(aRect.Size(), SurfaceFormat::B8G8R8A8);
if (!dt) {
aRect.SetEmpty();
return nullptr;
}
Matrix transform =
mFinalTarget->GetTransform().PostTranslate(-aRect.TopLeft() + mOffset);
@@ -371,21 +371,21 @@ public:
}
~AdjustedTargetForFilter()
{
if (!mCtx) {
return;
}
- nsRefPtr<SourceSurface> snapshot = mTarget->Snapshot();
-
- nsRefPtr<SourceSurface> fillPaint =
+ RefPtr<SourceSurface> snapshot = mTarget->Snapshot();
+
+ RefPtr<SourceSurface> fillPaint =
DoSourcePaint(mFillPaintRect, CanvasRenderingContext2D::Style::FILL);
- nsRefPtr<SourceSurface> strokePaint =
+ RefPtr<SourceSurface> strokePaint =
DoSourcePaint(mStrokePaintRect, CanvasRenderingContext2D::Style::STROKE);
AutoRestoreTransform autoRestoreTransform(mFinalTarget);
mFinalTarget->SetTransform(Matrix());
gfx::FilterSupport::RenderFilterDescription(
mFinalTarget, mCtx->CurrentState().filter,
gfx::Rect(mPostFilterBounds),
@@ -398,18 +398,18 @@ public:
}
DrawTarget* DT()
{
return mTarget;
}
private:
- nsRefPtr<DrawTarget> mTarget;
- nsRefPtr<DrawTarget> mFinalTarget;
+ RefPtr<DrawTarget> mTarget;
+ RefPtr<DrawTarget> mFinalTarget;
CanvasRenderingContext2D *mCtx;
gfx::IntRect mSourceGraphicRect;
gfx::IntRect mFillPaintRect;
gfx::IntRect mStrokePaintRect;
gfx::IntRect mPostFilterBounds;
gfx::IntPoint mOffset;
gfx::CompositionOp mCompositionOp;
};
@@ -466,17 +466,17 @@ public:
}
~AdjustedTargetForShadow()
{
if (!mCtx) {
return;
}
- nsRefPtr<SourceSurface> snapshot = mTarget->Snapshot();
+ RefPtr<SourceSurface> snapshot = mTarget->Snapshot();
mFinalTarget->DrawSurfaceWithShadow(snapshot, mTempRect.TopLeft(),
Color::FromABGR(mCtx->CurrentState().shadowColor),
mCtx->CurrentState().shadowOffset, mSigma,
mCompositionOp);
}
DrawTarget* DT()
@@ -485,18 +485,18 @@ public:
}
gfx::IntPoint OffsetToFinalDT()
{
return mTempRect.TopLeft();
}
private:
- nsRefPtr<DrawTarget> mTarget;
- nsRefPtr<DrawTarget> mFinalTarget;
+ RefPtr<DrawTarget> mTarget;
+ RefPtr<DrawTarget> mFinalTarget;
CanvasRenderingContext2D *mCtx;
Float mSigma;
gfx::IntRect mTempRect;
gfx::CompositionOp mCompositionOp;
};
/* This is an RAII based class that can be used as a drawtarget for
* operations that need a shadow or a filter drawn. It will automatically
@@ -640,17 +640,17 @@ private:
}
nsIntRegion extents =
gfx::FilterSupport::ComputePostFilterExtents(ctx->CurrentState().filter,
intBounds);
return gfx::Rect(extents.GetBounds());
}
- nsRefPtr<DrawTarget> mTarget;
+ RefPtr<DrawTarget> mTarget;
UniquePtr<AdjustedTargetForShadow> mShadowTarget;
UniquePtr<AdjustedTargetForFilter> mFilterTarget;
};
void
CanvasPattern::SetTransform(SVGMatrix& aMatrix)
{
mTransform = ToMatrix(aMatrix.GetMatrix());
@@ -1200,17 +1200,17 @@ bool CanvasRenderingContext2D::SwitchRen
gfxPlatform::GetPlatform()->GetSkiaGLGlue()->GetGLContext()->MakeCurrent();
gfxPlatform::GetPlatform()->GetSkiaGLGlue()->GetGLContext()->fDeleteTextures(1, &mVideoTexture);
}
mCurrentVideoSize.width = 0;
mCurrentVideoSize.height = 0;
}
#endif
- nsRefPtr<SourceSurface> snapshot;
+ RefPtr<SourceSurface> snapshot;
Matrix transform;
if (mTarget) {
snapshot = mTarget->Snapshot();
transform = mTarget->GetTransform();
} else {
MOZ_ASSERT(mBufferProvider);
// When mBufferProvider is true but we have no mTarget, our current state's
@@ -1639,22 +1639,22 @@ CanvasRenderingContext2D::SetContextOpti
void
CanvasRenderingContext2D::GetImageBuffer(uint8_t** aImageBuffer,
int32_t* aFormat)
{
*aImageBuffer = nullptr;
*aFormat = 0;
EnsureTarget();
- nsRefPtr<SourceSurface> snapshot = mTarget->Snapshot();
+ RefPtr<SourceSurface> snapshot = mTarget->Snapshot();
if (!snapshot) {
return;
}
- nsRefPtr<DataSourceSurface> data = snapshot->GetDataSurface();
+ RefPtr<DataSourceSurface> data = snapshot->GetDataSurface();
if (!data || data->GetSize() != IntSize(mWidth, mHeight)) {
return;
}
*aImageBuffer = SurfaceToPackedBGRA(data);
*aFormat = imgIEncoder::INPUT_FORMAT_HOSTARGB;
}
@@ -2052,17 +2052,17 @@ CanvasRenderingContext2D::CreatePattern(
error.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
return nullptr;
}
// Special case for Canvas, which could be an Azure canvas!
nsICanvasRenderingContextInternal *srcCanvas = canvas->GetContextAtIndex(0);
if (srcCanvas) {
// This might not be an Azure canvas!
- nsRefPtr<SourceSurface> srcSurf = srcCanvas->GetSurfaceSnapshot();
+ RefPtr<SourceSurface> srcSurf = srcCanvas->GetSurfaceSnapshot();
nsRefPtr<CanvasPattern> pat =
new CanvasPattern(this, srcSurf, repeatMode, htmlElement->NodePrincipal(), canvas->IsWriteOnly(), false);
return pat.forget();
}
} else if (source.IsHTMLImageElement()) {
HTMLImageElement* img = &source.GetAsHTMLImageElement();
@@ -2073,17 +2073,17 @@ CanvasRenderingContext2D::CreatePattern(
htmlElement = img;
} else if (source.IsHTMLVideoElement()) {
htmlElement = &source.GetAsHTMLVideoElement();
} else {
// Special case for ImageBitmap
ImageBitmap& imgBitmap = source.GetAsImageBitmap();
EnsureTarget();
- nsRefPtr<SourceSurface> srcSurf = imgBitmap.PrepareForDrawTarget(mTarget);
+ RefPtr<SourceSurface> srcSurf = imgBitmap.PrepareForDrawTarget(mTarget);
// An ImageBitmap never taints others so we set principalForSecurityCheck to
// nullptr and set CORSUsed to true for passing the security check in
// CanvasUtils::DoDrawImageSecurityCheck().
nsRefPtr<CanvasPattern> pat =
new CanvasPattern(this, srcSurf, repeatMode, nullptr, false, true);
return pat.forget();
@@ -2695,17 +2695,17 @@ CanvasRenderingContext2D::Fill(const Can
Redraw();
}
void CanvasRenderingContext2D::Fill(const CanvasPath& path, const CanvasWindingRule& winding)
{
EnsureTarget();
- nsRefPtr<gfx::Path> gfxpath = path.GetPath(winding, mTarget);
+ RefPtr<gfx::Path> gfxpath = path.GetPath(winding, mTarget);
if (!gfxpath) {
return;
}
gfx::Rect bounds;
if (NeedToCalculateBounds()) {
@@ -2748,17 +2748,17 @@ CanvasRenderingContext2D::Stroke()
Redraw();
}
void
CanvasRenderingContext2D::Stroke(const CanvasPath& path)
{
EnsureTarget();
- nsRefPtr<gfx::Path> gfxpath = path.GetPath(CanvasWindingRule::Nonzero, mTarget);
+ RefPtr<gfx::Path> gfxpath = path.GetPath(CanvasWindingRule::Nonzero, mTarget);
if (!gfxpath) {
return;
}
const ContextState &state = CurrentState();
StrokeOptions strokeOptions(state.lineWidth, state.lineJoin,
@@ -2867,17 +2867,17 @@ CanvasRenderingContext2D::Clip(const Can
CurrentState().clipsPushed.push_back(mPath);
}
void
CanvasRenderingContext2D::Clip(const CanvasPath& path, const CanvasWindingRule& winding)
{
EnsureTarget();
- nsRefPtr<gfx::Path> gfxpath = path.GetPath(winding, mTarget);
+ RefPtr<gfx::Path> gfxpath = path.GetPath(winding, mTarget);
if (!gfxpath) {
return;
}
mTarget->PushClip(gfxpath);
CurrentState().clipsPushed.push_back(gfxpath);
}
@@ -3053,17 +3053,17 @@ CanvasRenderingContext2D::EnsureUserSpac
mPathTransformWillUpdate) {
mDSPathBuilder =
mPath->TransformedCopyToBuilder(mPathToDS, fillRule);
mPath = nullptr;
mPathTransformWillUpdate = false;
}
if (mDSPathBuilder) {
- nsRefPtr<Path> dsPath;
+ RefPtr<Path> dsPath;
dsPath = mDSPathBuilder->Finish();
mDSPathBuilder = nullptr;
Matrix inverse = mTarget->GetTransform();
if (!inverse.Invert()) {
NS_WARNING("Could not invert transform");
return;
}
@@ -3309,17 +3309,17 @@ CanvasRenderingContext2D::MeasureText(co
}
return new TextMetrics(width);
}
void
CanvasRenderingContext2D::AddHitRegion(const HitRegionOptions& options, ErrorResult& error)
{
- nsRefPtr<gfx::Path> path;
+ RefPtr<gfx::Path> path;
if (options.mPath) {
EnsureTarget();
path = options.mPath->GetPath(CanvasWindingRule::Nonzero, mTarget);
}
if (!path) {
// check if the path is valid
EnsureUserSpacePath(CanvasWindingRule::Nonzero);
@@ -3356,17 +3356,17 @@ CanvasRenderingContext2D::AddHitRegion(c
nsINode::DeleteProperty<bool>);
#endif
}
// finally, add the region to the list
RegionInfo info;
info.mId = options.mId;
info.mElement = options.mControl;
- nsRefPtr<PathBuilder> pathBuilder = path->TransformedCopyToBuilder(mTarget->GetTransform());
+ RefPtr<PathBuilder> pathBuilder = path->TransformedCopyToBuilder(mTarget->GetTransform());
info.mPath = pathBuilder->Finish();
mHitRegionsOptions.InsertElementAt(0, info);
}
void
CanvasRenderingContext2D::RemoveHitRegion(const nsAString& id)
{
@@ -3525,17 +3525,17 @@ struct MOZ_STACK_CLASS CanvasBidiProcess
if (c + 1 < numRuns) {
endRun = runs[c + 1].mCharacterOffset;
} else {
endRun = mTextRun->GetLength();
}
const gfxTextRun::CompressedGlyph *glyphs = mTextRun->GetCharacterGlyphs();
- nsRefPtr<ScaledFont> scaledFont =
+ RefPtr<ScaledFont> scaledFont =
gfxPlatform::GetPlatform()->GetScaledFontForFont(mCtx->mTarget, font);
if (!scaledFont) {
// This can occur when something switched DirectWrite off.
return;
}
AutoRestoreTransform sidewaysRestore;
@@ -3560,17 +3560,17 @@ struct MOZ_STACK_CLASS CanvasBidiProcess
// offset the (alphabetic) baseline of the
// horizontally-shaped text from the (centered)
// default baseline used for vertical
}
mCtx->mTarget->SetTransform(mat);
}
- nsRefPtr<GlyphRenderingOptions> renderingOptions = font->GetGlyphRenderingOptions();
+ RefPtr<GlyphRenderingOptions> renderingOptions = font->GetGlyphRenderingOptions();
GlyphBuffer buffer;
std::vector<Glyph> glyphBuf;
// TODO:
// This more-or-less duplicates the code found in gfxTextRun::Draw
// and the gfxFont methods that uses (Draw, DrawGlyphs, DrawOneGlyph);
@@ -3673,17 +3673,17 @@ struct MOZ_STACK_CLASS CanvasBidiProcess
AdjustedTarget target(mCtx, &bounds);
CanvasGeneralPattern cgp;
const Pattern& patForStyle
(cgp.ForStyle(mCtx, CanvasRenderingContext2D::Style::STROKE, mCtx->mTarget));
const DrawOptions drawOpts(state.globalAlpha, mCtx->UsedOperation());
for (unsigned i = glyphBuf.size(); i > 0; --i) {
- nsRefPtr<Path> path = scaledFont->GetPathForGlyphs(buffer, mCtx->mTarget);
+ RefPtr<Path> path = scaledFont->GetPathForGlyphs(buffer, mCtx->mTarget);
target->Stroke(path, patForStyle, strokeOpts, drawOpts);
buffer.mGlyphs++;
}
}
}
}
// current text run
@@ -4184,17 +4184,17 @@ CanvasRenderingContext2D::IsPointInPath(
bool CanvasRenderingContext2D::IsPointInPath(const CanvasPath& mPath, double x, double y, const CanvasWindingRule& mWinding)
{
if (!FloatValidate(x,y)) {
return false;
}
EnsureTarget();
- nsRefPtr<gfx::Path> tempPath = mPath.GetPath(mWinding, mTarget);
+ RefPtr<gfx::Path> tempPath = mPath.GetPath(mWinding, mTarget);
return tempPath->ContainsPoint(Point(x, y), mTarget->GetTransform());
}
bool
CanvasRenderingContext2D::IsPointInStroke(double x, double y)
{
if (!FloatValidate(x,y)) {
@@ -4224,17 +4224,17 @@ CanvasRenderingContext2D::IsPointInStrok
bool CanvasRenderingContext2D::IsPointInStroke(const CanvasPath& mPath, double x, double y)
{
if (!FloatValidate(x,y)) {
return false;
}
EnsureTarget();
- nsRefPtr<gfx::Path> tempPath = mPath.GetPath(CanvasWindingRule::Nonzero, mTarget);
+ RefPtr<gfx::Path> tempPath = mPath.GetPath(CanvasWindingRule::Nonzero, mTarget);
const ContextState &state = CurrentState();
StrokeOptions strokeOptions(state.lineWidth,
state.lineJoin,
state.lineCap,
state.miterLimit,
state.dash.Length(),
@@ -4249,25 +4249,25 @@ bool CanvasRenderingContext2D::IsPointIn
// relative to the returned surface.
static already_AddRefed<SourceSurface>
ExtractSubrect(SourceSurface* aSurface, gfx::Rect* aSourceRect, DrawTarget* aTargetDT)
{
gfx::Rect roundedOutSourceRect = *aSourceRect;
roundedOutSourceRect.RoundOut();
gfx::IntRect roundedOutSourceRectInt;
if (!roundedOutSourceRect.ToIntRect(&roundedOutSourceRectInt)) {
- nsRefPtr<SourceSurface> surface(aSurface);
+ RefPtr<SourceSurface> surface(aSurface);
return surface.forget();
}
- nsRefPtr<DrawTarget> subrectDT =
+ RefPtr<DrawTarget> subrectDT =
aTargetDT->CreateSimilarDrawTarget(roundedOutSourceRectInt.Size(), SurfaceFormat::B8G8R8A8);
if (!subrectDT) {
- nsRefPtr<SourceSurface> surface(aSurface);
+ RefPtr<SourceSurface> surface(aSurface);
return surface.forget();
}
*aSourceRect -= roundedOutSourceRect.TopLeft();
subrectDT->CopySurface(aSurface, roundedOutSourceRectInt, IntPoint());
return subrectDT->Snapshot();
}
@@ -4371,17 +4371,17 @@ CanvasRenderingContext2D::DrawImage(cons
MOZ_ASSERT(optional_argc == 0 || optional_argc == 2 || optional_argc == 6);
if (optional_argc == 6) {
NormalizeRect(sx, sy, sw, sh);
NormalizeRect(dx, dy, dw, dh);
}
- nsRefPtr<SourceSurface> srcSurf;
+ RefPtr<SourceSurface> srcSurf;
gfx::IntSize imgSize;
Element* element = nullptr;
EnsureTarget();
if (image.IsHTMLCanvasElement()) {
HTMLCanvasElement* canvas = &image.GetAsHTMLCanvasElement();
element = canvas;
@@ -4853,17 +4853,17 @@ CanvasRenderingContext2D::DrawWindow(nsG
Matrix matrix = mTarget->GetTransform();
double sw = matrix._11 * w;
double sh = matrix._22 * h;
if (!sw || !sh) {
return;
}
nsRefPtr<gfxContext> thebes;
- nsRefPtr<DrawTarget> drawDT;
+ RefPtr<DrawTarget> drawDT;
// Rendering directly is faster and can be done if mTarget supports Azure
// and does not need alpha blending.
if (gfxPlatform::GetPlatform()->SupportsAzureContentForDrawTarget(mTarget) &&
GlobalAlpha() == 1.0f)
{
thebes = new gfxContext(mTarget);
thebes->SetMatrix(gfxMatrix(matrix._11, matrix._12, matrix._21,
matrix._22, matrix._31, matrix._32));
@@ -4878,25 +4878,25 @@ CanvasRenderingContext2D::DrawWindow(nsG
thebes = new gfxContext(drawDT);
thebes->SetMatrix(gfxMatrix::Scaling(matrix._11, matrix._22));
}
nsCOMPtr<nsIPresShell> shell = presContext->PresShell();
unused << shell->RenderDocument(r, renderDocFlags, backgroundColor, thebes);
if (drawDT) {
- nsRefPtr<SourceSurface> snapshot = drawDT->Snapshot();
- nsRefPtr<DataSourceSurface> data = snapshot->GetDataSurface();
+ RefPtr<SourceSurface> snapshot = drawDT->Snapshot();
+ RefPtr<DataSourceSurface> data = snapshot->GetDataSurface();
DataSourceSurface::MappedSurface rawData;
if (NS_WARN_IF(!data->Map(DataSourceSurface::READ, &rawData))) {
error.Throw(NS_ERROR_FAILURE);
return;
}
- nsRefPtr<SourceSurface> source =
+ RefPtr<SourceSurface> source =
mTarget->CreateSourceSurfaceFromData(rawData.mData,
data->GetSize(),
rawData.mStride,
data->GetFormat());
data->Unmap();
if (!source) {
error.Throw(NS_ERROR_FAILURE);
@@ -5033,17 +5033,17 @@ CanvasRenderingContext2D::DrawWidgetAsOn
return;
}
nsIWidget* widget = presContext->GetRootWidget();
if (!widget) {
error.Throw(NS_ERROR_FAILURE);
return;
}
- nsRefPtr<SourceSurface> snapshot = widget->SnapshotWidgetOnScreen();
+ RefPtr<SourceSurface> snapshot = widget->SnapshotWidgetOnScreen();
if (!snapshot) {
error.Throw(NS_ERROR_FAILURE);
return;
}
gfx::Rect sourceRect(gfx::Point(0, 0), gfx::Size(snapshot->GetSize()));
mTarget->DrawSurface(snapshot, sourceRect, sourceRect,
DrawSurfaceOptions(gfx::Filter::POINT),
@@ -5175,20 +5175,20 @@ CanvasRenderingContext2D::GetImageDataAr
if (mZero) {
*aRetval = darray;
return NS_OK;
}
IntRect srcRect(0, 0, mWidth, mHeight);
IntRect destRect(aX, aY, aWidth, aHeight);
IntRect srcReadRect = srcRect.Intersect(destRect);
- nsRefPtr<DataSourceSurface> readback;
+ RefPtr<DataSourceSurface> readback;
DataSourceSurface::MappedSurface rawData;
if (!srcReadRect.IsEmpty()) {
- nsRefPtr<SourceSurface> snapshot = mTarget->Snapshot();
+ RefPtr<SourceSurface> snapshot = mTarget->Snapshot();
if (snapshot) {
readback = snapshot->GetDataSurface();
}
if (!readback || !readback->Map(DataSourceSurface::READ, &rawData)) {
return NS_ERROR_OUT_OF_MEMORY;
}
}
@@ -5270,17 +5270,17 @@ CanvasRenderingContext2D::GetImageDataAr
void
CanvasRenderingContext2D::EnsureErrorTarget()
{
if (sErrorTarget) {
return;
}
- nsRefPtr<DrawTarget> errorTarget = gfxPlatform::GetPlatform()->CreateOffscreenCanvasDrawTarget(IntSize(1, 1), SurfaceFormat::B8G8R8A8);
+ RefPtr<DrawTarget> errorTarget = gfxPlatform::GetPlatform()->CreateOffscreenCanvasDrawTarget(IntSize(1, 1), SurfaceFormat::B8G8R8A8);
MOZ_ASSERT(errorTarget, "Failed to allocate the error target!");
sErrorTarget = errorTarget;
NS_ADDREF(sErrorTarget);
}
void
CanvasRenderingContext2D::FillRuleChanged()
@@ -5440,17 +5440,17 @@ CanvasRenderingContext2D::PutImageData_e
srcLine += w * 4;
}
EnsureTarget();
if (!IsTargetValid()) {
return NS_ERROR_FAILURE;
}
- nsRefPtr<SourceSurface> sourceSurface =
+ RefPtr<SourceSurface> sourceSurface =
mTarget->CreateSourceSurfaceFromData(imgsurf->Data(), IntSize(copyWidth, copyHeight), imgsurf->Stride(), SurfaceFormat::B8G8R8A8);
// In certain scenarios, requesting larger than 8k image fails. Bug 803568
// covers the details of how to run into it, but the full detailed
// investigation hasn't been done to determine the underlying cause. We
// will just handle the failure to allocate the surface to avoid a crash.
if (!sourceSurface) {
return NS_ERROR_FAILURE;
@@ -5717,27 +5717,27 @@ CanvasPath::Constructor(const GlobalObje
{
nsRefPtr<CanvasPath> path = new CanvasPath(aGlobal.GetAsSupports());
return path.forget();
}
already_AddRefed<CanvasPath>
CanvasPath::Constructor(const GlobalObject& aGlobal, CanvasPath& aCanvasPath, ErrorResult& aRv)
{
- nsRefPtr<gfx::Path> tempPath = aCanvasPath.GetPath(CanvasWindingRule::Nonzero,
+ RefPtr<gfx::Path> tempPath = aCanvasPath.GetPath(CanvasWindingRule::Nonzero,
gfxPlatform::GetPlatform()->ScreenReferenceDrawTarget());
nsRefPtr<CanvasPath> path = new CanvasPath(aGlobal.GetAsSupports(), tempPath->CopyToBuilder());
return path.forget();
}
already_AddRefed<CanvasPath>
CanvasPath::Constructor(const GlobalObject& aGlobal, const nsAString& aPathString, ErrorResult& aRv)
{
- nsRefPtr<gfx::Path> tempPath = SVGContentUtils::GetPath(aPathString);
+ RefPtr<gfx::Path> tempPath = SVGContentUtils::GetPath(aPathString);
if (!tempPath) {
return Constructor(aGlobal, aRv);
}
nsRefPtr<CanvasPath> path = new CanvasPath(aGlobal.GetAsSupports(), tempPath->CopyToBuilder());
return path.forget();
}
@@ -5891,25 +5891,25 @@ CanvasPath::BezierTo(const gfx::Point& a
EnsurePathBuilder();
mPathBuilder->BezierTo(aCP1, aCP2, aCP3);
}
void
CanvasPath::AddPath(CanvasPath& aCanvasPath, const Optional<NonNull<SVGMatrix>>& aMatrix)
{
- nsRefPtr<gfx::Path> tempPath = aCanvasPath.GetPath(CanvasWindingRule::Nonzero,
+ RefPtr<gfx::Path> tempPath = aCanvasPath.GetPath(CanvasWindingRule::Nonzero,
gfxPlatform::GetPlatform()->ScreenReferenceDrawTarget());
if (aMatrix.WasPassed()) {
const SVGMatrix& m = aMatrix.Value();
Matrix transform(m.A(), m.B(), m.C(), m.D(), m.E(), m.F());
if (!transform.IsIdentity()) {
- nsRefPtr<PathBuilder> tempBuilder = tempPath->TransformedCopyToBuilder(transform, FillRule::FILL_WINDING);
+ RefPtr<PathBuilder> tempBuilder = tempPath->TransformedCopyToBuilder(transform, FillRule::FILL_WINDING);
tempPath = tempBuilder->Finish();
}
}
EnsurePathBuilder(); // in case a path is added to itself
tempPath->StreamToSink(mPathBuilder);
}
@@ -5919,43 +5919,43 @@ CanvasPath::GetPath(const CanvasWindingR
FillRule fillRule = FillRule::FILL_WINDING;
if (winding == CanvasWindingRule::Evenodd) {
fillRule = FillRule::FILL_EVEN_ODD;
}
if (mPath &&
(mPath->GetBackendType() == aTarget->GetBackendType()) &&
(mPath->GetFillRule() == fillRule)) {
- nsRefPtr<gfx::Path> path(mPath);
+ RefPtr<gfx::Path> path(mPath);
return path.forget();
}
if (!mPath) {
// if there is no path, there must be a pathbuilder
MOZ_ASSERT(mPathBuilder);
mPath = mPathBuilder->Finish();
if (!mPath) {
- nsRefPtr<gfx::Path> path(mPath);
+ RefPtr<gfx::Path> path(mPath);
return path.forget();
}
mPathBuilder = nullptr;
}
// retarget our backend if we're used with a different backend
if (mPath->GetBackendType() != aTarget->GetBackendType()) {
- nsRefPtr<PathBuilder> tmpPathBuilder = aTarget->CreatePathBuilder(fillRule);
+ RefPtr<PathBuilder> tmpPathBuilder = aTarget->CreatePathBuilder(fillRule);
mPath->StreamToSink(tmpPathBuilder);
mPath = tmpPathBuilder->Finish();
} else if (mPath->GetFillRule() != fillRule) {
- nsRefPtr<PathBuilder> tmpPathBuilder = mPath->CopyToBuilder(fillRule);
+ RefPtr<PathBuilder> tmpPathBuilder = mPath->CopyToBuilder(fillRule);
mPath = tmpPathBuilder->Finish();
}
- nsRefPtr<gfx::Path> path(mPath);
+ RefPtr<gfx::Path> path(mPath);
return path.forget();
}
void
CanvasPath::EnsurePathBuilder() const
{
if (mPathBuilder) {
return;
--- a/dom/canvas/CanvasRenderingContext2D.h
+++ b/dom/canvas/CanvasRenderingContext2D.h
@@ -4,17 +4,17 @@
#ifndef CanvasRenderingContext2D_h
#define CanvasRenderingContext2D_h
#include "mozilla/Attributes.h"
#include <vector>
#include "nsIDOMCanvasRenderingContext2D.h"
#include "nsICanvasRenderingContextInternal.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsColor.h"
#include "mozilla/dom/HTMLCanvasElement.h"
#include "mozilla/dom/HTMLVideoElement.h"
#include "gfxTextRun.h"
#include "mozilla/ErrorResult.h"
#include "mozilla/dom/CanvasGradient.h"
#include "mozilla/dom/CanvasRenderingContext2DBinding.h"
#include "mozilla/dom/CanvasPattern.h"
@@ -724,19 +724,19 @@ protected:
nsTArray<CanvasRenderingContext2DUserData*> mUserDatas;
// If mCanvasElement is not provided, then a docshell is
nsCOMPtr<nsIDocShell> mDocShell;
// This is created lazily so it is necessary to call EnsureTarget before
// accessing it. In the event of an error it will be equal to
// sErrorTarget.
- nsRefPtr<mozilla::gfx::DrawTarget> mTarget;
+ mozilla::RefPtr<mozilla::gfx::DrawTarget> mTarget;
- nsRefPtr<mozilla::layers::PersistentBufferProvider> mBufferProvider;
+ mozilla::RefPtr<mozilla::layers::PersistentBufferProvider> mBufferProvider;
uint32_t SkiaGLTex() const;
// This observes our draw calls at the beginning of the canvas
// lifetime and switches to software or GPU mode depending on
// what it thinks is best
CanvasDrawObserver* mDrawObserver;
void RemoveDrawObserver();
@@ -783,19 +783,19 @@ protected:
*
* There's never both a device space path builder and a user space path
* builder present at the same time. There is also never a path and a
* path builder present at the same time. When writing proceeds on an
* existing path the Path is cleared and a new builder is created.
*
* mPath is always in user-space.
*/
- nsRefPtr<mozilla::gfx::Path> mPath;
- nsRefPtr<mozilla::gfx::PathBuilder> mDSPathBuilder;
- nsRefPtr<mozilla::gfx::PathBuilder> mPathBuilder;
+ mozilla::RefPtr<mozilla::gfx::Path> mPath;
+ mozilla::RefPtr<mozilla::gfx::PathBuilder> mDSPathBuilder;
+ mozilla::RefPtr<mozilla::gfx::PathBuilder> mPathBuilder;
bool mPathTransformWillUpdate;
mozilla::gfx::Matrix mPathToDS;
/**
* Number of times we've invalidated before calling redraw
*/
uint32_t mInvalidateCount;
static const uint32_t kCanvasMaxInvalidateCount = 100;
@@ -804,17 +804,17 @@ protected:
* State information for hit regions
*/
struct RegionInfo
{
nsString mId;
// fallback element for a11y
nsRefPtr<Element> mElement;
// Path of the hit region in the 2d context coordinate space (not user space)
- nsRefPtr<gfx::Path> mPath;
+ RefPtr<gfx::Path> mPath;
};
nsTArray<RegionInfo> mHitRegionsOptions;
/**
* Returns true if a shadow should be drawn along with a
* drawing operation.
*/
@@ -979,17 +979,17 @@ protected:
return (int32_t)floor(ShadowBlurSigma() * GAUSSIAN_SCALE_FACTOR + 0.5);
}
mozilla::gfx::Float ShadowBlurSigma() const
{
return std::min(SIGMA_MAX, shadowBlur / 2.0f);
}
- std::vector<nsRefPtr<mozilla::gfx::Path> > clipsPushed;
+ std::vector<mozilla::RefPtr<mozilla::gfx::Path> > clipsPushed;
nsRefPtr<gfxFontGroup> fontGroup;
nsCOMPtr<nsIAtom> fontLanguage;
nsFont fontFont;
EnumeratedArray<Style, Style::MAX, nsRefPtr<CanvasGradient>> gradientStyles;
EnumeratedArray<Style, Style::MAX, nsRefPtr<CanvasPattern>> patternStyles;
EnumeratedArray<Style, Style::MAX, nscolor> colorStyles;
@@ -1013,17 +1013,17 @@ protected:
mozilla::gfx::FillRule fillRule;
mozilla::gfx::CapStyle lineCap;
mozilla::gfx::JoinStyle lineJoin;
nsString filterString;
nsTArray<nsStyleFilter> filterChain;
nsRefPtr<nsSVGFilterChainObserver> filterChainObserver;
mozilla::gfx::FilterDescription filter;
- nsTArray<nsRefPtr<mozilla::gfx::SourceSurface>> filterAdditionalImages;
+ nsTArray<mozilla::RefPtr<mozilla::gfx::SourceSurface>> filterAdditionalImages;
bool imageSmoothingEnabled;
bool fontExplicitLanguage;
};
nsAutoTArray<ContextState, 3> mStyleStack;
inline ContextState& CurrentState() {
--- a/dom/canvas/DocumentRendererChild.cpp
+++ b/dom/canvas/DocumentRendererChild.cpp
@@ -4,17 +4,17 @@
#include "mozilla/ipc/DocumentRendererChild.h"
#include "base/basictypes.h"
#include "gfx2DGlue.h"
#include "gfxPattern.h"
#include "mozilla/gfx/2D.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsPIDOMWindow.h"
#include "nsIDOMWindow.h"
#include "nsIDocShell.h"
#include "nsIInterfaceRequestorUtils.h"
#include "nsComponentManagerUtils.h"
#include "nsCSSParser.h"
#include "nsPresContext.h"
#include "nsCOMPtr.h"
@@ -69,17 +69,17 @@ DocumentRendererChild::RenderDocument(ns
nscolor bgColor;
if (!nsRuleNode::ComputeColor(bgColorValue, presContext, nullptr, bgColor)) {
return false;
}
// Draw directly into the output array.
data.SetLength(renderSize.width * renderSize.height * 4);
- nsRefPtr<DrawTarget> dt =
+ RefPtr<DrawTarget> dt =
Factory::CreateDrawTargetForData(BackendType::CAIRO,
reinterpret_cast<uint8_t*>(data.BeginWriting()),
IntSize(renderSize.width, renderSize.height),
4 * renderSize.width,
SurfaceFormat::B8G8R8A8);
if (!dt) {
gfxWarning() << "DocumentRendererChild::RenderDocument failed to Factory::CreateDrawTargetForData";
return false;
--- a/dom/canvas/DocumentRendererParent.cpp
+++ b/dom/canvas/DocumentRendererParent.cpp
@@ -2,17 +2,17 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "mozilla/ipc/DocumentRendererParent.h"
#include "gfx2DGlue.h"
#include "mozilla/gfx/2D.h"
#include "mozilla/gfx/PathHelpers.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsICanvasRenderingContextInternal.h"
using namespace mozilla;
using namespace mozilla::gfx;
using namespace mozilla::ipc;
DocumentRendererParent::DocumentRendererParent()
{}
@@ -31,17 +31,17 @@ void DocumentRendererParent::DrawToCanva
const nsCString& aData)
{
if (!mCanvas || !mCanvasContext)
return;
DrawTarget* drawTarget = mCanvasContext->GetDrawTarget();
Rect rect(0, 0, aSize.width, aSize.height);
MaybeSnapToDevicePixels(rect, *drawTarget, true);
- nsRefPtr<DataSourceSurface> dataSurface =
+ RefPtr<DataSourceSurface> dataSurface =
Factory::CreateWrappingDataSourceSurface(reinterpret_cast<uint8_t*>(const_cast<nsCString&>(aData).BeginWriting()),
aSize.width * 4,
IntSize(aSize.width, aSize.height),
SurfaceFormat::B8G8R8A8);
SurfacePattern pattern(dataSurface, ExtendMode::CLAMP);
drawTarget->FillRect(rect, pattern);
gfxRect damageRect = mCanvasContext->UserToDevice(ThebesRect(rect));
--- a/dom/canvas/ImageBitmap.cpp
+++ b/dom/canvas/ImageBitmap.cpp
@@ -100,17 +100,17 @@ CropAndCopyDataSourceSurface(DataSourceS
// So, instead, we force the output format to be SurfaceFormat::B8G8R8A8.
const SurfaceFormat format = SurfaceFormat::B8G8R8A8;
const int bytesPerPixel = BytesPerPixel(format);
const IntSize dstSize = IntSize(positiveCropRect.width,
positiveCropRect.height);
const uint32_t dstStride = dstSize.width * bytesPerPixel;
// Create a new SourceSurface.
- nsRefPtr<DataSourceSurface> dstDataSurface =
+ RefPtr<DataSourceSurface> dstDataSurface =
Factory::CreateDataSourceSurfaceWithStride(dstSize, format, dstStride, true);
if (NS_WARN_IF(!dstDataSurface)) {
return nullptr;
}
// Only do copying and cropping when the positiveCropRect intersects with
// the size of aSurface.
@@ -176,30 +176,30 @@ CreateSurfaceFromRawData(const gfx::IntS
uint32_t aBufferLength,
const Maybe<IntRect>& aCropRect,
ErrorResult& aRv)
{
MOZ_ASSERT(!aSize.IsEmpty());
MOZ_ASSERT(aBuffer);
// Wrap the source buffer into a SourceSurface.
- nsRefPtr<DataSourceSurface> dataSurface =
+ RefPtr<DataSourceSurface> dataSurface =
Factory::CreateWrappingDataSourceSurface(aBuffer, aStride, aSize, aFormat);
if (NS_WARN_IF(!dataSurface)) {
aRv.Throw(NS_ERROR_NOT_AVAILABLE);
return nullptr;
}
// The temporary cropRect variable is equal to the size of source buffer if we
// do not need to crop, or it equals to the given cropping size.
const IntRect cropRect = aCropRect.valueOr(IntRect(0, 0, aSize.width, aSize.height));
// Copy the source buffer in the _cropRect_ area into a new SourceSurface.
- nsRefPtr<DataSourceSurface> result = CropAndCopyDataSourceSurface(dataSurface, cropRect);
+ RefPtr<DataSourceSurface> result = CropAndCopyDataSourceSurface(dataSurface, cropRect);
if (NS_WARN_IF(!result)) {
aRv.Throw(NS_ERROR_NOT_AVAILABLE);
return nullptr;
}
return result.forget();
}
@@ -211,28 +211,28 @@ CreateImageFromRawData(const gfx::IntSiz
uint8_t* aBuffer,
uint32_t aBufferLength,
const Maybe<IntRect>& aCropRect,
ErrorResult& aRv)
{
MOZ_ASSERT(NS_IsMainThread());
// Copy and crop the source buffer into a SourceSurface.
- nsRefPtr<SourceSurface> rgbaSurface =
+ RefPtr<SourceSurface> rgbaSurface =
CreateSurfaceFromRawData(aSize, aStride, aFormat,
aBuffer, aBufferLength,
aCropRect, aRv);
if (NS_WARN_IF(aRv.Failed())) {
return nullptr;
}
// Convert RGBA to BGRA
- nsRefPtr<DataSourceSurface> rgbaDataSurface = rgbaSurface->GetDataSurface();
- nsRefPtr<DataSourceSurface> bgraDataSurface =
+ RefPtr<DataSourceSurface> rgbaDataSurface = rgbaSurface->GetDataSurface();
+ RefPtr<DataSourceSurface> bgraDataSurface =
Factory::CreateDataSourceSurfaceWithStride(rgbaDataSurface->GetSize(),
SurfaceFormat::B8G8R8A8,
rgbaDataSurface->Stride());
DataSourceSurface::MappedSurface rgbaMap;
DataSourceSurface::MappedSurface bgraMap;
if (NS_WARN_IF(!rgbaDataSurface->Map(DataSourceSurface::MapType::READ, &rgbaMap)) ||
@@ -363,17 +363,17 @@ GetSurfaceFromElement(nsIGlobalObject* a
return nullptr;
}
if (NS_WARN_IF(!res.mSourceSurface)) {
aRv.Throw(NS_ERROR_NOT_AVAILABLE);
return nullptr;
}
- nsRefPtr<SourceSurface> surface(res.mSourceSurface);
+ RefPtr<SourceSurface> surface(res.mSourceSurface);
return surface.forget();
}
/*
* The specification doesn't allow to create an ImegeBitmap from a vector image.
* This function is used to check if the given HTMLImageElement contains a
* raster image.
*/
@@ -430,60 +430,60 @@ ImageBitmap::PrepareForDrawTarget(gfx::D
if (!mSurface) {
mSurface = mData->GetAsSourceSurface();
}
if (!mSurface) {
return nullptr;
}
- nsRefPtr<DrawTarget> target = aTarget;
+ RefPtr<DrawTarget> target = aTarget;
IntRect surfRect(0, 0, mSurface->GetSize().width, mSurface->GetSize().height);
// Check if we still need to crop our surface
if (!mPictureRect.IsEqualEdges(surfRect)) {
IntRect surfPortion = surfRect.Intersect(mPictureRect);
// the crop lies entirely outside the surface area, nothing to draw
if (surfPortion.IsEmpty()) {
mSurface = nullptr;
- nsRefPtr<gfx::SourceSurface> surface(mSurface);
+ RefPtr<gfx::SourceSurface> surface(mSurface);
return surface.forget();
}
IntPoint dest(std::max(0, surfPortion.X() - mPictureRect.X()),
std::max(0, surfPortion.Y() - mPictureRect.Y()));
// We must initialize this target with mPictureRect.Size() because the
// specification states that if the cropping area is given, then return an
// ImageBitmap with the size equals to the cropping area.
target = target->CreateSimilarDrawTarget(mPictureRect.Size(),
target->GetFormat());
if (!target) {
mSurface = nullptr;
- nsRefPtr<gfx::SourceSurface> surface(mSurface);
+ RefPtr<gfx::SourceSurface> surface(mSurface);
return surface.forget();
}
// We need to fall back to generic copying and cropping for the Windows8.1,
// D2D1 backend.
// In the Windows8.1 D2D1 backend, it might trigger "partial upload" from a
// non-SourceSurfaceD2D1 surface to a D2D1Image in the following
// CopySurface() step. However, the "partial upload" only supports uploading
// a rectangle starts from the upper-left point, which means it cannot
// upload an arbitrary part of the source surface and this causes problems
// if the mPictureRect is not starts from the upper-left point.
if (target->GetBackendType() == BackendType::DIRECT2D1_1 &&
mSurface->GetType() != SurfaceType::D2D1_1_IMAGE) {
- nsRefPtr<DataSourceSurface> dataSurface = mSurface->GetDataSurface();
+ RefPtr<DataSourceSurface> dataSurface = mSurface->GetDataSurface();
if (NS_WARN_IF(!dataSurface)) {
mSurface = nullptr;
- nsRefPtr<gfx::SourceSurface> surface(mSurface);
+ RefPtr<gfx::SourceSurface> surface(mSurface);
return surface.forget();
}
mSurface = CropAndCopyDataSourceSurface(dataSurface, mPictureRect);
} else {
target->CopySurface(mSurface, surfPortion, dest);
mSurface = target->Snapshot();
}
@@ -492,17 +492,17 @@ ImageBitmap::PrepareForDrawTarget(gfx::D
mPictureRect.MoveTo(0, 0);
}
// Replace our surface with one optimized for the target we're about to draw
// to, under the assumption it'll likely be drawn again to that target.
// This call should be a no-op for already-optimized surfaces
mSurface = target->OptimizeSourceSurface(mSurface);
- nsRefPtr<gfx::SourceSurface> surface(mSurface);
+ RefPtr<gfx::SourceSurface> surface(mSurface);
return surface.forget();
}
/* static */ already_AddRefed<ImageBitmap>
ImageBitmap::CreateInternal(nsIGlobalObject* aGlobal, HTMLImageElement& aImageEl,
const Maybe<IntRect>& aCropRect, ErrorResult& aRv)
{
// Check if the image element is completely available or not.
@@ -514,17 +514,17 @@ ImageBitmap::CreateInternal(nsIGlobalObj
// Check if the image element is a bitmap (e.g. it's a vector graphic) or not.
if (!HasRasterImage(aImageEl)) {
aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
return nullptr;
}
// Get the SourceSurface out from the image element and then do security
// checking.
- nsRefPtr<SourceSurface> surface = GetSurfaceFromElement(aGlobal, aImageEl, aRv);
+ RefPtr<SourceSurface> surface = GetSurfaceFromElement(aGlobal, aImageEl, aRv);
if (NS_WARN_IF(aRv.Failed())) {
return nullptr;
}
// Create ImageBitmap.
nsRefPtr<layers::Image> data = CreateImageFromSurface(surface, aRv);
@@ -591,37 +591,37 @@ ImageBitmap::CreateInternal(nsIGlobalObj
ImageBitmap::CreateInternal(nsIGlobalObject* aGlobal, HTMLCanvasElement& aCanvasEl,
const Maybe<IntRect>& aCropRect, ErrorResult& aRv)
{
if (aCanvasEl.Width() == 0 || aCanvasEl.Height() == 0) {
aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
return nullptr;
}
- nsRefPtr<SourceSurface> surface = GetSurfaceFromElement(aGlobal, aCanvasEl, aRv);
+ RefPtr<SourceSurface> surface = GetSurfaceFromElement(aGlobal, aCanvasEl, aRv);
if (NS_WARN_IF(aRv.Failed())) {
return nullptr;
}
// Crop the source surface if needed.
- nsRefPtr<SourceSurface> croppedSurface;
+ RefPtr<SourceSurface> croppedSurface;
IntRect cropRect = aCropRect.valueOr(IntRect());
// If the HTMLCanvasElement's rendering context is WebGL, then the snapshot
// we got from the HTMLCanvasElement is a DataSourceSurface which is a copy
// of the rendering context. We handle cropping in this case.
if ((aCanvasEl.GetCurrentContextType() == CanvasContextType::WebGL1 ||
aCanvasEl.GetCurrentContextType() == CanvasContextType::WebGL2) &&
aCropRect.isSome()) {
// The _surface_ must be a DataSourceSurface.
MOZ_ASSERT(surface->GetType() == SurfaceType::DATA,
"The snapshot SourceSurface from WebGL rendering contest is not \
DataSourceSurface.");
- nsRefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
+ RefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
croppedSurface = CropAndCopyDataSourceSurface(dataSurface, cropRect);
cropRect.MoveTo(0, 0);
}
else {
croppedSurface = surface;
}
if (NS_WARN_IF(!croppedSurface)) {
@@ -708,17 +708,17 @@ ImageBitmap::CreateInternal(nsIGlobalObj
const Maybe<IntRect>& aCropRect, ErrorResult& aRv)
{
// Check origin-clean.
if (aCanvasCtx.GetCanvas()->IsWriteOnly()) {
aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
return nullptr;
}
- nsRefPtr<SourceSurface> surface = aCanvasCtx.GetSurfaceSnapshot();
+ RefPtr<SourceSurface> surface = aCanvasCtx.GetSurfaceSnapshot();
if (NS_WARN_IF(!surface)) {
aRv.Throw(NS_ERROR_NOT_AVAILABLE);
return nullptr;
}
const IntSize surfaceSize = surface->GetSize();
if (surfaceSize.width == 0 || surfaceSize.height == 0) {
@@ -858,53 +858,53 @@ DecodeBlob(Blob& aBlob, ErrorResult& aRv
if (NS_FAILED(rv)) {
aRv.Throw(rv);
return nullptr;
}
// Get the surface out.
uint32_t frameFlags = imgIContainer::FLAG_SYNC_DECODE | imgIContainer::FLAG_WANT_DATA_SURFACE;
uint32_t whichFrame = imgIContainer::FRAME_FIRST;
- nsRefPtr<SourceSurface> surface = imgContainer->GetFrame(whichFrame, frameFlags);
+ RefPtr<SourceSurface> surface = imgContainer->GetFrame(whichFrame, frameFlags);
if (NS_WARN_IF(!surface)) {
aRv.Throw(NS_ERROR_NOT_AVAILABLE);
return nullptr;
}
return surface.forget();
}
static already_AddRefed<layers::Image>
DecodeAndCropBlob(Blob& aBlob, Maybe<IntRect>& aCropRect, ErrorResult& aRv)
{
// Decode the blob into a SourceSurface.
- nsRefPtr<SourceSurface> surface = DecodeBlob(aBlob, aRv);
+ RefPtr<SourceSurface> surface = DecodeBlob(aBlob, aRv);
if (NS_WARN_IF(aRv.Failed())) {
return nullptr;
}
// Crop the source surface if needed.
- nsRefPtr<SourceSurface> croppedSurface = surface;
+ RefPtr<SourceSurface> croppedSurface = surface;
if (aCropRect.isSome()) {
// The blob is just decoded into a RasterImage and not optimized yet, so the
// _surface_ we get is a DataSourceSurface which wraps the RasterImage's
// raw buffer.
//
// The _surface_ might already be optimized so that its type is not
// SurfaceType::DATA. However, we could keep using the generic cropping and
// copying since the decoded buffer is only used in this ImageBitmap so we
// should crop it to save memory usage.
//
// TODO: Bug1189632 is going to refactor this create-from-blob part to
// decode the blob off the main thread. Re-check if we should do
// cropping at this moment again there.
- nsRefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
+ RefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
croppedSurface = CropAndCopyDataSourceSurface(dataSurface, aCropRect.ref());
aCropRect->MoveTo(0, 0);
}
if (NS_WARN_IF(!croppedSurface)) {
aRv.Throw(NS_ERROR_NOT_AVAILABLE);
return nullptr;
}
@@ -959,17 +959,17 @@ protected:
mPromise->MaybeResolve(imageBitmap);
return;
}
virtual already_AddRefed<ImageBitmap> CreateImageBitmap(ErrorResult& aRv) = 0;
nsRefPtr<Promise> mPromise;
nsCOMPtr<nsIGlobalObject> mGlobalObject;
- nsRefPtr<mozilla::dom::Blob> mBlob;
+ RefPtr<mozilla::dom::Blob> mBlob;
Maybe<IntRect> mCropRect;
};
class CreateImageBitmapFromBlobTask final : public nsRunnable,
public CreateImageBitmapFromBlob
{
public:
CreateImageBitmapFromBlobTask(Promise* aPromise,
--- a/dom/canvas/ImageBitmap.h
+++ b/dom/canvas/ImageBitmap.h
@@ -154,17 +154,17 @@ protected:
* called first time.
*
* The mSurface might just be a reference to the same data buffer of the mData
* if the are of mPictureRect is just the same as the mData's size. Or, it is
* a independent data buffer which is copied and cropped form the mData's data
* buffer.
*/
nsRefPtr<layers::Image> mData;
- nsRefPtr<gfx::SourceSurface> mSurface;
+ RefPtr<gfx::SourceSurface> mSurface;
/*
* The mPictureRect is the size of the source image in default, however, if
* users specify the cropping area while creating an ImageBitmap, then this
* mPictureRect is the cropping area.
*
* Note that if the CreateInternal() copies and crops data from the source
* image, then this mPictureRect is just the size of the final mData.
--- a/dom/canvas/WebGLContext.cpp
+++ b/dom/canvas/WebGLContext.cpp
@@ -632,17 +632,17 @@ BaseCaps(const WebGLContextOptions& opti
}
////////////////////////////////////////
static already_AddRefed<gl::GLContext>
CreateGLWithEGL(const gl::SurfaceCaps& caps, gl::CreateContextFlags flags,
WebGLContext* webgl)
{
- nsRefPtr<GLContext> gl;
+ RefPtr<GLContext> gl;
#ifndef XP_MACOSX // Mac doesn't have GLContextProviderEGL.
gfx::IntSize dummySize(16, 16);
gl = gl::GLContextProviderEGL::CreateOffscreen(dummySize, caps,
flags);
if (!gl) {
webgl->GenerateWarning("Error during EGL OpenGL init.");
return nullptr;
}
@@ -652,17 +652,17 @@ CreateGLWithEGL(const gl::SurfaceCaps& c
#endif // XP_MACOSX
return gl.forget();
}
static already_AddRefed<GLContext>
CreateGLWithANGLE(const gl::SurfaceCaps& caps, gl::CreateContextFlags flags,
WebGLContext* webgl)
{
- nsRefPtr<GLContext> gl;
+ RefPtr<GLContext> gl;
#ifdef XP_WIN
gfx::IntSize dummySize(16, 16);
gl = gl::GLContextProviderEGL::CreateOffscreen(dummySize, caps, flags);
if (!gl) {
webgl->GenerateWarning("Error during ANGLE OpenGL init.");
return nullptr;
}
@@ -684,17 +684,17 @@ CreateGLWithDefault(const gl::SurfaceCap
IsFeatureInBlacklist(gfxInfo, nsIGfxInfo::FEATURE_WEBGL_OPENGL))
{
webgl->GenerateWarning("Refused to create native OpenGL context because of"
" blacklisting.");
return nullptr;
}
gfx::IntSize dummySize(16, 16);
- nsRefPtr<GLContext> gl = gl::GLContextProvider::CreateOffscreen(dummySize, caps, flags);
+ RefPtr<GLContext> gl = gl::GLContextProvider::CreateOffscreen(dummySize, caps, flags);
if (!gl) {
webgl->GenerateWarning("Error during native OpenGL init.");
return nullptr;
}
if (gl->IsANGLE())
return nullptr;
@@ -1125,24 +1125,24 @@ WebGLContext::LoseOldestWebGLContextIfLi
void
WebGLContext::GetImageBuffer(uint8_t** out_imageBuffer, int32_t* out_format)
{
*out_imageBuffer = nullptr;
*out_format = 0;
// Use GetSurfaceSnapshot() to make sure that appropriate y-flip gets applied
bool premult;
- nsRefPtr<SourceSurface> snapshot =
+ RefPtr<SourceSurface> snapshot =
GetSurfaceSnapshot(mOptions.premultipliedAlpha ? nullptr : &premult);
if (!snapshot)
return;
MOZ_ASSERT(mOptions.premultipliedAlpha || !premult, "We must get unpremult when we ask for it!");
- nsRefPtr<DataSourceSurface> dataSurface = snapshot->GetDataSurface();
+ RefPtr<DataSourceSurface> dataSurface = snapshot->GetDataSurface();
DataSourceSurface::MappedSurface map;
if (!dataSurface->Map(DataSourceSurface::MapType::READ, &map))
return;
uint8_t* imageBuffer = new (fallible) uint8_t[mWidth * mHeight * 4];
if (!imageBuffer) {
dataSurface->Unmap();
@@ -1805,17 +1805,17 @@ already_AddRefed<mozilla::gfx::SourceSur
WebGLContext::GetSurfaceSnapshot(bool* out_premultAlpha)
{
if (!gl)
return nullptr;
bool hasAlpha = mOptions.alpha;
SurfaceFormat surfFormat = hasAlpha ? SurfaceFormat::B8G8R8A8
: SurfaceFormat::B8G8R8X8;
- nsRefPtr<DataSourceSurface> surf;
+ RefPtr<DataSourceSurface> surf;
surf = Factory::CreateDataSourceSurfaceWithStride(IntSize(mWidth, mHeight),
surfFormat,
mWidth * 4);
if (NS_WARN_IF(!surf)) {
return nullptr;
}
gl->MakeCurrent();
@@ -1833,17 +1833,17 @@ WebGLContext::GetSurfaceSnapshot(bool* o
if (!srcPremultAlpha) {
if (out_premultAlpha) {
*out_premultAlpha = false;
} else if(hasAlpha) {
gfxUtils::PremultiplyDataSurface(surf, surf);
}
}
- nsRefPtr<DrawTarget> dt =
+ RefPtr<DrawTarget> dt =
Factory::CreateDrawTarget(BackendType::CAIRO,
IntSize(mWidth, mHeight),
SurfaceFormat::B8G8R8A8);
if (!dt) {
return nullptr;
}
--- a/dom/canvas/WebGLContext.h
+++ b/dom/canvas/WebGLContext.h
@@ -1266,17 +1266,17 @@ protected:
template<class ElementType>
nsLayoutUtils::SurfaceFromElementResult
SurfaceFromElement(ElementType& element) {
return SurfaceFromElement(&element);
}
nsresult
SurfaceFromElementResultToImageSurface(nsLayoutUtils::SurfaceFromElementResult& res,
- nsRefPtr<gfx::DataSourceSurface>& imageOut,
+ RefPtr<gfx::DataSourceSurface>& imageOut,
WebGLTexelFormat* format);
// Returns false if `object` is null or not valid.
template<class ObjectType>
bool ValidateObject(const char* info, ObjectType* object);
// Returns false if `object` is not valid. Considers null to be valid.
template<class ObjectType>
@@ -1336,17 +1336,17 @@ protected:
nsTArray<WebGLRefPtr<WebGLTexture> > mBound2DTextures;
nsTArray<WebGLRefPtr<WebGLTexture> > mBoundCubeMapTextures;
nsTArray<WebGLRefPtr<WebGLTexture> > mBound3DTextures;
nsTArray<WebGLRefPtr<WebGLSampler> > mBoundSamplers;
void ResolveTexturesForDraw() const;
WebGLRefPtr<WebGLProgram> mCurrentProgram;
- nsRefPtr<const webgl::LinkedProgramInfo> mActiveProgramLinkInfo;
+ RefPtr<const webgl::LinkedProgramInfo> mActiveProgramLinkInfo;
GLenum LastColorAttachment() const {
return LOCAL_GL_COLOR_ATTACHMENT0 + mGLMaxColorAttachments - 1;
}
bool ValidateFramebufferTarget(GLenum target, const char* const info);
WebGLRefPtr<WebGLFramebuffer> mBoundDrawFramebuffer;
@@ -1429,17 +1429,17 @@ protected:
GLfloat mDepthClearValue;
GLint mViewportX;
GLint mViewportY;
GLsizei mViewportWidth;
GLsizei mViewportHeight;
bool mAlreadyWarnedAboutViewportLargerThanDest;
- nsRefPtr<WebGLContextLossHandler> mContextLossHandler;
+ RefPtr<WebGLContextLossHandler> mContextLossHandler;
bool mAllowContextRestore;
bool mLastLossWasSimulated;
ContextStatus mContextStatus;
bool mContextLostErrorSet;
// Used for some hardware (particularly Tegra 2 and 4) that likes to
// be Flushed while doing hundreds of draw calls.
int mDrawCallsSinceLastFlush;
--- a/dom/canvas/WebGLContextGL.cpp
+++ b/dom/canvas/WebGLContextGL.cpp
@@ -1856,24 +1856,24 @@ WebGLContext::StencilOpSeparate(GLenum f
return;
MakeContextCurrent();
gl->fStencilOpSeparate(face, sfail, dpfail, dppass);
}
nsresult
WebGLContext::SurfaceFromElementResultToImageSurface(nsLayoutUtils::SurfaceFromElementResult& res,
- nsRefPtr<DataSourceSurface>& imageOut,
+ RefPtr<DataSourceSurface>& imageOut,
WebGLTexelFormat* format)
{
*format = WebGLTexelFormat::None;
if (!res.mSourceSurface)
return NS_OK;
- nsRefPtr<DataSourceSurface> data = res.mSourceSurface->GetDataSurface();
+ RefPtr<DataSourceSurface> data = res.mSourceSurface->GetDataSurface();
if (!data) {
// SurfaceFromElement lied!
return NS_OK;
}
// We disallow loading cross-domain images and videos that have not been validated
// with CORS as WebGL textures. The reason for doing that is that timing
// attacks on WebGL shaders are able to retrieve approximations of the
--- a/dom/canvas/WebGLProgram.cpp
+++ b/dom/canvas/WebGLProgram.cpp
@@ -66,43 +66,43 @@ ParseName(const nsCString& name, nsCStri
*out_isArray = true;
*out_arrayIndex = indexNum;
return true;
}
static void
AddActiveInfo(WebGLContext* webgl, GLint elemCount, GLenum elemType, bool isArray,
const nsACString& baseUserName, const nsACString& baseMappedName,
- std::vector<nsRefPtr<WebGLActiveInfo>>* activeInfoList,
+ std::vector<RefPtr<WebGLActiveInfo>>* activeInfoList,
std::map<nsCString, const WebGLActiveInfo*>* infoLocMap)
{
- nsRefPtr<WebGLActiveInfo> info = new WebGLActiveInfo(webgl, elemCount, elemType,
+ RefPtr<WebGLActiveInfo> info = new WebGLActiveInfo(webgl, elemCount, elemType,
isArray, baseUserName,
baseMappedName);
activeInfoList->push_back(info);
infoLocMap->insert(std::make_pair(info->mBaseUserName, info.get()));
}
static void
AddActiveBlockInfo(const nsACString& baseUserName,
const nsACString& baseMappedName,
- std::vector<nsRefPtr<webgl::UniformBlockInfo>>* activeInfoList)
+ std::vector<RefPtr<webgl::UniformBlockInfo>>* activeInfoList)
{
- nsRefPtr<webgl::UniformBlockInfo> info = new webgl::UniformBlockInfo(baseUserName, baseMappedName);
+ RefPtr<webgl::UniformBlockInfo> info = new webgl::UniformBlockInfo(baseUserName, baseMappedName);
activeInfoList->push_back(info);
}
//#define DUMP_SHADERVAR_MAPPINGS
static already_AddRefed<const webgl::LinkedProgramInfo>
QueryProgramInfo(WebGLProgram* prog, gl::GLContext* gl)
{
- nsRefPtr<webgl::LinkedProgramInfo> info(new webgl::LinkedProgramInfo(prog));
+ RefPtr<webgl::LinkedProgramInfo> info(new webgl::LinkedProgramInfo(prog));
GLuint maxAttribLenWithNull = 0;
gl->fGetProgramiv(prog->mGLName, LOCAL_GL_ACTIVE_ATTRIBUTE_MAX_LENGTH,
(GLint*)&maxAttribLenWithNull);
if (maxAttribLenWithNull < 1)
maxAttribLenWithNull = 1;
GLuint maxUniformLenWithNull = 0;
@@ -432,17 +432,17 @@ WebGLProgram::GetActiveAttrib(GLuint ind
const auto& activeList = mMostRecentLinkInfo->activeAttribs;
if (index >= activeList.size()) {
mContext->ErrorInvalidValue("`index` (%i) must be less than %s (%i).",
index, "ACTIVE_ATTRIBS", activeList.size());
return nullptr;
}
- nsRefPtr<WebGLActiveInfo> ret = activeList[index];
+ RefPtr<WebGLActiveInfo> ret = activeList[index];
return ret.forget();
}
already_AddRefed<WebGLActiveInfo>
WebGLProgram::GetActiveUniform(GLuint index) const
{
if (!mMostRecentLinkInfo) {
// According to the spec, this can return null.
@@ -453,17 +453,17 @@ WebGLProgram::GetActiveUniform(GLuint in
const auto& activeList = mMostRecentLinkInfo->activeUniforms;
if (index >= activeList.size()) {
mContext->ErrorInvalidValue("`index` (%i) must be less than %s (%i).",
index, "ACTIVE_UNIFORMS", activeList.size());
return nullptr;
}
- nsRefPtr<WebGLActiveInfo> ret = activeList[index];
+ RefPtr<WebGLActiveInfo> ret = activeList[index];
return ret.forget();
}
void
WebGLProgram::GetAttachedShaders(nsTArray<nsRefPtr<WebGLShader>>* const out) const
{
out->TruncateLength(0);
@@ -593,17 +593,17 @@ WebGLProgram::GetUniformBlockIndex(const
const NS_LossyConvertUTF16toASCII userName(userName_wide);
nsDependentCString baseUserName;
bool isArray;
size_t arrayIndex;
if (!ParseName(userName, &baseUserName, &isArray, &arrayIndex))
return LOCAL_GL_INVALID_INDEX;
- nsRefPtr<const webgl::UniformBlockInfo> info;
+ RefPtr<const webgl::UniformBlockInfo> info;
if (!LinkInfo()->FindUniformBlock(baseUserName, &info)) {
return LOCAL_GL_INVALID_INDEX;
}
const nsCString& baseMappedName = info->mBaseMappedName;
nsAutoCString mappedName(baseMappedName);
if (isArray) {
mappedName.AppendLiteral("[");
@@ -1020,17 +1020,17 @@ WebGLProgram::GetTransformFeedbackVaryin
}
const nsCString& varyingUserName = mTransformFeedbackVaryings[index];
WebGLActiveInfo* info;
LinkInfo()->FindAttrib(varyingUserName, (const WebGLActiveInfo**) &info);
MOZ_ASSERT(info);
- nsRefPtr<WebGLActiveInfo> ret(info);
+ RefPtr<WebGLActiveInfo> ret(info);
return ret.forget();
}
bool
WebGLProgram::FindUniformBlockByMappedName(const nsACString& mappedName,
nsCString* const out_userName,
bool* const out_isArray) const
{
--- a/dom/canvas/WebGLProgram.h
+++ b/dom/canvas/WebGLProgram.h
@@ -6,17 +6,17 @@
#ifndef WEBGL_PROGRAM_H_
#define WEBGL_PROGRAM_H_
#include <map>
#include <set>
#include <vector>
#include "mozilla/LinkedList.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/WeakPtr.h"
#include "nsString.h"
#include "nsWrapperCache.h"
#include "WebGLObjectModel.h"
template<class> class nsRefPtr;
@@ -54,26 +54,26 @@ struct UniformBlockInfo final
struct LinkedProgramInfo final
: public RefCounted<LinkedProgramInfo>
, public SupportsWeakPtr<LinkedProgramInfo>
{
MOZ_DECLARE_REFCOUNTED_TYPENAME(LinkedProgramInfo)
MOZ_DECLARE_WEAKREFERENCE_TYPENAME(LinkedProgramInfo)
WebGLProgram* const prog;
- std::vector<nsRefPtr<WebGLActiveInfo>> activeAttribs;
- std::vector<nsRefPtr<WebGLActiveInfo>> activeUniforms;
+ std::vector<RefPtr<WebGLActiveInfo>> activeAttribs;
+ std::vector<RefPtr<WebGLActiveInfo>> activeUniforms;
// Needed for Get{Attrib,Uniform}Location. The keys for these are non-mapped
// user-facing `GLActiveInfo::name`s, without any final "[0]".
std::map<nsCString, const WebGLActiveInfo*> attribMap;
std::map<nsCString, const WebGLActiveInfo*> uniformMap;
std::map<nsCString, const nsCString>* fragDataMap;
- std::vector<nsRefPtr<UniformBlockInfo>> uniformBlocks;
+ std::vector<RefPtr<UniformBlockInfo>> uniformBlocks;
// Needed for draw call validation.
std::set<GLuint> activeAttribLocs;
explicit LinkedProgramInfo(WebGLProgram* prog);
bool FindAttrib(const nsCString& baseUserName,
const WebGLActiveInfo** const out_activeInfo) const
@@ -93,17 +93,17 @@ struct LinkedProgramInfo final
if (itr == uniformMap.end())
return false;
*out_activeInfo = itr->second;
return true;
}
bool FindUniformBlock(const nsCString& baseUserName,
- nsRefPtr<const UniformBlockInfo>* const out_info) const
+ RefPtr<const UniformBlockInfo>* const out_info) const
{
const size_t count = uniformBlocks.size();
for (size_t i = 0; i < count; i++) {
if (baseUserName == uniformBlocks[i]->mBaseUserName) {
*out_info = uniformBlocks[i].get();
return true;
}
}
@@ -206,17 +206,17 @@ public:
private:
WebGLRefPtr<WebGLShader> mVertShader;
WebGLRefPtr<WebGLShader> mFragShader;
std::map<nsCString, GLuint> mBoundAttribLocs;
std::vector<nsCString> mTransformFeedbackVaryings;
GLenum mTransformFeedbackBufferMode;
nsCString mLinkLog;
- nsRefPtr<const webgl::LinkedProgramInfo> mMostRecentLinkInfo;
+ RefPtr<const webgl::LinkedProgramInfo> mMostRecentLinkInfo;
// Storage for transform feedback varyings before link.
// (Work around for bug seen on nVidia drivers.)
std::vector<std::string> mTempMappedVaryings;
};
} // namespace mozilla
#endif // WEBGL_PROGRAM_H_
--- a/dom/canvas/WebGLTextureUpload.cpp
+++ b/dom/canvas/WebGLTextureUpload.cpp
@@ -691,17 +691,17 @@ WebGLTexture::TexImage2D(TexImageTarget
// Trying to handle the video by GPU directly first
if (TexImageFromVideoElement(texImageTarget, level, internalFormat,
unpackFormat, unpackType, elem))
{
return;
}
- nsRefPtr<gfx::DataSourceSurface> data;
+ RefPtr<gfx::DataSourceSurface> data;
WebGLTexelFormat srcFormat;
nsLayoutUtils::SurfaceFromElementResult res = mContext->SurfaceFromElement(elem);
*out_rv = mContext->SurfaceFromElementResultToImageSurface(res, data, &srcFormat);
if (out_rv->Failed() || !data)
return;
gfx::IntSize size = data->GetSize();
uint32_t byteLength = data->Stride() * size.height;
@@ -1006,17 +1006,17 @@ WebGLTexture::TexSubImage2D(TexImageTarg
const TexInternalFormat internalFormat = imageInfo.EffectiveInternalFormat();
// Trying to handle the video by GPU directly first
if (TexImageFromVideoElement(texImageTarget, level, internalFormat.get(), unpackFormat, unpackType, elem))
{
return;
}
- nsRefPtr<gfx::DataSourceSurface> data;
+ RefPtr<gfx::DataSourceSurface> data;
WebGLTexelFormat srcFormat;
nsLayoutUtils::SurfaceFromElementResult res = mContext->SurfaceFromElement(elem);
*out_rv = mContext->SurfaceFromElementResultToImageSurface(res, data, &srcFormat);
if (out_rv->Failed() || !data)
return;
gfx::IntSize size = data->GetSize();
uint32_t byteLength = data->Stride() * size.height;
--- a/dom/canvas/nsICanvasRenderingContextInternal.h
+++ b/dom/canvas/nsICanvasRenderingContextInternal.h
@@ -8,17 +8,17 @@
#include "mozilla/gfx/2D.h"
#include "nsISupports.h"
#include "nsIInputStream.h"
#include "nsIDocShell.h"
#include "nsRefreshDriver.h"
#include "mozilla/dom/HTMLCanvasElement.h"
#include "GraphicsFilter.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#define NS_ICANVASRENDERINGCONTEXTINTERNAL_IID \
{ 0x3cc9e801, 0x1806, 0x4ff6, \
{ 0x86, 0x14, 0xf9, 0xd0, 0xf4, 0xfb, 0x3b, 0x08 } }
class gfxASurface;
class nsDisplayListBuilder;
--- a/dom/devicestorage/DeviceStorage.h
+++ b/dom/devicestorage/DeviceStorage.h
@@ -5,17 +5,17 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef DeviceStorage_h
#define DeviceStorage_h
#include "nsIFile.h"
#include "nsIPrincipal.h"
#include "mozilla/DOMEventTargetHelper.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/StaticPtr.h"
#include "mozilla/dom/DOMRequest.h"
#include "nsWeakReference.h"
#define DEVICESTORAGE_PICTURES "pictures"
#define DEVICESTORAGE_VIDEOS "videos"
#define DEVICESTORAGE_MUSIC "music"
#define DEVICESTORAGE_APPS "apps"
--- a/dom/html/HTMLCanvasElement.cpp
+++ b/dom/html/HTMLCanvasElement.cpp
@@ -60,29 +60,29 @@ public:
: mRegistered(false),
mOwningElement(aOwningElement),
mRefreshDriver(aRefreshDriver)
{
MOZ_ASSERT(mOwningElement);
}
static already_AddRefed<DataSourceSurface>
- CopySurface(const nsRefPtr<SourceSurface>& aSurface)
+ CopySurface(const RefPtr<SourceSurface>& aSurface)
{
- nsRefPtr<DataSourceSurface> data = aSurface->GetDataSurface();
+ RefPtr<DataSourceSurface> data = aSurface->GetDataSurface();
if (!data) {
return nullptr;
}
DataSourceSurface::ScopedMap read(data, DataSourceSurface::READ);
if (!read.IsMapped()) {
return nullptr;
}
- nsRefPtr<DataSourceSurface> copy =
+ RefPtr<DataSourceSurface> copy =
Factory::CreateDataSourceSurfaceWithStride(data->GetSize(),
data->GetFormat(),
read.GetStride());
if (!copy) {
return nullptr;
}
DataSourceSurface::ScopedMap write(copy, DataSourceSurface::WRITE);
@@ -115,22 +115,22 @@ public:
if (mOwningElement->IsContextCleanForFrameCapture()) {
return;
}
if (!mOwningElement->IsFrameCaptureRequested()) {
return;
}
- nsRefPtr<SourceSurface> snapshot = mOwningElement->GetSurfaceSnapshot(nullptr);
+ RefPtr<SourceSurface> snapshot = mOwningElement->GetSurfaceSnapshot(nullptr);
if (!snapshot) {
return;
}
- nsRefPtr<DataSourceSurface> copy = CopySurface(snapshot);
+ RefPtr<DataSourceSurface> copy = CopySurface(snapshot);
mOwningElement->SetFrameCapture(copy.forget());
mOwningElement->MarkContextCleanForFrameCapture();
}
void DetachFromRefreshDriver()
{
MOZ_ASSERT(mOwningElement);
@@ -170,17 +170,17 @@ private:
virtual ~RequestedFrameRefreshObserver()
{
MOZ_ASSERT(!mRefreshDriver);
MOZ_ASSERT(!mRegistered);
}
bool mRegistered;
HTMLCanvasElement* const mOwningElement;
- nsRefPtr<nsRefreshDriver> mRefreshDriver;
+ RefPtr<nsRefreshDriver> mRefreshDriver;
};
// ---------------------------------------------------------------------------
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(HTMLCanvasPrintState, mCanvas,
mContext, mCallback)
NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(HTMLCanvasPrintState, AddRef)
@@ -1196,17 +1196,17 @@ HTMLCanvasElement::IsFrameCaptureRequest
}
}
return false;
}
void
HTMLCanvasElement::SetFrameCapture(already_AddRefed<SourceSurface> aSurface)
{
- nsRefPtr<SourceSurface> surface = aSurface;
+ RefPtr<SourceSurface> surface = aSurface;
CairoImage::Data imageData;
imageData.mSize = surface->GetSize();
imageData.mSourceSurface = surface;
nsRefPtr<CairoImage> image = new CairoImage();
image->SetData(imageData);
--- a/dom/ipc/ContentParent.cpp
+++ b/dom/ipc/ContentParent.cpp
@@ -2729,17 +2729,17 @@ ContentParent::RecvSetClipboard(const IP
item.flavor().EqualsLiteral(kGIFImageMime)) {
const IPCDataTransferImage& imageDetails = item.imageDetails();
const gfx::IntSize size(imageDetails.width(), imageDetails.height());
if (!size.width || !size.height) {
return true;
}
nsCString text = item.data().get_nsCString();
- nsRefPtr<gfx::DataSourceSurface> image =
+ mozilla::RefPtr<gfx::DataSourceSurface> image =
new mozilla::gfx::SourceSurfaceRawData();
mozilla::gfx::SourceSurfaceRawData* raw =
static_cast<mozilla::gfx::SourceSurfaceRawData*>(image.get());
raw->InitWrappingData(
reinterpret_cast<uint8_t*>(const_cast<nsCString&>(text).BeginWriting()),
size, imageDetails.stride(),
static_cast<mozilla::gfx::SurfaceFormat>(imageDetails.format()), false);
raw->GuaranteePersistance();
--- a/dom/ipc/TabParent.cpp
+++ b/dom/ipc/TabParent.cpp
@@ -2080,17 +2080,17 @@ TabParent::RecvSetCustomCursor(const nsC
if (widget) {
if (aForce) {
widget->ClearCachedCursor();
}
if (mTabSetsCursor) {
const gfx::IntSize size(aWidth, aHeight);
- nsRefPtr<gfx::DataSourceSurface> customCursor = new mozilla::gfx::SourceSurfaceRawData();
+ mozilla::RefPtr<gfx::DataSourceSurface> customCursor = new mozilla::gfx::SourceSurfaceRawData();
mozilla::gfx::SourceSurfaceRawData* raw = static_cast<mozilla::gfx::SourceSurfaceRawData*>(customCursor.get());
raw->InitWrappingData(
reinterpret_cast<uint8_t*>(const_cast<nsCString&>(aCursorData).BeginWriting()),
size, aStride, static_cast<mozilla::gfx::SurfaceFormat>(aFormat), false);
raw->GuaranteePersistance();
nsRefPtr<gfxDrawable> drawable = new gfxSurfaceDrawable(customCursor, size);
nsCOMPtr<imgIContainer> cursorImage(image::ImageOps::CreateFromDrawable(drawable));
@@ -3576,17 +3576,17 @@ TabParent::AddInitialDnDDataTo(DataTrans
variant, i,
nsContentUtils::GetSystemPrincipal());
}
}
mInitialDataTransferItems.Clear();
}
void
-TabParent::TakeDragVisualization(nsRefPtr<mozilla::gfx::SourceSurface>& aSurface,
+TabParent::TakeDragVisualization(RefPtr<mozilla::gfx::SourceSurface>& aSurface,
int32_t& aDragAreaX, int32_t& aDragAreaY)
{
aSurface = mDnDVisualization.forget();
aDragAreaX = mDragAreaX;
aDragAreaY = mDragAreaY;
}
bool
--- a/dom/ipc/TabParent.h
+++ b/dom/ipc/TabParent.h
@@ -10,17 +10,17 @@
#include "js/TypeDecls.h"
#include "mozilla/ContentCache.h"
#include "mozilla/dom/ipc/IdType.h"
#include "mozilla/dom/PBrowserParent.h"
#include "mozilla/dom/PFilePickerParent.h"
#include "mozilla/dom/TabContext.h"
#include "mozilla/EventForwards.h"
#include "mozilla/dom/File.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsCOMPtr.h"
#include "nsIAuthPromptProvider.h"
#include "nsIBrowserDOMWindow.h"
#include "nsIDOMEventListener.h"
#include "nsISecureBrowserUI.h"
#include "nsITabParent.h"
#include "nsIWebBrowserPersistable.h"
#include "nsIXULBrowserWindow.h"
@@ -433,17 +433,17 @@ public:
const uint32_t& aAction,
const nsCString& aVisualDnDData,
const uint32_t& aWidth, const uint32_t& aHeight,
const uint32_t& aStride, const uint8_t& aFormat,
const int32_t& aDragAreaX, const int32_t& aDragAreaY) override;
void AddInitialDnDDataTo(DataTransfer* aDataTransfer);
- void TakeDragVisualization(nsRefPtr<mozilla::gfx::SourceSurface>& aSurface,
+ void TakeDragVisualization(RefPtr<mozilla::gfx::SourceSurface>& aSurface,
int32_t& aDragAreaX, int32_t& aDragAreaY);
layout::RenderFrameParent* GetRenderFrame();
// Called by HttpChannelParent. The function may use a new process to
// reload the URI associated with the given channel.
void OnStartSignedPackageRequest(nsIChannel* aChannel);
protected:
@@ -549,17 +549,17 @@ private:
{
eString,
eBlob
};
DataType mType;
};
nsTArray<nsTArray<DataTransferItem>> mInitialDataTransferItems;
- nsRefPtr<gfx::DataSourceSurface> mDnDVisualization;
+ mozilla::RefPtr<gfx::DataSourceSurface> mDnDVisualization;
int32_t mDragAreaX;
int32_t mDragAreaY;
// When true, the TabParent is initialized without child side's request.
// When false, the TabParent is initialized by window.open() from child side.
bool mInitedByParent;
nsCOMPtr<nsILoadContext> mLoadContext;
--- a/dom/media/AudioStream.h
+++ b/dom/media/AudioStream.h
@@ -7,17 +7,17 @@
#define AudioStream_h_
#include "AudioSampleFormat.h"
#include "nsAutoPtr.h"
#include "nsCOMPtr.h"
#include "nsThreadUtils.h"
#include "mozilla/dom/AudioChannelBinding.h"
#include "mozilla/Monitor.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/UniquePtr.h"
#include "CubebUtils.h"
#include "soundtouch/SoundTouchFactory.h"
namespace mozilla {
struct CubebDestroyPolicy
--- a/dom/media/DOMMediaStream.cpp
+++ b/dom/media/DOMMediaStream.cpp
@@ -465,34 +465,34 @@ DOMMediaStream::GetTracks(nsTArray<nsRef
for (const nsRefPtr<TrackPort>& info : mTracks) {
aTracks.AppendElement(info->GetTrack());
}
}
void
DOMMediaStream::AddTrack(MediaStreamTrack& aTrack)
{
- nsRefPtr<ProcessedMediaStream> dest = mPlaybackStream->AsProcessedStream();
+ RefPtr<ProcessedMediaStream> dest = mPlaybackStream->AsProcessedStream();
MOZ_ASSERT(dest);
if (!dest) {
return;
}
LOG(LogLevel::Info, ("DOMMediaStream %p Adding track %p (from stream %p with ID %d)",
this, &aTrack, aTrack.GetStream(), aTrack.GetTrackID()));
if (HasTrack(aTrack)) {
LOG(LogLevel::Debug, ("DOMMediaStream %p already contains track %p", this, &aTrack));
return;
}
- nsRefPtr<DOMMediaStream> addedDOMStream = aTrack.GetStream();
+ RefPtr<DOMMediaStream> addedDOMStream = aTrack.GetStream();
MOZ_RELEASE_ASSERT(addedDOMStream);
- nsRefPtr<MediaStream> owningStream = addedDOMStream->GetOwnedStream();
+ RefPtr<MediaStream> owningStream = addedDOMStream->GetOwnedStream();
MOZ_RELEASE_ASSERT(owningStream);
CombineWithPrincipal(addedDOMStream->mPrincipal);
// Hook up the underlying track with our underlying playback stream.
nsRefPtr<MediaInputPort> inputPort =
GetPlaybackStream()->AllocateInputPort(owningStream, aTrack.GetTrackID());
nsRefPtr<TrackPort> trackPort =
--- a/dom/media/MediaDecoderReader.cpp
+++ b/dom/media/MediaDecoderReader.cpp
@@ -334,17 +334,17 @@ MediaDecoderReader::RequestVideoData(boo
!VideoQueue().IsFinished()) {
if (!DecodeVideoFrame(skip, aTimeThreshold)) {
VideoQueue().Finish();
} else if (skip) {
// We still need to decode more data in order to skip to the next
// keyframe. Post another task to the decode task queue to decode
// again. We don't just decode straight in a loop here, as that
// would hog the decode task queue.
- nsRefPtr<nsIRunnable> task(new ReRequestVideoWithSkipTask(this, aTimeThreshold));
+ RefPtr<nsIRunnable> task(new ReRequestVideoWithSkipTask(this, aTimeThreshold));
mTaskQueue->Dispatch(task.forget());
return p;
}
}
if (VideoQueue().GetSize() > 0) {
nsRefPtr<VideoData> v = VideoQueue().PopFront();
if (v && mVideoDiscontinuity) {
v->mDiscontinuity = true;
@@ -370,17 +370,17 @@ MediaDecoderReader::RequestAudioData()
AudioQueue().Finish();
break;
}
// AudioQueue size is still zero, post a task to try again. Don't spin
// waiting in this while loop since it somehow prevents audio EOS from
// coming in gstreamer 1.x when there is still video buffer waiting to be
// consumed. (|mVideoSinkBufferCount| > 0)
if (AudioQueue().GetSize() == 0 && mTaskQueue) {
- nsRefPtr<nsIRunnable> task(new ReRequestAudioTask(this));
+ RefPtr<nsIRunnable> task(new ReRequestAudioTask(this));
mTaskQueue->Dispatch(task.forget());
return p;
}
}
if (AudioQueue().GetSize() > 0) {
nsRefPtr<AudioData> a = AudioQueue().PopFront();
if (mAudioDiscontinuity) {
a->mDiscontinuity = true;
--- a/dom/media/MediaDecoderReader.h
+++ b/dom/media/MediaDecoderReader.h
@@ -263,17 +263,17 @@ public:
// In situations where these notifications come from stochastic network
// activity, we can save significant recomputation by throttling the delivery
// of these updates to the reader implementation. We don't want to do this
// throttling when the update comes from MSE code, since that code needs the
// updates to be observable immediately, and is generally less
// trigger-happy with notifications anyway.
void DispatchNotifyDataArrived(uint32_t aLength, int64_t aOffset, bool aThrottleUpdates)
{
- nsRefPtr<nsRunnable> r =
+ RefPtr<nsRunnable> r =
NS_NewRunnableMethodWithArg<media::Interval<int64_t>>(this, aThrottleUpdates ? &MediaDecoderReader::ThrottledNotifyDataArrived
: &MediaDecoderReader::NotifyDataArrived,
media::Interval<int64_t>(aOffset, aOffset + aLength));
OwnerThread()->Dispatch(r.forget(), AbstractThread::DontAssertDispatchSuccess);
}
// Notify the reader that data from the resource was evicted (MediaSource only)
virtual void NotifyDataRemoved() {}
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -2220,17 +2220,17 @@ MediaDecoderStateMachine::FinishShutdown
// state machine is destroyed on the main thread, and so the
// event runner running this function (which holds a reference to the
// state machine) needs to finish and be released in order to allow
// that. So we dispatch an event to run after this event runner has
// finished and released its monitor/references. That event then will
// dispatch an event to the main thread to release the decoder and
// state machine.
DECODER_LOG("Shutting down state machine task queue");
- nsRefPtr<DecoderDisposer> disposer = new DecoderDisposer(mDecoder, this);
+ RefPtr<DecoderDisposer> disposer = new DecoderDisposer(mDecoder, this);
OwnerThread()->BeginShutdown()->Then(AbstractThread::MainThread(), __func__,
disposer.get(),
&DecoderDisposer::OnTaskQueueShutdown,
&DecoderDisposer::OnTaskQueueShutdown);
}
nsresult MediaDecoderStateMachine::RunStateMachine()
{
--- a/dom/media/MediaFormatReader.cpp
+++ b/dom/media/MediaFormatReader.cpp
@@ -845,17 +845,17 @@ MediaFormatReader::ScheduleUpdate(TrackT
return;
}
auto& decoder = GetDecoderData(aTrack);
if (decoder.mUpdateScheduled) {
return;
}
LOGV("SchedulingUpdate(%s)", TrackTypeToStr(aTrack));
decoder.mUpdateScheduled = true;
- nsRefPtr<nsIRunnable> task(
+ RefPtr<nsIRunnable> task(
NS_NewRunnableMethodWithArg<TrackType>(this, &MediaFormatReader::Update, aTrack));
OwnerThread()->Dispatch(task.forget());
}
bool
MediaFormatReader::UpdateReceivedNewData(TrackType aTrack)
{
MOZ_ASSERT(OnTaskQueue());
@@ -1279,44 +1279,44 @@ MediaFormatReader::Output(TrackType aTra
aSample->mKeyframe, aSample->mDuration);
if (!aSample) {
NS_WARNING("MediaFormatReader::Output() passed a null sample");
Error(aTrack);
return;
}
- nsRefPtr<nsIRunnable> task =
+ RefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArgs<TrackType, MediaData*>(
this, &MediaFormatReader::NotifyNewOutput, aTrack, aSample);
OwnerThread()->Dispatch(task.forget());
}
void
MediaFormatReader::DrainComplete(TrackType aTrack)
{
- nsRefPtr<nsIRunnable> task =
+ RefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<TrackType>(
this, &MediaFormatReader::NotifyDrainComplete, aTrack);
OwnerThread()->Dispatch(task.forget());
}
void
MediaFormatReader::InputExhausted(TrackType aTrack)
{
- nsRefPtr<nsIRunnable> task =
+ RefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<TrackType>(
this, &MediaFormatReader::NotifyInputExhausted, aTrack);
OwnerThread()->Dispatch(task.forget());
}
void
MediaFormatReader::Error(TrackType aTrack)
{
- nsRefPtr<nsIRunnable> task =
+ RefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<TrackType>(
this, &MediaFormatReader::NotifyError, aTrack);
OwnerThread()->Dispatch(task.forget());
}
void
MediaFormatReader::Flush(TrackType aTrack)
{
@@ -1420,17 +1420,17 @@ MediaFormatReader::Seek(int64_t aTime, i
return SeekPromise::CreateAndReject(NS_ERROR_FAILURE, __func__);
}
mOriginalSeekTime = Some(media::TimeUnit::FromMicroseconds(aTime));
mPendingSeekTime = mOriginalSeekTime;
nsRefPtr<SeekPromise> p = mSeekPromise.Ensure(__func__);
- nsRefPtr<nsIRunnable> task(
+ RefPtr<nsIRunnable> task(
NS_NewRunnableMethod(this, &MediaFormatReader::AttemptSeek));
OwnerThread()->Dispatch(task.forget());
return p;
}
void
MediaFormatReader::AttemptSeek()
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -2561,30 +2561,30 @@ MediaManager::Observe(nsISupports* aSubj
// must explicitly do this before dispatching the reply, since the reply may kill us with Stop()
mBackend = nullptr; // last reference, will invoke Shutdown() again
if (NS_FAILED(NS_DispatchToMainThread(mReply.forget()))) {
LOG(("Will leak thread: DispatchToMainthread of reply runnable failed in MediaManager shutdown"));
}
}
nsRefPtr<nsRunnable> mReply;
- nsRefPtr<MediaEngine> mBackend;
+ RefPtr<MediaEngine> mBackend;
};
// Post ShutdownTask to execute on mMediaThread and pass in a lambda
// callback to be executed back on this thread once it is done.
//
// The lambda callback "captures" the 'this' pointer for member access.
// This is safe since this is guaranteed to be here since sSingleton isn't
// cleared until the lambda function clears it.
// note that this == sSingleton
nsRefPtr<MediaManager> that(sSingleton);
// Release the backend (and call Shutdown()) from within the MediaManager thread
- nsRefPtr<MediaEngine> temp;
+ RefPtr<MediaEngine> temp;
{
MutexAutoLock lock(mMutex);
temp = mBackend.forget();
}
// Don't use MediaManager::PostTask() because we're sInShutdown=true here!
mMediaThread->message_loop()->PostTask(FROM_HERE, new ShutdownTask(
temp.forget(),
media::NewRunnableFrom([this, that]() mutable {
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -523,17 +523,17 @@ private:
nsClassHashtable<nsStringHashKey, GetUserMediaTask> mActiveCallbacks;
nsClassHashtable<nsUint64HashKey, nsTArray<nsString>> mCallIds;
// Always exists
nsAutoPtr<base::Thread> mMediaThread;
Mutex mMutex;
// protected with mMutex:
- nsRefPtr<MediaEngine> mBackend;
+ RefPtr<MediaEngine> mBackend;
static StaticRefPtr<MediaManager> sSingleton;
media::CoatCheck<PledgeSourceSet> mOutstandingPledges;
media::CoatCheck<GetUserMediaCallbackMediaStreamListener::PledgeVoid> mOutstandingVoidPledges;
#if defined(MOZ_B2G_CAMERA) && defined(MOZ_WIDGET_GONK)
nsRefPtr<nsDOMCameraManager> mCameraManager;
#endif
--- a/dom/media/MediaResource.cpp
+++ b/dom/media/MediaResource.cpp
@@ -1674,27 +1674,27 @@ public:
NS_IMETHOD Run() {
mDecoder->NotifyBytesConsumed(mNumBytes, mOffset);
// Drop ref to decoder on main thread, just in case this reference
// ends up being the last owning reference somehow.
mDecoder = nullptr;
return NS_OK;
}
- nsRefPtr<MediaDecoder> mDecoder;
+ RefPtr<MediaDecoder> mDecoder;
int64_t mNumBytes;
int64_t mOffset;
};
void BaseMediaResource::DispatchBytesConsumed(int64_t aNumBytes, int64_t aOffset)
{
if (aNumBytes <= 0) {
return;
}
- nsRefPtr<nsIRunnable> event(new DispatchBytesConsumedEvent(mDecoder, aNumBytes, aOffset));
+ RefPtr<nsIRunnable> event(new DispatchBytesConsumedEvent(mDecoder, aNumBytes, aOffset));
NS_DispatchToMainThread(event);
}
nsresult
MediaResourceIndex::Read(char* aBuffer, uint32_t aCount, uint32_t* aBytes)
{
NS_ASSERTION(!NS_IsMainThread(), "Don't call on main thread");
--- a/dom/media/MediaShutdownManager.h
+++ b/dom/media/MediaShutdownManager.h
@@ -4,17 +4,17 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(MediaShutdownManager_h_)
#define MediaShutdownManager_h_
#include "nsIObserver.h"
#include "mozilla/Monitor.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/StaticPtr.h"
#include "nsIThread.h"
#include "nsCOMPtr.h"
#include "nsTHashtable.h"
#include "nsHashKeys.h"
namespace mozilla {
--- a/dom/media/MediaTimer.cpp
+++ b/dom/media/MediaTimer.cpp
@@ -6,17 +6,17 @@
#include "MediaTimer.h"
#include <math.h>
#include "nsComponentManagerUtils.h"
#include "nsThreadUtils.h"
#include "mozilla/DebugOnly.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/SharedThreadPool.h"
namespace mozilla {
NS_IMPL_ADDREF(MediaTimer)
NS_IMPL_RELEASE_WITH_DESTROY(MediaTimer, DispatchDestroy())
MediaTimer::MediaTimer()
@@ -24,17 +24,17 @@ MediaTimer::MediaTimer()
, mTimer(do_CreateInstance("@mozilla.org/timer;1"))
, mCreationTimeStamp(TimeStamp::Now())
, mUpdateScheduled(false)
{
TIMER_LOG("MediaTimer::MediaTimer");
// Use the SharedThreadPool to create an nsIThreadPool with a maximum of one
// thread, which is equivalent to an nsIThread for our purposes.
- nsRefPtr<SharedThreadPool> threadPool(
+ RefPtr<SharedThreadPool> threadPool(
SharedThreadPool::Get(NS_LITERAL_CSTRING("MediaTimer"), 1));
mThread = threadPool.get();
mTimer->SetTarget(mThread);
}
void
MediaTimer::DispatchDestroy()
{
--- a/dom/media/VideoUtils.h
+++ b/dom/media/VideoUtils.h
@@ -7,17 +7,17 @@
#ifndef VideoUtils_h
#define VideoUtils_h
#include "FlushableTaskQueue.h"
#include "mozilla/Attributes.h"
#include "mozilla/CheckedInt.h"
#include "mozilla/MozPromise.h"
#include "mozilla/ReentrantMonitor.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsIThread.h"
#include "nsSize.h"
#include "nsRect.h"
#include "nsThreadUtils.h"
#include "prtime.h"
#include "AudioSampleFormat.h"
--- a/dom/media/directshow/AudioSinkFilter.cpp
+++ b/dom/media/directshow/AudioSinkFilter.cpp
@@ -138,17 +138,17 @@ AudioSinkFilter::GetSampleSink()
//
// Calls to IMediaSeeking are forwarded to the output pin that the
// AudioSinkInputPin is connected to, i.e. upstream towards the parser and
// source filters, which actually implement seeking.
#define ENSURE_CONNECTED_PIN_SEEKING \
if (!mInputPin) { \
return E_NOTIMPL; \
} \
- nsRefPtr<IMediaSeeking> pinSeeking = mInputPin->GetConnectedPinSeeking(); \
+ RefPtr<IMediaSeeking> pinSeeking = mInputPin->GetConnectedPinSeeking(); \
if (!pinSeeking) { \
return E_NOTIMPL; \
}
HRESULT
AudioSinkFilter::GetCapabilities(DWORD* aCapabilities)
{
ENSURE_CONNECTED_PIN_SEEKING
--- a/dom/media/directshow/AudioSinkFilter.h
+++ b/dom/media/directshow/AudioSinkFilter.h
@@ -5,17 +5,17 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(AudioSinkFilter_h_)
#define AudioSinkFilter_h_
#include "BaseFilter.h"
#include "DirectShowUtils.h"
#include "nsAutoPtr.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
namespace mozilla {
class AudioSinkInputPin;
class SampleSink;
// Filter that acts as the end of the graph. Audio samples input into
// this filter block the calling thread, and the calling thread is
--- a/dom/media/directshow/AudioSinkInputPin.cpp
+++ b/dom/media/directshow/AudioSinkInputPin.cpp
@@ -129,21 +129,21 @@ AudioSinkInputPin::Receive(IMediaSample*
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
}
return S_OK;
}
already_AddRefed<IMediaSeeking>
AudioSinkInputPin::GetConnectedPinSeeking()
{
- nsRefPtr<IPin> peer = GetConnected();
+ RefPtr<IPin> peer = GetConnected();
if (!peer)
return nullptr;
- nsRefPtr<IMediaSeeking> seeking;
- peer->QueryInterface(static_cast<IMediaSeeking**>(getter_AddRefs(seeking)));
+ RefPtr<IMediaSeeking> seeking;
+ peer->QueryInterface(static_cast<IMediaSeeking**>(byRef(seeking)));
return seeking.forget();
}
HRESULT
AudioSinkInputPin::BeginFlush()
{
HRESULT hr = media::BaseInputPin::BeginFlush();
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
--- a/dom/media/directshow/AudioSinkInputPin.h
+++ b/dom/media/directshow/AudioSinkInputPin.h
@@ -4,17 +4,17 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(AudioSinkInputPin_h_)
#define AudioSinkInputPin_h_
#include "BaseInputPin.h"
#include "DirectShowUtils.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsAutoPtr.h"
namespace mozilla {
namespace media {
class MediaType;
}
--- a/dom/media/directshow/DirectShowReader.cpp
+++ b/dom/media/directshow/DirectShowReader.cpp
@@ -1,17 +1,17 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "DirectShowReader.h"
#include "MediaDecoderReader.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "DirectShowUtils.h"
#include "AudioSinkFilter.h"
#include "SourceFilter.h"
#include "SampleSink.h"
#include "MediaResource.h"
#include "VideoUtils.h"
using namespace mozilla::media;
@@ -105,36 +105,36 @@ DirectShowReader::ReadMetadata(MediaInfo
nsresult rv;
// Create the filter graph, reference it by the GraphBuilder interface,
// to make graph building more convenient.
hr = CoCreateInstance(CLSID_FilterGraph,
nullptr,
CLSCTX_INPROC_SERVER,
IID_IGraphBuilder,
- reinterpret_cast<void**>(static_cast<IGraphBuilder**>(getter_AddRefs(mGraph))));
+ reinterpret_cast<void**>(static_cast<IGraphBuilder**>(byRef(mGraph))));
NS_ENSURE_TRUE(SUCCEEDED(hr) && mGraph, NS_ERROR_FAILURE);
rv = ParseMP3Headers(&mMP3FrameParser, mDecoder->GetResource());
NS_ENSURE_SUCCESS(rv, rv);
#ifdef DEBUG
// Add the graph to the Running Object Table so that we can connect
// to this graph with GraphEdit/GraphStudio. Note: on Vista and up you must
// also regsvr32 proppage.dll from the Windows SDK.
// See: http://msdn.microsoft.com/en-us/library/ms787252(VS.85).aspx
hr = AddGraphToRunningObjectTable(mGraph, &mRotRegister);
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
#endif
// Extract the interface pointers we'll need from the filter graph.
- hr = mGraph->QueryInterface(static_cast<IMediaControl**>(getter_AddRefs(mControl)));
+ hr = mGraph->QueryInterface(static_cast<IMediaControl**>(byRef(mControl)));
NS_ENSURE_TRUE(SUCCEEDED(hr) && mControl, NS_ERROR_FAILURE);
- hr = mGraph->QueryInterface(static_cast<IMediaSeeking**>(getter_AddRefs(mMediaSeeking)));
+ hr = mGraph->QueryInterface(static_cast<IMediaSeeking**>(byRef(mMediaSeeking)));
NS_ENSURE_TRUE(SUCCEEDED(hr) && mMediaSeeking, NS_ERROR_FAILURE);
// Build the graph. Create the filters we need, and connect them. We
// build the entire graph ourselves to prevent other decoders installed
// on the system being created and used.
// Our source filters, wraps the MediaResource.
mSourceFilter = new SourceFilter(MEDIATYPE_Stream, MEDIASUBTYPE_MPEG1Audio);
@@ -142,36 +142,36 @@ DirectShowReader::ReadMetadata(MediaInfo
rv = mSourceFilter->Init(mDecoder->GetResource(), mMP3FrameParser.GetMP3Offset());
NS_ENSURE_SUCCESS(rv, rv);
hr = mGraph->AddFilter(mSourceFilter, L"MozillaDirectShowSource");
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
// The MPEG demuxer.
- nsRefPtr<IBaseFilter> demuxer;
+ RefPtr<IBaseFilter> demuxer;
hr = CreateAndAddFilter(mGraph,
CLSID_MPEG1Splitter,
L"MPEG1Splitter",
- getter_AddRefs(demuxer));
+ byRef(demuxer));
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
// Platform MP3 decoder.
- nsRefPtr<IBaseFilter> decoder;
+ RefPtr<IBaseFilter> decoder;
// Firstly try to create the MP3 decoder filter that ships with WinXP
// directly. This filter doesn't normally exist on later versions of
// Windows.
hr = CreateAndAddFilter(mGraph,
CLSID_MPEG_LAYER_3_DECODER_FILTER,
L"MPEG Layer 3 Decoder",
- getter_AddRefs(decoder));
+ byRef(decoder));
if (FAILED(hr)) {
// Failed to create MP3 decoder filter. Try to instantiate
// the MP3 decoder DMO.
- hr = AddMP3DMOWrapperFilter(mGraph, getter_AddRefs(decoder));
+ hr = AddMP3DMOWrapperFilter(mGraph, byRef(decoder));
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
}
// Sink, captures audio samples and inserts them into our pipeline.
static const wchar_t* AudioSinkFilterName = L"MozAudioSinkFilter";
mAudioSinkFilter = new AudioSinkFilter(AudioSinkFilterName, &hr);
NS_ENSURE_TRUE(mAudioSinkFilter && SUCCEEDED(hr), NS_ERROR_FAILURE);
hr = mGraph->AddFilter(mAudioSinkFilter, AudioSinkFilterName);
@@ -239,18 +239,18 @@ UnsignedByteToAudioSample(uint8_t aValue
bool
DirectShowReader::Finish(HRESULT aStatus)
{
MOZ_ASSERT(OnTaskQueue());
LOG("DirectShowReader::Finish(0x%x)", aStatus);
// Notify the filter graph of end of stream.
- nsRefPtr<IMediaEventSink> eventSink;
- HRESULT hr = mGraph->QueryInterface(static_cast<IMediaEventSink**>(getter_AddRefs(eventSink)));
+ RefPtr<IMediaEventSink> eventSink;
+ HRESULT hr = mGraph->QueryInterface(static_cast<IMediaEventSink**>(byRef(eventSink)));
if (SUCCEEDED(hr) && eventSink) {
eventSink->Notify(EC_COMPLETE, aStatus, 0);
}
return false;
}
class DirectShowCopy
{
@@ -303,17 +303,17 @@ DirectShowReader::DecodeAudioData()
SampleSink* sink = mAudioSinkFilter->GetSampleSink();
if (sink->AtEOS()) {
// End of stream.
return Finish(S_OK);
}
// Get the next chunk of audio samples. This blocks until the sample
// arrives, or an error occurs (like the stream is shutdown).
- nsRefPtr<IMediaSample> sample;
+ RefPtr<IMediaSample> sample;
hr = sink->Extract(sample);
if (FAILED(hr) || hr == S_FALSE) {
return Finish(hr);
}
int64_t start = 0, end = 0;
sample->GetMediaTime(&start, &end);
LOG("DirectShowReader::DecodeAudioData [%4.2lf-%4.2lf]",
--- a/dom/media/directshow/DirectShowReader.h
+++ b/dom/media/directshow/DirectShowReader.h
@@ -4,17 +4,17 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(DirectShowReader_h_)
#define DirectShowReader_h_
#include "windows.h" // HRESULT, DWORD
#include "MediaDecoderReader.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "MP3FrameParser.h"
struct IGraphBuilder;
struct IMediaControl;
struct IMediaSeeking;
namespace mozilla {
@@ -71,26 +71,26 @@ private:
// the code to send to the filter graph. Always returns false, so
// that we can just "return Finish()" from DecodeAudioData().
bool Finish(HRESULT aStatus);
nsresult SeekInternal(int64_t aTime);
// DirectShow filter graph, and associated playback and seeking
// control interfaces.
- nsRefPtr<IGraphBuilder> mGraph;
- nsRefPtr<IMediaControl> mControl;
- nsRefPtr<IMediaSeeking> mMediaSeeking;
+ RefPtr<IGraphBuilder> mGraph;
+ RefPtr<IMediaControl> mControl;
+ RefPtr<IMediaSeeking> mMediaSeeking;
// Wraps the MediaResource, and feeds undecoded data into the filter graph.
- nsRefPtr<SourceFilter> mSourceFilter;
+ RefPtr<SourceFilter> mSourceFilter;
// Sits at the end of the graph, removing decoded samples from the graph.
// The graph will block while this is blocked, i.e. it will pause decoding.
- nsRefPtr<AudioSinkFilter> mAudioSinkFilter;
+ RefPtr<AudioSinkFilter> mAudioSinkFilter;
// Some MP3s are variable bitrate, so DirectShow's duration estimation
// can make its duration estimation based on the wrong bitrate. So we parse
// the MP3 frames to get a more accuate estimate of the duration.
MP3FrameParser mMP3FrameParser;
#ifdef DEBUG
// Used to add/remove the filter graph to the Running Object Table. You can
--- a/dom/media/directshow/DirectShowUtils.cpp
+++ b/dom/media/directshow/DirectShowUtils.cpp
@@ -4,17 +4,17 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "DirectShowUtils.h"
#include "dmodshow.h"
#include "wmcodecdsp.h"
#include "dmoreg.h"
#include "nsAutoPtr.h"
#include "mozilla/ArrayUtils.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsPrintfCString.h"
#define WARN(...) NS_WARNING(nsPrintfCString(__VA_ARGS__).get())
namespace mozilla {
// Create a table which maps GUIDs to a string representation of the GUID.
// This is useful for debugging purposes, for logging the GUIDs of media types.
@@ -307,18 +307,18 @@ HRESULT
MatchUnconnectedPin(IPin* aPin,
PIN_DIRECTION aPinDir,
bool *aOutMatches)
{
NS_ENSURE_TRUE(aPin, E_POINTER);
NS_ENSURE_TRUE(aOutMatches, E_POINTER);
// Ensure the pin is unconnected.
- nsRefPtr<IPin> peer;
- HRESULT hr = aPin->ConnectedTo(getter_AddRefs(peer));
+ RefPtr<IPin> peer;
+ HRESULT hr = aPin->ConnectedTo(byRef(peer));
if (hr != VFW_E_NOT_CONNECTED) {
*aOutMatches = false;
return hr;
}
// Ensure the pin is of the specified direction.
PIN_DIRECTION pinDir;
hr = aPin->QueryDirection(&pinDir);
@@ -327,43 +327,43 @@ MatchUnconnectedPin(IPin* aPin,
*aOutMatches = (pinDir == aPinDir);
return S_OK;
}
// Return the first unconnected input pin or output pin.
already_AddRefed<IPin>
GetUnconnectedPin(IBaseFilter* aFilter, PIN_DIRECTION aPinDir)
{
- nsRefPtr<IEnumPins> enumPins;
+ RefPtr<IEnumPins> enumPins;
- HRESULT hr = aFilter->EnumPins(getter_AddRefs(enumPins));
+ HRESULT hr = aFilter->EnumPins(byRef(enumPins));
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
// Test each pin to see if it matches the direction we're looking for.
- nsRefPtr<IPin> pin;
- while (S_OK == enumPins->Next(1, getter_AddRefs(pin), nullptr)) {
+ RefPtr<IPin> pin;
+ while (S_OK == enumPins->Next(1, byRef(pin), nullptr)) {
bool matches = FALSE;
if (SUCCEEDED(MatchUnconnectedPin(pin, aPinDir, &matches)) &&
matches) {
return pin.forget();
}
}
return nullptr;
}
HRESULT
ConnectFilters(IGraphBuilder* aGraph,
IBaseFilter* aOutputFilter,
IBaseFilter* aInputFilter)
{
- nsRefPtr<IPin> output = GetUnconnectedPin(aOutputFilter, PINDIR_OUTPUT);
+ RefPtr<IPin> output = GetUnconnectedPin(aOutputFilter, PINDIR_OUTPUT);
NS_ENSURE_TRUE(output, E_FAIL);
- nsRefPtr<IPin> input = GetUnconnectedPin(aInputFilter, PINDIR_INPUT);
+ RefPtr<IPin> input = GetUnconnectedPin(aInputFilter, PINDIR_INPUT);
NS_ENSURE_TRUE(output, E_FAIL);
return aGraph->Connect(output, input);
}
} // namespace mozilla
// avoid redefined macro in unified build
--- a/dom/media/directshow/SampleSink.cpp
+++ b/dom/media/directshow/SampleSink.cpp
@@ -76,17 +76,17 @@ SampleSink::Receive(IMediaSample* aSampl
mSample = aSample;
// Notify the signal, to awaken the consumer thread in WaitForSample()
// if necessary.
mon.NotifyAll();
return S_OK;
}
HRESULT
-SampleSink::Extract(nsRefPtr<IMediaSample>& aOutSample)
+SampleSink::Extract(RefPtr<IMediaSample>& aOutSample)
{
ReentrantMonitorAutoEnter mon(mMonitor);
// Loop until we have a sample, or we should abort.
while (true) {
if (mIsFlushing) {
return S_FALSE;
}
if (mSample) {
--- a/dom/media/directshow/SampleSink.h
+++ b/dom/media/directshow/SampleSink.h
@@ -5,17 +5,17 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(SampleSink_h_)
#define SampleSink_h_
#include "BaseFilter.h"
#include "DirectShowUtils.h"
#include "nsAutoPtr.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/ReentrantMonitor.h"
namespace mozilla {
class SampleSink {
public:
SampleSink();
virtual ~SampleSink();
@@ -30,17 +30,17 @@ public:
// Called when a sample is delivered by the DirectShow graph to the sink.
// The decode thread retrieves the sample by calling WaitForSample().
// Blocks if there's already a sample waiting to be consumed by the decode
// thread.
HRESULT Receive(IMediaSample* aSample);
// Retrieves a sample from the sample queue, blocking until one becomes
// available, or until an error occurs. Returns S_FALSE on EOS.
- HRESULT Extract(nsRefPtr<IMediaSample>& aOutSample);
+ HRESULT Extract(RefPtr<IMediaSample>& aOutSample);
// Unblocks any threads waiting in GetSample().
// Clears mSample, which unblocks upstream stream.
void Flush();
// Opens up the sink to receive more samples in PutSample().
// Clears EOS flag.
void Reset();
@@ -49,17 +49,17 @@ public:
void SetEOS();
// Returns whether we're at end of stream.
bool AtEOS();
private:
// All data in this class is syncronized by mMonitor.
ReentrantMonitor mMonitor;
- nsRefPtr<IMediaSample> mSample;
+ RefPtr<IMediaSample> mSample;
// Format of the audio stream we're receiving.
WAVEFORMATEX mAudioFormat;
bool mIsFlushing;
bool mAtEOS;
};
--- a/dom/media/directshow/SourceFilter.cpp
+++ b/dom/media/directshow/SourceFilter.cpp
@@ -1,17 +1,17 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "SourceFilter.h"
#include "MediaResource.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "DirectShowUtils.h"
#include "MP3FrameParser.h"
#include "mozilla/Logging.h"
#include <algorithm>
using namespace mozilla::media;
namespace mozilla {
@@ -51,17 +51,17 @@ public:
{
MOZ_COUNT_CTOR(ReadRequest);
}
~ReadRequest() {
MOZ_COUNT_DTOR(ReadRequest);
}
- nsRefPtr<IMediaSample> mSample;
+ RefPtr<IMediaSample> mSample;
DWORD_PTR mDwUser;
uint32_t mOffset;
uint32_t mCount;
};
// A wrapper around media resource that presents only a partition of the
// underlying resource to the caller to use. The partition returned is from
// an offset to the end of stream, and this object deals with ensuring
--- a/dom/media/directshow/SourceFilter.h
+++ b/dom/media/directshow/SourceFilter.h
@@ -9,17 +9,17 @@
#include "BaseFilter.h"
#include "BasePin.h"
#include "MediaType.h"
#include "nsDeque.h"
#include "nsAutoPtr.h"
#include "DirectShowUtils.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
namespace mozilla {
class MediaResource;
class OutputPin;
// SourceFilter is an asynchronous DirectShow source filter which
--- a/dom/media/eme/MediaKeySession.cpp
+++ b/dom/media/eme/MediaKeySession.cpp
@@ -394,17 +394,17 @@ MediaKeySession::DispatchKeyMessage(Medi
}
void
MediaKeySession::DispatchKeyError(uint32_t aSystemCode)
{
EME_LOG("MediaKeySession[%p,'%s'] DispatchKeyError() systemCode=%u.",
this, NS_ConvertUTF16toUTF8(mSessionId).get(), aSystemCode);
- nsRefPtr<MediaKeyError> event(new MediaKeyError(this, aSystemCode));
+ RefPtr<MediaKeyError> event(new MediaKeyError(this, aSystemCode));
nsRefPtr<AsyncEventDispatcher> asyncDispatcher =
new AsyncEventDispatcher(this, event);
asyncDispatcher->PostDOMEvent();
}
void
MediaKeySession::DispatchKeyStatusesChange()
{
--- a/dom/media/eme/MediaKeys.h
+++ b/dom/media/eme/MediaKeys.h
@@ -6,17 +6,17 @@
#ifndef mozilla_dom_mediakeys_h__
#define mozilla_dom_mediakeys_h__
#include "nsIDOMMediaError.h"
#include "nsWrapperCache.h"
#include "nsISupports.h"
#include "mozilla/Attributes.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "nsCOMPtr.h"
#include "nsCycleCollectionParticipant.h"
#include "nsRefPtrHashtable.h"
#include "mozilla/dom/Promise.h"
#include "mozilla/dom/MediaKeysBinding.h"
#include "mozIGeckoMediaPluginService.h"
#include "mozilla/DetailedPromise.h"
--- a/dom/media/encoder/VP8TrackEncoder.cpp
+++ b/dom/media/encoder/VP8TrackEncoder.cpp
@@ -365,23 +365,23 @@ nsresult VP8TrackEncoder::PrepareRawFram
VP8LOG("Converting an %s frame to I420 failed\n", yuvFormat.c_str());
return NS_ERROR_FAILURE;
}
VP8LOG("Converted an %s frame to I420\n");
} else {
// Not YCbCr at all. Try to get access to the raw data and convert.
- nsRefPtr<SourceSurface> surf = img->GetAsSourceSurface();
+ RefPtr<SourceSurface> surf = img->GetAsSourceSurface();
if (!surf) {
VP8LOG("Getting surface from %s image failed\n", Stringify(format).c_str());
return NS_ERROR_FAILURE;
}
- nsRefPtr<DataSourceSurface> data = surf->GetDataSurface();
+ RefPtr<DataSourceSurface> data = surf->GetDataSurface();
if (!data) {
VP8LOG("Getting data surface from %s image with %s (%s) surface failed\n",
Stringify(format).c_str(), Stringify(surf->GetType()).c_str(),
Stringify(surf->GetFormat()).c_str());
return NS_ERROR_FAILURE;
}
DataSourceSurface::ScopedMap map(data, DataSourceSurface::READ);
--- a/dom/media/gmp/GMPAudioDecoderParent.h
+++ b/dom/media/gmp/GMPAudioDecoderParent.h
@@ -1,17 +1,17 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef GMPAudioDecoderParent_h_
#define GMPAudioDecoderParent_h_
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "gmp-audio-decode.h"
#include "gmp-audio-codec.h"
#include "mozilla/gmp/PGMPAudioDecoderParent.h"
#include "GMPMessageUtils.h"
#include "GMPAudioDecoderProxy.h"
namespace mozilla {
namespace gmp {
--- a/dom/media/gmp/GMPDecryptorParent.h
+++ b/dom/media/gmp/GMPDecryptorParent.h
@@ -2,17 +2,17 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef GMPDecryptorParent_h_
#define GMPDecryptorParent_h_
#include "mozilla/gmp/PGMPDecryptorParent.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "gmp-decryption.h"
#include "GMPDecryptorProxy.h"
namespace mozilla {
class CryptoSample;
namespace gmp {
--- a/dom/media/gmp/GMPVideoDecoderParent.h
+++ b/dom/media/gmp/GMPVideoDecoderParent.h
@@ -1,17 +1,17 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef GMPVideoDecoderParent_h_
#define GMPVideoDecoderParent_h_
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "gmp-video-decode.h"
#include "mozilla/gmp/PGMPVideoDecoderParent.h"
#include "GMPMessageUtils.h"
#include "GMPSharedMemManager.h"
#include "GMPUtils.h"
#include "GMPVideoHost.h"
#include "GMPVideoDecoderProxy.h"
#include "VideoUtils.h"
--- a/dom/media/gmp/GMPVideoEncoderParent.h
+++ b/dom/media/gmp/GMPVideoEncoderParent.h
@@ -1,17 +1,17 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef GMPVideoEncoderParent_h_
#define GMPVideoEncoderParent_h_
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "gmp-video-encode.h"
#include "mozilla/gmp/PGMPVideoEncoderParent.h"
#include "GMPMessageUtils.h"
#include "GMPSharedMemManager.h"
#include "GMPUtils.h"
#include "GMPVideoHost.h"
#include "GMPVideoEncoderProxy.h"
--- a/dom/media/mediasink/DecodedAudioDataSink.cpp
+++ b/dom/media/mediasink/DecodedAudioDataSink.cpp
@@ -257,17 +257,17 @@ DecodedAudioDataSink::SetPlaying(bool aP
}
nsresult
DecodedAudioDataSink::InitializeAudioStream()
{
// AudioStream initialization can block for extended periods in unusual
// circumstances, so we take care to drop the decoder monitor while
// initializing.
- nsRefPtr<AudioStream> audioStream(new AudioStream());
+ RefPtr<AudioStream> audioStream(new AudioStream());
nsresult rv = audioStream->Init(mInfo.mChannels, mInfo.mRate, mChannel);
if (NS_FAILED(rv)) {
audioStream->Shutdown();
return rv;
}
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
mAudioStream = audioStream;
--- a/dom/media/mediasource/MediaSourceDemuxer.h
+++ b/dom/media/mediasource/MediaSourceDemuxer.h
@@ -63,17 +63,17 @@ private:
TrackInfo* GetTrackInfo(TrackInfo::TrackType);
void DoAttachSourceBuffer(TrackBuffersManager* aSourceBuffer);
void DoDetachSourceBuffer(TrackBuffersManager* aSourceBuffer);
bool OnTaskQueue()
{
return !GetTaskQueue() || GetTaskQueue()->IsCurrentThreadIn();
}
- nsRefPtr<TaskQueue> mTaskQueue;
+ RefPtr<TaskQueue> mTaskQueue;
nsTArray<nsRefPtr<MediaSourceTrackDemuxer>> mDemuxers;
nsTArray<nsRefPtr<TrackBuffersManager>> mSourceBuffers;
MozPromiseHolder<InitPromise> mInitPromise;
// Monitor to protect members below across multiple threads.
mutable Monitor mMonitor;
--- a/dom/media/mediasource/TrackBuffersManager.h
+++ b/dom/media/mediasource/TrackBuffersManager.h
@@ -326,17 +326,17 @@ private:
// TaskQueue methods and objects.
AbstractThread* GetTaskQueue() {
return mTaskQueue;
}
bool OnTaskQueue()
{
return !GetTaskQueue() || GetTaskQueue()->IsCurrentThreadIn();
}
- nsRefPtr<TaskQueue> mTaskQueue;
+ RefPtr<TaskQueue> mTaskQueue;
media::TimeInterval mAppendWindow;
media::TimeUnit mTimestampOffset;
media::TimeUnit mLastTimestampOffset;
void RestoreCachedVariables();
// Strong references to external objects.
nsRefPtr<dom::SourceBufferAttributes> mSourceBufferAttributes;
--- a/dom/media/omx/MPAPI.h
+++ b/dom/media/omx/MPAPI.h
@@ -36,17 +36,17 @@ struct VideoFrame {
void *mData;
size_t mSize;
int32_t mStride;
int32_t mSliceHeight;
int32_t mRotation;
VideoPlane Y;
VideoPlane Cb;
VideoPlane Cr;
- nsRefPtr<mozilla::layers::TextureClient> mGraphicBuffer;
+ mozilla::RefPtr<mozilla::layers::TextureClient> mGraphicBuffer;
VideoFrame() :
mTimeUs(0),
mKeyFrame(false),
mShouldSkip(false),
mData(nullptr),
mSize(0),
mStride(0),
--- a/dom/media/omx/MediaCodecReader.cpp
+++ b/dom/media/omx/MediaCodecReader.cpp
@@ -313,28 +313,28 @@ MediaCodecReader::Shutdown()
ReleaseResources();
return MediaDecoderReader::Shutdown();
}
void
MediaCodecReader::DispatchAudioTask()
{
if (mAudioTrack.mTaskQueue) {
- nsRefPtr<nsIRunnable> task =
+ RefPtr<nsIRunnable> task =
NS_NewRunnableMethod(this,
&MediaCodecReader::DecodeAudioDataTask);
mAudioTrack.mTaskQueue->Dispatch(task.forget());
}
}
void
MediaCodecReader::DispatchVideoTask(int64_t aTimeThreshold)
{
if (mVideoTrack.mTaskQueue) {
- nsRefPtr<nsIRunnable> task =
+ RefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<int64_t>(this,
&MediaCodecReader::DecodeVideoFrameTask,
aTimeThreshold);
mVideoTrack.mTaskQueue->Dispatch(task.forget());
}
}
nsRefPtr<MediaDecoderReader::AudioDataPromise>
@@ -812,17 +812,17 @@ MediaCodecReader::TextureClientRecycleCa
FenceHandle handle = aClient->GetAndResetReleaseFenceHandle();
mPendingReleaseItems.AppendElement(ReleaseItem(index, handle));
mTextureClientIndexes.Remove(aClient);
}
if (mVideoTrack.mReleaseBufferTaskQueue->IsEmpty()) {
- nsRefPtr<nsIRunnable> task =
+ RefPtr<nsIRunnable> task =
NS_NewRunnableMethod(this,
&MediaCodecReader::WaitFenceAndReleaseOutputBuffer);
mVideoTrack.mReleaseBufferTaskQueue->Dispatch(task.forget());
}
}
void
MediaCodecReader::WaitFenceAndReleaseOutputBuffer()
@@ -915,17 +915,17 @@ MediaCodecReader::DecodeVideoFrameSync(i
if ((bufferInfo.mFlags & MediaCodec::BUFFER_FLAG_EOS) ||
(status == ERROR_END_OF_STREAM)) {
VideoQueue().Finish();
mVideoTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex);
return;
}
nsRefPtr<VideoData> v;
- nsRefPtr<TextureClient> textureClient;
+ RefPtr<TextureClient> textureClient;
sp<GraphicBuffer> graphicBuffer;
if (bufferInfo.mBuffer != nullptr) {
MOZ_ASSERT(mStreamSource);
// This is the approximate byte position in the stream.
int64_t pos = mStreamSource->Tell();
if (mVideoTrack.mNativeWindow != nullptr &&
mVideoTrack.mCodec->getOutputGraphicBufferFromIndex(bufferInfo.mIndex, &graphicBuffer) == OK &&
--- a/dom/media/omx/OmxDecoder.cpp
+++ b/dom/media/omx/OmxDecoder.cpp
@@ -626,17 +626,17 @@ bool OmxDecoder::ReadVideo(VideoFrame *a
if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
keyFrame = 0;
}
if (!mVideoBuffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)) {
unreadable = 0;
}
- nsRefPtr<mozilla::layers::TextureClient> textureClient;
+ RefPtr<mozilla::layers::TextureClient> textureClient;
if ((mVideoBuffer->graphicBuffer().get())) {
textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
}
if (textureClient) {
// Manually increment reference count to keep MediaBuffer alive
// during TextureClient is in use.
mVideoBuffer->add_ref();
--- a/dom/media/platforms/PlatformDecoderModule.h
+++ b/dom/media/platforms/PlatformDecoderModule.h
@@ -7,17 +7,17 @@
#if !defined(PlatformDecoderModule_h_)
#define PlatformDecoderModule_h_
#include "FlushableTaskQueue.h"
#include "MediaDecoderReader.h"
#include "mozilla/MozPromise.h"
#include "mozilla/layers/LayersTypes.h"
#include "nsTArray.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include <queue>
namespace mozilla {
class TrackInfo;
class AudioInfo;
class VideoInfo;
class MediaRawData;
--- a/dom/media/platforms/agnostic/BlankDecoderModule.cpp
+++ b/dom/media/platforms/agnostic/BlankDecoderModule.cpp
@@ -2,17 +2,17 @@
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaDecoderReader.h"
#include "PlatformDecoderModule.h"
#include "nsRect.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/CheckedInt.h"
#include "VideoUtils.h"
#include "ImageContainer.h"
#include "MediaInfo.h"
#include "mozilla/TaskQueue.h"
#include "TimeUnits.h"
namespace mozilla {
@@ -67,34 +67,34 @@ public:
MediaDataDecoderCallback* mCallback;
};
nsresult Input(MediaRawData* aSample) override
{
// The MediaDataDecoder must delete the sample when we're finished
// with it, so the OutputEvent stores it in an nsAutoPtr and deletes
// it once it's run.
- nsRefPtr<nsIRunnable> r(new OutputEvent(aSample, mCallback, mCreator));
+ RefPtr<nsIRunnable> r(new OutputEvent(aSample, mCallback, mCreator));
mTaskQueue->Dispatch(r.forget());
return NS_OK;
}
nsresult Flush() override {
mTaskQueue->Flush();
return NS_OK;
}
nsresult Drain() override {
mCallback->DrainComplete();
return NS_OK;
}
private:
nsAutoPtr<BlankMediaDataCreator> mCreator;
- nsRefPtr<FlushableTaskQueue> mTaskQueue;
+ RefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
TrackInfo::TrackType mType;
};
class BlankVideoDataCreator {
public:
BlankVideoDataCreator(uint32_t aFrameWidth,
uint32_t aFrameHeight,
@@ -153,17 +153,17 @@ public:
aDTS.ToMicroseconds(),
mPicture);
}
private:
VideoInfo mInfo;
gfx::IntRect mPicture;
uint32_t mFrameWidth;
uint32_t mFrameHeight;
- nsRefPtr<layers::ImageContainer> mImageContainer;
+ RefPtr<layers::ImageContainer> mImageContainer;
};
class BlankAudioDataCreator {
public:
BlankAudioDataCreator(uint32_t aChannelCount, uint32_t aSampleRate)
: mFrameSum(0), mChannelCount(aChannelCount), mSampleRate(aSampleRate)
{
--- a/dom/media/platforms/agnostic/OpusDecoder.cpp
+++ b/dom/media/platforms/agnostic/OpusDecoder.cpp
@@ -275,17 +275,17 @@ void
OpusDataDecoder::DoDrain()
{
mCallback->DrainComplete();
}
nsresult
OpusDataDecoder::Drain()
{
- nsRefPtr<nsIRunnable> runnable(
+ RefPtr<nsIRunnable> runnable(
NS_NewRunnableMethod(this, &OpusDataDecoder::DoDrain));
mTaskQueue->Dispatch(runnable.forget());
return NS_OK;
}
nsresult
OpusDataDecoder::Flush()
{
--- a/dom/media/platforms/agnostic/OpusDecoder.h
+++ b/dom/media/platforms/agnostic/OpusDecoder.h
@@ -33,17 +33,17 @@ public:
private:
nsresult DecodeHeader(const unsigned char* aData, size_t aLength);
void Decode (MediaRawData* aSample);
int DoDecode (MediaRawData* aSample);
void DoDrain ();
const AudioInfo& mInfo;
- nsRefPtr<FlushableTaskQueue> mTaskQueue;
+ RefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
// Opus decoder state
nsAutoPtr<OpusParser> mOpusParser;
OpusMSDecoder* mOpusDecoder;
uint16_t mSkip; // Samples left to trim before playback.
bool mDecodedHeader;
--- a/dom/media/platforms/agnostic/VPXDecoder.h
+++ b/dom/media/platforms/agnostic/VPXDecoder.h
@@ -44,17 +44,17 @@ public:
private:
void DecodeFrame (MediaRawData* aSample);
int DoDecodeFrame (MediaRawData* aSample);
void DoDrain ();
void OutputDelayedFrames ();
nsRefPtr<ImageContainer> mImageContainer;
- nsRefPtr<FlushableTaskQueue> mTaskQueue;
+ RefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
// VPx decoder state
vpx_codec_ctx_t mVPX;
const VideoInfo& mInfo;
int mCodec;
--- a/dom/media/platforms/agnostic/VorbisDecoder.h
+++ b/dom/media/platforms/agnostic/VorbisDecoder.h
@@ -36,17 +36,17 @@ public:
private:
nsresult DecodeHeader(const unsigned char* aData, size_t aLength);
void Decode (MediaRawData* aSample);
int DoDecode (MediaRawData* aSample);
void DoDrain ();
const AudioInfo& mInfo;
- nsRefPtr<FlushableTaskQueue> mTaskQueue;
+ RefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
// Vorbis decoder state
vorbis_info mVorbisInfo;
vorbis_comment mVorbisComment;
vorbis_dsp_state mVorbisDsp;
vorbis_block mVorbisBlock;
--- a/dom/media/platforms/agnostic/eme/SamplesWaitingForKey.cpp
+++ b/dom/media/platforms/agnostic/eme/SamplesWaitingForKey.cpp
@@ -46,17 +46,17 @@ SamplesWaitingForKey::WaitIfKeyNotUsable
void
SamplesWaitingForKey::NotifyUsable(const CencKeyId& aKeyId)
{
MutexAutoLock lock(mMutex);
size_t i = 0;
while (i < mSamples.Length()) {
if (aKeyId == mSamples[i]->mCrypto.mKeyId) {
- nsRefPtr<nsIRunnable> task;
+ RefPtr<nsIRunnable> task;
task = NS_NewRunnableMethodWithArg<nsRefPtr<MediaRawData>>(mDecoder,
&MediaDataDecoder::Input,
nsRefPtr<MediaRawData>(mSamples[i]));
mSamples.RemoveElementAt(i);
mTaskQueue->Dispatch(task.forget());
} else {
i++;
}
--- a/dom/media/platforms/android/AndroidDecoderModule.cpp
+++ b/dom/media/platforms/android/AndroidDecoderModule.cpp
@@ -203,17 +203,17 @@ protected:
}
mGLContext = GLContextProvider::CreateHeadless(CreateContextFlags::NONE);
return mGLContext;
}
layers::ImageContainer* mImageContainer;
const VideoInfo& mConfig;
- nsRefPtr<AndroidSurfaceTexture> mSurfaceTexture;
+ RefPtr<AndroidSurfaceTexture> mSurfaceTexture;
nsRefPtr<GLContext> mGLContext;
};
class AudioDataDecoder : public MediaCodecDataDecoder {
public:
AudioDataDecoder(const AudioInfo& aConfig, MediaFormat::Param aFormat, MediaDataDecoderCallback* aCallback)
: MediaCodecDataDecoder(MediaData::Type::AUDIO_DATA, aConfig.mMimeType, aFormat, aCallback)
--- a/dom/media/platforms/gonk/GonkAudioDecoderManager.h
+++ b/dom/media/platforms/gonk/GonkAudioDecoderManager.h
@@ -2,17 +2,17 @@
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(GonkAudioDecoderManager_h_)
#define GonkAudioDecoderManager_h_
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "GonkMediaDataDecoder.h"
using namespace android;
namespace android {
struct MOZ_EXPORT ALooper;
class MOZ_EXPORT MediaBuffer;
} // namespace android
--- a/dom/media/platforms/gonk/GonkMediaDataDecoder.h
+++ b/dom/media/platforms/gonk/GonkMediaDataDecoder.h
@@ -93,17 +93,17 @@ private:
// Called on the task queue. Extracts output if available, and delivers
// it to the reader. Called after ProcessDecode() and ProcessDrain().
void ProcessOutput();
// Called on the task queue. Orders the Gonk to drain, and then extracts
// all available output.
void ProcessDrain();
- nsRefPtr<FlushableTaskQueue> mTaskQueue;
+ RefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
nsAutoPtr<GonkDecoderManager> mManager;
// The last offset into the media resource that was passed into Input().
// This is used to approximate the decoder's position in the media resource.
int64_t mLastStreamOffset;
// Set it ture when there is no input data
--- a/dom/media/platforms/gonk/GonkVideoDecoderManager.cpp
+++ b/dom/media/platforms/gonk/GonkVideoDecoderManager.cpp
@@ -223,17 +223,17 @@ GonkVideoDecoderManager::CreateVideoData
// and we will preserve the ratio of the crop rectangle as it
// was reported relative to the picture size reported by the container.
picture.x = (mPicture.x * mFrameInfo.mWidth) / mInitialFrame.width;
picture.y = (mPicture.y * mFrameInfo.mHeight) / mInitialFrame.height;
picture.width = (mFrameInfo.mWidth * mPicture.width) / mInitialFrame.width;
picture.height = (mFrameInfo.mHeight * mPicture.height) / mInitialFrame.height;
}
- nsRefPtr<mozilla::layers::TextureClient> textureClient;
+ RefPtr<mozilla::layers::TextureClient> textureClient;
if ((mVideoBuffer->graphicBuffer().get())) {
textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
}
if (textureClient) {
GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
grallocClient->SetMediaBuffer(mVideoBuffer);
--- a/dom/media/platforms/gonk/GonkVideoDecoderManager.h
+++ b/dom/media/platforms/gonk/GonkVideoDecoderManager.h
@@ -5,17 +5,17 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(GonkVideoDecoderManager_h_)
#define GonkVideoDecoderManager_h_
#include <set>
#include "nsRect.h"
#include "GonkMediaDataDecoder.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "I420ColorConverterHelper.h"
#include "MediaCodecProxy.h"
#include <stagefright/foundation/AHandler.h>
#include "GonkNativeWindow.h"
#include "GonkNativeWindowClient.h"
#include "mozilla/layers/FenceUtils.h"
#include <ui/Fence.h>
--- a/dom/media/platforms/wmf/DXVA2Manager.cpp
+++ b/dom/media/platforms/wmf/DXVA2Manager.cpp
@@ -93,17 +93,17 @@ public:
Image** aOutImage) override;
bool SupportsConfig(IMFMediaType* aType, float aFramerate) override;
private:
nsRefPtr<IDirect3D9Ex> mD3D9;
nsRefPtr<IDirect3DDevice9Ex> mDevice;
nsRefPtr<IDirect3DDeviceManager9> mDeviceManager;
- nsRefPtr<D3D9RecycleAllocator> mTextureClientAllocator;
+ RefPtr<D3D9RecycleAllocator> mTextureClientAllocator;
nsRefPtr<IDirectXVideoDecoderService> mDecoderService;
GUID mDecoderGUID;
UINT32 mResetToken;
bool mFirstFrame;
bool mIsAMDPreUVD4;
};
void GetDXVA2ExtendedFormatFromMFMediaType(IMFMediaType *pType,
@@ -485,36 +485,36 @@ public:
bool IsD3D11() override { return true; }
bool SupportsConfig(IMFMediaType* aType, float aFramerate) override;
private:
HRESULT CreateFormatConverter();
- HRESULT CreateOutputSample(nsRefPtr<IMFSample>& aSample,
+ HRESULT CreateOutputSample(RefPtr<IMFSample>& aSample,
ID3D11Texture2D* aTexture);
- nsRefPtr<ID3D11Device> mDevice;
- nsRefPtr<ID3D11DeviceContext> mContext;
- nsRefPtr<IMFDXGIDeviceManager> mDXGIDeviceManager;
- nsRefPtr<MFTDecoder> mTransform;
- nsRefPtr<D3D11RecycleAllocator> mTextureClientAllocator;
+ RefPtr<ID3D11Device> mDevice;
+ RefPtr<ID3D11DeviceContext> mContext;
+ RefPtr<IMFDXGIDeviceManager> mDXGIDeviceManager;
+ RefPtr<MFTDecoder> mTransform;
+ RefPtr<D3D11RecycleAllocator> mTextureClientAllocator;
GUID mDecoderGUID;
uint32_t mWidth;
uint32_t mHeight;
UINT mDeviceManagerToken;
bool mIsAMDPreUVD4;
};
bool
D3D11DXVA2Manager::SupportsConfig(IMFMediaType* aType, float aFramerate)
{
- nsRefPtr<ID3D11VideoDevice> videoDevice;
- HRESULT hr = mDevice->QueryInterface(static_cast<ID3D11VideoDevice**>(getter_AddRefs(videoDevice)));
+ RefPtr<ID3D11VideoDevice> videoDevice;
+ HRESULT hr = mDevice->QueryInterface(static_cast<ID3D11VideoDevice**>(byRef(videoDevice)));
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
D3D11_VIDEO_DECODER_DESC desc;
desc.Guid = mDecoderGUID;
UINT32 width = 0;
UINT32 height = 0;
hr = MFGetAttributeSize(aType, MF_MT_FRAME_SIZE, &width, &height);
@@ -576,23 +576,23 @@ D3D11DXVA2Manager::Init(nsACString& aFai
HRESULT hr;
mDevice = gfxWindowsPlatform::GetPlatform()->CreateD3D11DecoderDevice();
if (!mDevice) {
aFailureReason.AssignLiteral("Failed to create D3D11 device for decoder");
return E_FAIL;
}
- mDevice->GetImmediateContext(getter_AddRefs(mContext));
+ mDevice->GetImmediateContext(byRef(mContext));
if (!mContext) {
aFailureReason.AssignLiteral("Failed to get immediate context for d3d11 device");
return E_FAIL;
}
- hr = wmf::MFCreateDXGIDeviceManager(&mDeviceManagerToken, getter_AddRefs(mDXGIDeviceManager));
+ hr = wmf::MFCreateDXGIDeviceManager(&mDeviceManagerToken, byRef(mDXGIDeviceManager));
if (!SUCCEEDED(hr)) {
aFailureReason = nsPrintfCString("MFCreateDXGIDeviceManager failed with code %X", hr);
return hr;
}
hr = mDXGIDeviceManager->ResetDevice(mDevice, mDeviceManagerToken);
if (!SUCCEEDED(hr)) {
aFailureReason = nsPrintfCString("IMFDXGIDeviceManager::ResetDevice failed with code %X", hr);
@@ -607,18 +607,18 @@ D3D11DXVA2Manager::Init(nsACString& aFai
}
hr = mTransform->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, ULONG_PTR(mDXGIDeviceManager.get()));
if (!SUCCEEDED(hr)) {
aFailureReason = nsPrintfCString("MFTDecoder::SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER) failed with code %X", hr);
return hr;
}
- nsRefPtr<ID3D11VideoDevice> videoDevice;
- hr = mDevice->QueryInterface(static_cast<ID3D11VideoDevice**>(getter_AddRefs(videoDevice)));
+ RefPtr<ID3D11VideoDevice> videoDevice;
+ hr = mDevice->QueryInterface(static_cast<ID3D11VideoDevice**>(byRef(videoDevice)));
if (!SUCCEEDED(hr)) {
aFailureReason = nsPrintfCString("QI to ID3D11VideoDevice failed with code %X", hr);
return hr;
}
bool found = false;
UINT profileCount = videoDevice->GetVideoDecoderProfileCount();
for (UINT i = 0; i < profileCount; i++) {
@@ -641,18 +641,18 @@ D3D11DXVA2Manager::Init(nsACString& aFai
aFailureReason = nsPrintfCString("CheckVideoDecoderFormat failed with code %X", hr);
return hr;
}
if (!nv12Support) {
aFailureReason.AssignLiteral("Decoder doesn't support NV12 surfaces");
return E_FAIL;
}
- nsRefPtr<IDXGIDevice> dxgiDevice;
- hr = mDevice->QueryInterface(static_cast<IDXGIDevice**>(getter_AddRefs(dxgiDevice)));
+ RefPtr<IDXGIDevice> dxgiDevice;
+ hr = mDevice->QueryInterface(static_cast<IDXGIDevice**>(byRef(dxgiDevice)));
if (!SUCCEEDED(hr)) {
aFailureReason = nsPrintfCString("QI to IDXGIDevice failed with code %X", hr);
return hr;
}
nsRefPtr<IDXGIAdapter> adapter;
hr = dxgiDevice->GetAdapter(adapter.StartAssignment());
if (!SUCCEEDED(hr)) {
@@ -679,24 +679,24 @@ D3D11DXVA2Manager::Init(nsACString& aFai
mTextureClientAllocator = new D3D11RecycleAllocator(layers::ImageBridgeChild::GetSingleton(),
mDevice);
mTextureClientAllocator->SetMaxPoolSize(5);
return S_OK;
}
HRESULT
-D3D11DXVA2Manager::CreateOutputSample(nsRefPtr<IMFSample>& aSample, ID3D11Texture2D* aTexture)
+D3D11DXVA2Manager::CreateOutputSample(RefPtr<IMFSample>& aSample, ID3D11Texture2D* aTexture)
{
- nsRefPtr<IMFSample> sample;
- HRESULT hr = wmf::MFCreateSample(getter_AddRefs(sample));
+ RefPtr<IMFSample> sample;
+ HRESULT hr = wmf::MFCreateSample(byRef(sample));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
- nsRefPtr<IMFMediaBuffer> buffer;
- hr = wmf::MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), aTexture, 0, FALSE, getter_AddRefs(buffer));
+ RefPtr<IMFMediaBuffer> buffer;
+ hr = wmf::MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), aTexture, 0, FALSE, byRef(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
sample->AddBuffer(buffer);
aSample = sample;
return S_OK;
}
@@ -724,23 +724,23 @@ D3D11DXVA2Manager::CopyToImage(IMFSample
HRESULT hr = videoImage->SetData(D3D11ShareHandleImage::Data(mTextureClientAllocator,
gfx::IntSize(mWidth, mHeight),
aRegion));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = mTransform->Input(aVideoSample);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
- nsRefPtr<IMFSample> sample;
- nsRefPtr<ID3D11Texture2D> texture = videoImage->GetTexture();
+ RefPtr<IMFSample> sample;
+ RefPtr<ID3D11Texture2D> texture = videoImage->GetTexture();
hr = CreateOutputSample(sample, texture);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
- nsRefPtr<IDXGIKeyedMutex> keyedMutex;
- hr = texture->QueryInterface(static_cast<IDXGIKeyedMutex**>(getter_AddRefs(keyedMutex)));
+ RefPtr<IDXGIKeyedMutex> keyedMutex;
+ hr = texture->QueryInterface(static_cast<IDXGIKeyedMutex**>(byRef(keyedMutex)));
NS_ENSURE_TRUE(SUCCEEDED(hr) && keyedMutex, hr);
hr = keyedMutex->AcquireSync(0, INFINITE);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = mTransform->Output(&sample);
keyedMutex->ReleaseSync(0);
@@ -767,45 +767,45 @@ HRESULT ConfigureOutput(IMFMediaType* aO
}
HRESULT
D3D11DXVA2Manager::ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
{
mWidth = aWidth;
mHeight = aHeight;
- nsRefPtr<IMFMediaType> inputType;
- HRESULT hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+ RefPtr<IMFMediaType> inputType;
+ HRESULT hr = wmf::MFCreateMediaType(byRef(inputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
- nsRefPtr<IMFAttributes> attr = mTransform->GetAttributes();
+ RefPtr<IMFAttributes> attr = mTransform->GetAttributes();
hr = attr->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = attr->SetUINT32(MF_LOW_LATENCY, FALSE);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, aWidth, aHeight);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
- nsRefPtr<IMFMediaType> outputType;
- hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
+ RefPtr<IMFMediaType> outputType;
+ hr = wmf::MFCreateMediaType(byRef(outputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = outputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_ARGB32);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
--- a/dom/media/platforms/wmf/MFTDecoder.cpp
+++ b/dom/media/platforms/wmf/MFTDecoder.cpp
@@ -30,17 +30,17 @@ HRESULT
MFTDecoder::Create(const GUID& aMFTClsID)
{
// Create the IMFTransform to do the decoding.
HRESULT hr;
hr = CoCreateInstance(aMFTClsID,
nullptr,
CLSCTX_INPROC_SERVER,
IID_IMFTransform,
- reinterpret_cast<void**>(static_cast<IMFTransform**>(getter_AddRefs(mDecoder))));
+ reinterpret_cast<void**>(static_cast<IMFTransform**>(byRef(mDecoder))));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
return S_OK;
}
HRESULT
MFTDecoder::SetMediaTypes(IMFMediaType* aInputType,
IMFMediaType* aOutputType,
@@ -66,33 +66,33 @@ MFTDecoder::SetMediaTypes(IMFMediaType*
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
return S_OK;
}
already_AddRefed<IMFAttributes>
MFTDecoder::GetAttributes()
{
- nsRefPtr<IMFAttributes> attr;
- HRESULT hr = mDecoder->GetAttributes(getter_AddRefs(attr));
+ RefPtr<IMFAttributes> attr;
+ HRESULT hr = mDecoder->GetAttributes(byRef(attr));
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
return attr.forget();
}
HRESULT
MFTDecoder::SetDecoderOutputType(ConfigureOutputCallback aCallback, void* aData)
{
NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
// Iterate the enumerate the output types, until we find one compatible
// with what we need.
HRESULT hr;
- nsRefPtr<IMFMediaType> outputType;
+ RefPtr<IMFMediaType> outputType;
UINT32 typeIndex = 0;
- while (SUCCEEDED(mDecoder->GetOutputAvailableType(0, typeIndex++, getter_AddRefs(outputType)))) {
+ while (SUCCEEDED(mDecoder->GetOutputAvailableType(0, typeIndex++, byRef(outputType)))) {
BOOL resultMatch;
hr = mOutputType->Compare(outputType, MF_ATTRIBUTES_MATCH_OUR_ITEMS, &resultMatch);
if (SUCCEEDED(hr) && resultMatch == TRUE) {
if (aCallback) {
hr = aCallback(outputType, aData);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
}
hr = mDecoder->SetOutputType(0, outputType, 0);
@@ -118,29 +118,29 @@ MFTDecoder::SendMFTMessage(MFT_MESSAGE_T
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
return S_OK;
}
HRESULT
MFTDecoder::CreateInputSample(const uint8_t* aData,
uint32_t aDataSize,
int64_t aTimestamp,
- nsRefPtr<IMFSample>* aOutSample)
+ RefPtr<IMFSample>* aOutSample)
{
NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
HRESULT hr;
- nsRefPtr<IMFSample> sample;
- hr = wmf::MFCreateSample(getter_AddRefs(sample));
+ RefPtr<IMFSample> sample;
+ hr = wmf::MFCreateSample(byRef(sample));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
- nsRefPtr<IMFMediaBuffer> buffer;
+ RefPtr<IMFMediaBuffer> buffer;
int32_t bufferSize = std::max<uint32_t>(uint32_t(mInputStreamInfo.cbSize), aDataSize);
UINT32 alignment = (mInputStreamInfo.cbAlignment > 1) ? mInputStreamInfo.cbAlignment - 1 : 0;
- hr = wmf::MFCreateAlignedMemoryBuffer(bufferSize, alignment, getter_AddRefs(buffer));
+ hr = wmf::MFCreateAlignedMemoryBuffer(bufferSize, alignment, byRef(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
DWORD maxLength = 0;
DWORD currentLength = 0;
BYTE* dst = nullptr;
hr = buffer->Lock(&dst, &maxLength, &currentLength);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
@@ -160,50 +160,50 @@ MFTDecoder::CreateInputSample(const uint
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
*aOutSample = sample.forget();
return S_OK;
}
HRESULT
-MFTDecoder::CreateOutputSample(nsRefPtr<IMFSample>* aOutSample)
+MFTDecoder::CreateOutputSample(RefPtr<IMFSample>* aOutSample)
{
NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
HRESULT hr;
- nsRefPtr<IMFSample> sample;
- hr = wmf::MFCreateSample(getter_AddRefs(sample));
+ RefPtr<IMFSample> sample;
+ hr = wmf::MFCreateSample(byRef(sample));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
- nsRefPtr<IMFMediaBuffer> buffer;
+ RefPtr<IMFMediaBuffer> buffer;
int32_t bufferSize = mOutputStreamInfo.cbSize;
UINT32 alignment = (mOutputStreamInfo.cbAlignment > 1) ? mOutputStreamInfo.cbAlignment - 1 : 0;
- hr = wmf::MFCreateAlignedMemoryBuffer(bufferSize, alignment, getter_AddRefs(buffer));
+ hr = wmf::MFCreateAlignedMemoryBuffer(bufferSize, alignment, byRef(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = sample->AddBuffer(buffer);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
*aOutSample = sample.forget();
return S_OK;
}
HRESULT
-MFTDecoder::Output(nsRefPtr<IMFSample>* aOutput)
+MFTDecoder::Output(RefPtr<IMFSample>* aOutput)
{
NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
HRESULT hr;
MFT_OUTPUT_DATA_BUFFER output = {0};
bool providedSample = false;
- nsRefPtr<IMFSample> sample;
+ RefPtr<IMFSample> sample;
if (*aOutput) {
output.pSample = *aOutput;
providedSample = true;
} else if (!mMFTProvidesOutputSamples) {
hr = CreateOutputSample(&sample);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
output.pSample = sample;
}
@@ -258,17 +258,17 @@ MFTDecoder::Output(nsRefPtr<IMFSample>*
HRESULT
MFTDecoder::Input(const uint8_t* aData,
uint32_t aDataSize,
int64_t aTimestamp)
{
NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
- nsRefPtr<IMFSample> input;
+ RefPtr<IMFSample> input;
HRESULT hr = CreateInputSample(aData, aDataSize, aTimestamp, &input);
NS_ENSURE_TRUE(SUCCEEDED(hr) && input != nullptr, hr);
return Input(input);
}
HRESULT
MFTDecoder::Input(IMFSample* aSample)
@@ -290,15 +290,15 @@ MFTDecoder::Flush()
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
mDiscontinuity = true;
return S_OK;
}
HRESULT
-MFTDecoder::GetOutputMediaType(nsRefPtr<IMFMediaType>& aMediaType)
+MFTDecoder::GetOutputMediaType(RefPtr<IMFMediaType>& aMediaType)
{
NS_ENSURE_TRUE(mDecoder, E_POINTER);
- return mDecoder->GetOutputCurrentType(0, getter_AddRefs(aMediaType));
+ return mDecoder->GetOutputCurrentType(0, byRef(aMediaType));
}
} // namespace mozilla
--- a/dom/media/platforms/wmf/MFTDecoder.h
+++ b/dom/media/platforms/wmf/MFTDecoder.h
@@ -3,17 +3,17 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(MFTDecoder_h_)
#define MFTDecoder_h_
#include "WMF.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/ReentrantMonitor.h"
#include "nsIThread.h"
namespace mozilla {
class MFTDecoder final {
~MFTDecoder();
@@ -42,67 +42,67 @@ public:
ConfigureOutputCallback aCallback = nullptr,
void* aData = nullptr);
// Returns the MFT's IMFAttributes object.
already_AddRefed<IMFAttributes> GetAttributes();
// Retrieves the media type being output. This may not be valid until
// the first sample is decoded.
- HRESULT GetOutputMediaType(nsRefPtr<IMFMediaType>& aMediaType);
+ HRESULT GetOutputMediaType(RefPtr<IMFMediaType>& aMediaType);
// Submits data into the MFT for processing.
//
// Returns:
// - MF_E_NOTACCEPTING if the decoder can't accept input. The data
// must be resubmitted after Output() stops producing output.
HRESULT Input(const uint8_t* aData,
uint32_t aDataSize,
int64_t aTimestampUsecs);
HRESULT Input(IMFSample* aSample);
HRESULT CreateInputSample(const uint8_t* aData,
uint32_t aDataSize,
int64_t aTimestampUsecs,
- nsRefPtr<IMFSample>* aOutSample);
+ RefPtr<IMFSample>* aOutSample);
// Retrieves output from the MFT. Call this once Input() returns
// MF_E_NOTACCEPTING. Some MFTs with hardware acceleration (the H.264
// decoder MFT in particular) can't handle it if clients hold onto
// references to the output IMFSample, so don't do that.
//
// Returns:
// - MF_E_TRANSFORM_STREAM_CHANGE if the underlying stream output
// type changed. Retrieve the output media type and reconfig client,
// else you may misinterpret the MFT's output.
// - MF_E_TRANSFORM_NEED_MORE_INPUT if no output can be produced
// due to lack of input.
// - S_OK if an output frame is produced.
- HRESULT Output(nsRefPtr<IMFSample>* aOutput);
+ HRESULT Output(RefPtr<IMFSample>* aOutput);
// Sends a flush message to the MFT. This causes it to discard all
// input data. Use before seeking.
HRESULT Flush();
// Sends a message to the MFT.
HRESULT SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData);
HRESULT SetDecoderOutputType(ConfigureOutputCallback aCallback, void* aData);
private:
- HRESULT CreateOutputSample(nsRefPtr<IMFSample>* aOutSample);
+ HRESULT CreateOutputSample(RefPtr<IMFSample>* aOutSample);
MFT_INPUT_STREAM_INFO mInputStreamInfo;
MFT_OUTPUT_STREAM_INFO mOutputStreamInfo;
- nsRefPtr<IMFTransform> mDecoder;
+ RefPtr<IMFTransform> mDecoder;
- nsRefPtr<IMFMediaType> mOutputType;
+ RefPtr<IMFMediaType> mOutputType;
// True if the IMFTransform allocates the samples that it returns.
bool mMFTProvidesOutputSamples;
// True if we need to mark the next sample as a discontinuity.
bool mDiscontinuity;
};
--- a/dom/media/platforms/wmf/WMFAudioMFTManager.cpp
+++ b/dom/media/platforms/wmf/WMFAudioMFTManager.cpp
@@ -113,25 +113,25 @@ WMFAudioMFTManager::GetMediaSubtypeGUID(
};
}
bool
WMFAudioMFTManager::Init()
{
NS_ENSURE_TRUE(mStreamType != Unknown, false);
- nsRefPtr<MFTDecoder> decoder(new MFTDecoder());
+ RefPtr<MFTDecoder> decoder(new MFTDecoder());
HRESULT hr = decoder->Create(GetMFTGUID());
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
// Setup input/output media types
- nsRefPtr<IMFMediaType> inputType;
+ RefPtr<IMFMediaType> inputType;
- hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+ hr = wmf::MFCreateMediaType(byRef(inputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
hr = inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID());
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
@@ -146,18 +146,18 @@ WMFAudioMFTManager::Init()
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
hr = inputType->SetBlob(MF_MT_USER_DATA,
mUserData.Elements(),
mUserData.Length());
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
}
- nsRefPtr<IMFMediaType> outputType;
- hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
+ RefPtr<IMFMediaType> outputType;
+ hr = wmf::MFCreateMediaType(byRef(outputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
hr = outputType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
@@ -180,17 +180,17 @@ WMFAudioMFTManager::Input(MediaRawData*
aSample->mTime);
}
HRESULT
WMFAudioMFTManager::UpdateOutputType()
{
HRESULT hr;
- nsRefPtr<IMFMediaType> type;
+ RefPtr<IMFMediaType> type;
hr = mDecoder->GetOutputMediaType(type);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = type->GetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, &mAudioRate);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = type->GetUINT32(MF_MT_AUDIO_NUM_CHANNELS, &mAudioChannels);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
@@ -198,17 +198,17 @@ WMFAudioMFTManager::UpdateOutputType()
return S_OK;
}
HRESULT
WMFAudioMFTManager::Output(int64_t aStreamOffset,
nsRefPtr<MediaData>& aOutData)
{
aOutData = nullptr;
- nsRefPtr<IMFSample> sample;
+ RefPtr<IMFSample> sample;
HRESULT hr;
int typeChangeCount = 0;
while (true) {
hr = mDecoder->Output(&sample);
if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
return hr;
}
if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
@@ -221,18 +221,18 @@ WMFAudioMFTManager::Output(int64_t aStre
++typeChangeCount;
continue;
}
break;
}
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
- nsRefPtr<IMFMediaBuffer> buffer;
- hr = sample->ConvertToContiguousBuffer(getter_AddRefs(buffer));
+ RefPtr<IMFMediaBuffer> buffer;
+ hr = sample->ConvertToContiguousBuffer(byRef(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
BYTE* data = nullptr; // Note: *data will be owned by the IMFMediaBuffer, we don't need to free it.
DWORD maxLength = 0, currentLength = 0;
hr = buffer->Lock(&data, &maxLength, &currentLength);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
// Sometimes when starting decoding, the AAC decoder gives us samples
--- a/dom/media/platforms/wmf/WMFAudioMFTManager.h
+++ b/dom/media/platforms/wmf/WMFAudioMFTManager.h
@@ -4,17 +4,17 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(WMFAudioOutputSource_h_)
#define WMFAudioOutputSource_h_
#include "WMF.h"
#include "MFTDecoder.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "WMFMediaDataDecoder.h"
extern const GUID CLSID_WebmMfVp8Dec;
extern const GUID CLSID_WebmMfVp9Dec;
namespace mozilla {
class WMFAudioMFTManager : public MFTManager {
--- a/dom/media/platforms/wmf/WMFDecoderModule.cpp
+++ b/dom/media/platforms/wmf/WMFDecoderModule.cpp
@@ -125,17 +125,17 @@ WMFDecoderModule::CreateAudioDecoder(con
}
static bool
CanCreateMFTDecoder(const GUID& aGuid)
{
if (FAILED(wmf::MFStartup())) {
return false;
}
- nsRefPtr<MFTDecoder> decoder(new MFTDecoder());
+ RefPtr<MFTDecoder> decoder(new MFTDecoder());
bool hasH264 = SUCCEEDED(decoder->Create(aGuid));
wmf::MFShutdown();
return hasH264;
}
template<const GUID& aGuid>
static bool
CanCreateWMFDecoder()
--- a/dom/media/platforms/wmf/WMFMediaDataDecoder.h
+++ b/dom/media/platforms/wmf/WMFMediaDataDecoder.h
@@ -5,17 +5,17 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(WMFMediaDataDecoder_h_)
#define WMFMediaDataDecoder_h_
#include "WMF.h"
#include "MFTDecoder.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "PlatformDecoderModule.h"
namespace mozilla {
// Encapsulates the initialization of the MFTDecoder appropriate for decoding
// a given stream, and the process of converting the IMFSample produced
// by the MFT into a MediaData object.
class MFTManager {
@@ -49,17 +49,17 @@ public:
virtual void Shutdown() = 0;
virtual bool IsHardwareAccelerated(nsACString& aFailureReason) const { return false; }
virtual TrackInfo::TrackType GetType() = 0;
protected:
// IMFTransform wrapper that performs the decoding.
- nsRefPtr<MFTDecoder> mDecoder;
+ RefPtr<MFTDecoder> mDecoder;
};
// Decodes audio and video using Windows Media Foundation. Samples are decoded
// using the MFTDecoder created by the MFTManager. This class implements
// the higher-level logic that drives mapping the MFT to the async
// MediaDataDecoder interface. The specifics of decoding the exact stream
// type are handled by MFTManager and the MFTDecoder it creates.
class WMFMediaDataDecoder : public MediaDataDecoder {
@@ -96,17 +96,17 @@ private:
void ProcessFlush();
// Called on the task queue. Orders the MFT to drain, and then extracts
// all available output.
void ProcessDrain();
void ProcessShutdown();
- nsRefPtr<FlushableTaskQueue> mTaskQueue;
+ RefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
nsAutoPtr<MFTManager> mMFTManager;
// The last offset into the media resource that was passed into Input().
// This is used to approximate the decoder's position in the media resource.
int64_t mLastStreamOffset;
--- a/dom/media/platforms/wmf/WMFUtils.cpp
+++ b/dom/media/platforms/wmf/WMFUtils.cpp
@@ -2,17 +2,17 @@
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "WMFUtils.h"
#include <stdint.h>
#include "mozilla/ArrayUtils.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
#include "mozilla/WindowsVersion.h"
#include "mozilla/Logging.h"
#include "nsThreadUtils.h"
#include "nsWindowsHelpers.h"
#include "mozilla/CheckedInt.h"
#include "VideoUtils.h"
#include <initguid.h>
#include "nsTArray.h"
--- a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
@@ -200,22 +200,22 @@ WMFVideoMFTManager::Init()
}
bool
WMFVideoMFTManager::InitInternal(bool aForceD3D9)
{
mUseHwAccel = false; // default value; changed if D3D setup succeeds.
bool useDxva = InitializeDXVA(aForceD3D9);
- nsRefPtr<MFTDecoder> decoder(new MFTDecoder());
+ RefPtr<MFTDecoder> decoder(new MFTDecoder());
HRESULT hr = decoder->Create(GetMFTGUID());
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
- nsRefPtr<IMFAttributes> attr(decoder->GetAttributes());
+ RefPtr<IMFAttributes> attr(decoder->GetAttributes());
UINT32 aware = 0;
if (attr) {
attr->GetUINT32(MF_SA_D3D_AWARE, &aware);
attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
WMFDecoderModule::GetNumDecoderThreads());
hr = attr->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
if (SUCCEEDED(hr)) {
LOG("Enabling Low Latency Mode");
@@ -260,31 +260,31 @@ WMFVideoMFTManager::InitInternal(bool aF
return true;
}
HRESULT
WMFVideoMFTManager::SetDecoderMediaTypes()
{
// Setup the input/output media types.
- nsRefPtr<IMFMediaType> inputType;
- HRESULT hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+ RefPtr<IMFMediaType> inputType;
+ HRESULT hr = wmf::MFCreateMediaType(byRef(inputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID());
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
- nsRefPtr<IMFMediaType> outputType;
- hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
+ RefPtr<IMFMediaType> outputType;
+ hr = wmf::MFCreateMediaType(byRef(outputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
GUID outputSubType = mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12;
hr = outputType->SetGUID(MF_MT_SUBTYPE, outputSubType);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
@@ -337,17 +337,17 @@ WMFVideoMFTManager::CanUseDXVA(IMFMediaT
float framerate = 1000000.0 / mLastDuration;
return mDXVA2Manager->SupportsConfig(aType, framerate);
}
HRESULT
WMFVideoMFTManager::ConfigureVideoFrameGeometry()
{
- nsRefPtr<IMFMediaType> mediaType;
+ RefPtr<IMFMediaType> mediaType;
HRESULT hr = mDecoder->GetOutputMediaType(mediaType);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
// If we enabled/disabled DXVA in response to a resolution
// change then we need to renegotiate our media types,
// and resubmit our previous frame (since the MFT appears
// to lose it otherwise).
if (mUseHwAccel && !CanUseDXVA(mediaType)) {
@@ -424,29 +424,29 @@ WMFVideoMFTManager::CreateBasicVideoFram
VideoData** aOutVideoData)
{
NS_ENSURE_TRUE(aSample, E_POINTER);
NS_ENSURE_TRUE(aOutVideoData, E_POINTER);
*aOutVideoData = nullptr;
HRESULT hr;
- nsRefPtr<IMFMediaBuffer> buffer;
+ RefPtr<IMFMediaBuffer> buffer;
// Must convert to contiguous buffer to use IMD2DBuffer interface.
- hr = aSample->ConvertToContiguousBuffer(getter_AddRefs(buffer));
+ hr = aSample->ConvertToContiguousBuffer(byRef(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
// Try and use the IMF2DBuffer interface if available, otherwise fallback
// to the IMFMediaBuffer interface. Apparently IMF2DBuffer is more efficient,
// but only some systems (Windows 8?) support it.
BYTE* data = nullptr;
LONG stride = 0;
- nsRefPtr<IMF2DBuffer> twoDBuffer;
- hr = buffer->QueryInterface(static_cast<IMF2DBuffer**>(getter_AddRefs(twoDBuffer)));
+ RefPtr<IMF2DBuffer> twoDBuffer;
+ hr = buffer->QueryInterface(static_cast<IMF2DBuffer**>(byRef(twoDBuffer)));
if (SUCCEEDED(hr)) {
hr = twoDBuffer->Lock2D(&data, &stride);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
} else {
hr = buffer->Lock(&data, nullptr, nullptr);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
stride = mVideoStride;
}
@@ -561,17 +561,17 @@ WMFVideoMFTManager::CreateD3DVideoFrame(
return S_OK;
}
// Blocks until decoded sample is produced by the deoder.
HRESULT
WMFVideoMFTManager::Output(int64_t aStreamOffset,
nsRefPtr<MediaData>& aOutData)
{
- nsRefPtr<IMFSample> sample;
+ RefPtr<IMFSample> sample;
HRESULT hr;
aOutData = nullptr;
int typeChangeCount = 0;
// Loop until we decode a sample, or an unexpected error that we can't
// handle occurs.
while (true) {
hr = mDecoder->Output(&sample);
--- a/dom/media/platforms/wmf/WMFVideoMFTManager.h
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.h
@@ -6,17 +6,17 @@
#if !defined(WMFVideoMFTManager_h_)
#define WMFVideoMFTManager_h_
#include "WMF.h"
#include "MFTDecoder.h"
#include "nsRect.h"
#include "WMFMediaDataDecoder.h"
-#include "mozilla/nsRefPtr.h"
+#include "mozilla/RefPtr.h"
namespace mozilla {
class DXVA2Manager;
class WMFVideoMFTManager : public MFTManager {
public:
WMFVideoMFTManager(const VideoInfo& aConfig,
@@ -61,20 +61,20 @@ private:
// Video frame geometry.
VideoInfo mVideoInfo;
uint32_t mVideoStride;
uint32_t mVideoWidth;
uint32_t mVideoHeight;
nsIntRect mPictureRegion;
- nsRefPtr<layers::ImageContainer> mImageContainer;
+ RefPtr<layers::ImageContainer> mImageContainer;
nsAutoPtr<DXVA2Manager> mDXVA2Manager;