Merge backout of bug 965945 from m-c
author Kartikaya Gupta <kgupta@mozilla.com>
Wed, 19 Feb 2014 17:28:42 -0500
changeset 170015 ee1ea4920afbd213980ff238635ef080beac8c77
parent 170014 671b3044b16682b89c7aae16f97cc813780c91b2 (current diff)
parent 169965 660b626089513a6bcbc018880a34cb54615eadb6 (diff)
child 170016 2fca335e35c612d6b08d1f22ec55cc39880f4b6e
push id 270
push user pvanderbeken@mozilla.com
push date Thu, 06 Mar 2014 09:24:21 +0000
bugs 965945
milestone 30.0a1
Merge backout of bug 965945 from m-c
--- a/gfx/layers/ipc/ShadowLayerUtilsGralloc.cpp
+++ b/gfx/layers/ipc/ShadowLayerUtilsGralloc.cpp
@@ -286,26 +286,16 @@ GrallocBufferActor::Create(const gfx::In
   uint32_t format = aFormat;
   uint32_t usage = aUsage;
 
   if (format == 0 || usage == 0) {
     printf_stderr("GrallocBufferActor::Create -- format and usage must be non-zero");
     return actor;
   }
 
-  // If the requested size is too big (i.e. exceeds the commonly used max GL texture size)
-  // then we risk OOMing the parent process. It's better to just deny the allocation and
-  // kill the child process, which is what the following code does.
-  // TODO: actually use GL_MAX_TEXTURE_SIZE instead of hardcoding 4096
-  if (aSize.width > 4096 || aSize.height > 4096) {
-    printf_stderr("GrallocBufferActor::Create -- requested gralloc buffer is too big. Killing child instead.");
-    delete actor;
-    return nullptr;
-  }
-
   sp<GraphicBuffer> buffer(new GraphicBuffer(aSize.width, aSize.height, format, usage));
   if (buffer->initCheck() != OK)
     return actor;
 
   size_t bpp = BytesPerPixelForPixelFormat(format);
   actor->mAllocBytes = aSize.width * aSize.height * bpp;
   GrallocReporter::sAmount += actor->mAllocBytes;