Bug 1418153 - Move ipalloc to a method of arena_t. r=njn
author Mike Hommey <mh+mozilla@glandium.org>
Tue, 14 Nov 2017 08:21:09 +0900
changeset 437009 f28c574c46dab90b54399e17d0c61abec58e01c6
parent 437008 d4967677f74f2eba167136a018549fde2f6c647f
child 437010 0ba2855990962d1a54c00e884bbe48d8b1ad3a07
push id 117
push user fmarier@mozilla.com
push date Tue, 28 Nov 2017 20:17:16 +0000
reviewers njn
bugs 1418153
milestone 59.0a1
Bug 1418153 - Move ipalloc to a method of arena_t. r=njn
memory/build/mozjemalloc.cpp
--- a/memory/build/mozjemalloc.cpp
+++ b/memory/build/mozjemalloc.cpp
@@ -1027,20 +1027,22 @@ private:
                    bool dirty);
 
   arena_run_t* GetNonFullBinRun(arena_bin_t* aBin);
 
   inline void* MallocSmall(size_t aSize, bool aZero);
 
   void* MallocLarge(size_t aSize, bool aZero);
 
+  void* PallocLarge(size_t aAlignment, size_t aSize, size_t aAllocSize);
+
 public:
   inline void* Malloc(size_t aSize, bool aZero);
 
-  void* PallocLarge(size_t aAlignment, size_t aSize, size_t aAllocSize);
+  void* Palloc(size_t aAlignment, size_t aSize);
 
   inline void DallocSmall(arena_chunk_t* aChunk,
                           void* aPtr,
                           arena_chunk_map_t* aMapElm);
 
   void DallocLarge(arena_chunk_t* aChunk, void* aPtr);
 
   void RallocShrinkLarge(arena_chunk_t* aChunk,
@@ -3137,18 +3139,18 @@ arena_t::PallocLarge(size_t aAlignment, 
   if (opt_junk) {
     memset(ret, kAllocJunk, aSize);
   } else if (opt_zero) {
     memset(ret, 0, aSize);
   }
   return ret;
 }
 
-static inline void*
-ipalloc(size_t aAlignment, size_t aSize, arena_t* aArena)
+void*
+arena_t::Palloc(size_t aAlignment, size_t aSize)
 {
   void* ret;
   size_t ceil_size;
 
   // Round size up to the nearest multiple of alignment.
   //
   // This done, we can take advantage of the fact that for each small
   // size class, every object is aligned at the smallest power of two
@@ -3168,20 +3170,19 @@ ipalloc(size_t aAlignment, size_t aSize,
 
   // (ceil_size < aSize) protects against the combination of maximal
   // alignment and size greater than maximal alignment.
   if (ceil_size < aSize) {
     // size_t overflow.
     return nullptr;
   }
 
-  MOZ_ASSERT(aArena);
   if (ceil_size <= gPageSize ||
       (aAlignment <= gPageSize && ceil_size <= gMaxLargeClass)) {
-    ret = aArena->Malloc(ceil_size, false);
+    ret = Malloc(ceil_size, false);
   } else {
     size_t run_size;
 
     // We can't achieve sub-page alignment, so round up alignment
     // permanently; it makes later calculations simpler.
     aAlignment = PAGE_CEILING(aAlignment);
     ceil_size = PAGE_CEILING(aSize);
 
@@ -3211,21 +3212,21 @@ ipalloc(size_t aAlignment, size_t aSize,
       // leaves us with a very large run_size.  That causes
       // the first conditional below to fail, which means
       // that the bogus run_size value never gets used for
       // anything important.
       run_size = (aAlignment << 1) - gPageSize;
     }
 
     if (run_size <= gMaxLargeClass) {
-      ret = aArena->PallocLarge(aAlignment, ceil_size, run_size);
+      ret = PallocLarge(aAlignment, ceil_size, run_size);
     } else if (aAlignment <= kChunkSize) {
-      ret = huge_malloc(ceil_size, false, aArena);
+      ret = huge_malloc(ceil_size, false, this);
     } else {
-      ret = huge_palloc(ceil_size, aAlignment, false, aArena);
+      ret = huge_palloc(ceil_size, aAlignment, false, this);
     }
   }
 
   MOZ_ASSERT((uintptr_t(ret) & (aAlignment - 1)) == 0);
   return ret;
 }
 
 // Return the size of the allocation pointed to by ptr.
@@ -4274,33 +4275,29 @@ RETURN:
   }
 
   return ret;
 }
 
 inline void*
 BaseAllocator::memalign(size_t aAlignment, size_t aSize)
 {
-  void* ret;
-
   MOZ_ASSERT(((aAlignment - 1) & aAlignment) == 0);
 
   if (!malloc_init()) {
     return nullptr;
   }
 
   if (aSize == 0) {
     aSize = 1;
   }
 
   aAlignment = aAlignment < sizeof(void*) ? sizeof(void*) : aAlignment;
   arena_t* arena = mArena ? mArena : choose_arena(aSize);
-  ret = ipalloc(aAlignment, aSize, arena);
-
-  return ret;
+  return arena->Palloc(aAlignment, aSize);
 }
 
 inline void*
 BaseAllocator::calloc(size_t aNum, size_t aSize)
 {
   void* ret;
 
   if (malloc_init()) {