author | Igor Bukanov <igor@mir2.org>
date | Fri, 05 Aug 2011 18:43:59 +0200
changeset 74243 | ddf95830a967af2ba480da2cdc7926746fc05579
parent 74242 | 48c58d9cc3f3569cc04cf4fe111e597d04435480
child 74244 | 10915aa173656cfb998cb28e04df37938b81b498
child 74258 | 8628c51e497c4d483cc359e3b91be753faa42976
push id | 20967
push user | mlamouri@mozilla.com
push date | Thu, 11 Aug 2011 11:09:27 +0000
treeherder | mozilla-central@10915aa17365
reviewers | wmccloskey |
bugs | 665354 |
milestone | 8.0a1 |
files | js/src/jsgc.cpp, js/src/jsgc.h, js/src/jsgcinlines.h
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -171,107 +171,106 @@ ArenaHeader::checkSynchronizedWithFreeLi
     /*
      * We can be called from the background finalization thread when the free
      * list in the compartment can mutate at any moment. We cannot do any
      * checks in this case.
      */
     if (!compartment->rt->gcRunning)
         return;
 
-    FreeSpan firstSpan(address() + firstFreeSpanStart, address() + firstFreeSpanEnd);
+    FreeSpan firstSpan = FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
     if (firstSpan.isEmpty())
         return;
     FreeSpan *list = &compartment->freeLists.lists[getThingKind()];
     if (list->isEmpty() || firstSpan.arenaAddress() != list->arenaAddress())
         return;
 
     /*
      * Here this arena has free things, FreeList::lists[thingKind] is not
      * empty and also points to this arena. Thus they must the same.
      */
-    JS_ASSERT(firstSpan.start == list->start);
-    JS_ASSERT(firstSpan.end == list->end);
+    JS_ASSERT(firstSpan.isSameNonEmptySpan(list));
 }
 #endif
 
 template<typename T>
 inline bool
 Arena::finalize(JSContext *cx)
 {
     JS_ASSERT(aheader.allocated());
     JS_ASSERT(!aheader.getMarkingDelay()->link);
 
     uintptr_t thing = thingsStart(sizeof(T));
-    uintptr_t end = thingsEnd();
+    uintptr_t lastByte = thingsEnd() - 1;
 
     FreeSpan nextFree(aheader.getFirstFreeSpan());
     nextFree.checkSpan();
 
     FreeSpan newListHead;
     FreeSpan *newListTail = &newListHead;
     uintptr_t newFreeSpanStart = 0;
     bool allClear = true;
 #ifdef DEBUG
     size_t nmarked = 0;
 #endif
     for (;; thing += sizeof(T)) {
-        JS_ASSERT(thing <= end);
-        if (thing == nextFree.start) {
-            JS_ASSERT(nextFree.end <= end);
-            if (nextFree.end == end)
+        JS_ASSERT(thing <= lastByte + 1);
+        if (thing == nextFree.first) {
+            JS_ASSERT(nextFree.last <= lastByte);
+            if (nextFree.last == lastByte)
                 break;
-            JS_ASSERT(Arena::isAligned(nextFree.end, sizeof(T)));
+            JS_ASSERT(Arena::isAligned(nextFree.last, sizeof(T)));
             if (!newFreeSpanStart)
                 newFreeSpanStart = thing;
-            thing = nextFree.end;
+            thing = nextFree.last;
             nextFree = *nextFree.nextSpan();
             nextFree.checkSpan();
         } else {
             T *t = reinterpret_cast<T *>(thing);
             if (t->isMarked()) {
                 allClear = false;
 #ifdef DEBUG
                 nmarked++;
 #endif
                 if (newFreeSpanStart) {
                     JS_ASSERT(thing >= thingsStart(sizeof(T)) + sizeof(T));
-                    newListTail->start = newFreeSpanStart;
-                    newListTail->end = thing - sizeof(T);
-                    newListTail = newListTail->nextSpanUnchecked();
-                    newFreeSpanStart = 0;
+                    newListTail->first = newFreeSpanStart;
+                    newListTail->last = thing - sizeof(T);
+                    newListTail = newListTail->nextSpanUnchecked(sizeof(T));
+                    newFreeSpanStart = NULL;
                 }
             } else {
                 if (!newFreeSpanStart)
                     newFreeSpanStart = thing;
                 t->finalize(cx);
                 memset(t, JS_FREE_PATTERN, sizeof(T));
             }
         }
     }
 
     if (allClear) {
         JS_ASSERT(newListTail == &newListHead);
         JS_ASSERT(newFreeSpanStart == thingsStart(sizeof(T)));
         return true;
     }
 
-    newListTail->start = newFreeSpanStart ? newFreeSpanStart : nextFree.start;
-    JS_ASSERT(Arena::isAligned(newListTail->start, sizeof(T)));
-    newListTail->end = end;
+    newListTail->first = newFreeSpanStart ? newFreeSpanStart : nextFree.first;
+    JS_ASSERT(Arena::isAligned(newListTail->first, sizeof(T)));
+    newListTail->last = lastByte;
 
 #ifdef DEBUG
     size_t nfree = 0;
-    for (FreeSpan *span = &newListHead; span != newListTail; span = span->nextSpan()) {
+    for (const FreeSpan *span = &newListHead; span != newListTail; span = span->nextSpan()) {
         span->checkSpan();
-        JS_ASSERT(Arena::isAligned(span->start, sizeof(T)));
-        JS_ASSERT(Arena::isAligned(span->end, sizeof(T)));
-        nfree += (span->end - span->start) / sizeof(T) + 1;
+        JS_ASSERT(Arena::isAligned(span->first, sizeof(T)));
+        JS_ASSERT(Arena::isAligned(span->last, sizeof(T)));
+        nfree += (span->last - span->first) / sizeof(T) + 1;
         JS_ASSERT(nfree + nmarked <= thingsPerArena(sizeof(T)));
     }
-    nfree += (newListTail->end - newListTail->start) / sizeof(T);
+    nfree += (newListTail->last + 1 - newListTail->first) / sizeof(T);
     JS_ASSERT(nfree + nmarked == thingsPerArena(sizeof(T)));
 #endif
     aheader.setFirstFreeSpan(&newListHead);
     return false;
 }
 
 /*
@@ -624,27 +623,27 @@ namespace js {
 
 inline bool
 InFreeList(ArenaHeader *aheader, uintptr_t addr)
 {
     if (!aheader->hasFreeThings())
         return false;
 
     FreeSpan firstSpan(aheader->getFirstFreeSpan());
 
-    for (FreeSpan *span = &firstSpan;;) {
+    for (const FreeSpan *span = &firstSpan;;) {
         /* If the thing comes fore the current span, it's not free. */
-        if (addr < span->start)
+        if (addr < span->first)
             return false;
 
         /*
          * If we find it inside the span, it's dead. We use here "<=" and not
          * "<" even for the last span as we know that thing is inside the
          * arena. Thus for the last span thing < span->end.
         */
-        if (addr <= span->end)
+        if (addr <= span->last)
             return true;
 
         /*
         * The last possible empty span is an the end of the arena. Here
         * span->end < thing < thingsEnd and so we must have more spans.
         */
         span = span->nextSpan();
     }
@@ -1359,17 +1358,17 @@ RunLastDitchGC(JSContext *cx)
 
 static inline bool
 IsGCAllowed(JSContext *cx)
 {
     return !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota;
 }
 
 template <typename T>
-inline Cell *
+inline void *
 RefillTypedFreeList(JSContext *cx, unsigned thingKind)
 {
     JS_ASSERT(!cx->runtime->gcRunning);
 
     /*
      * For compatibility with older code we tolerate calling the allocator
      * during the GC in optimized builds.
     */
@@ -1386,17 +1385,17 @@ RefillTypedFreeList(JSContext *cx, unsig
             if (!RunLastDitchGC(cx))
                 break;
 
             /*
              * The JSGC_END callback can legitimately allocate new GC
              * things and populate the free list. If that happens, just
              * return that list head.
             */
-            if (Cell *thing = compartment->freeLists.getNext(thingKind, sizeof(T)))
+            if (void *thing = compartment->freeLists.getNext(thingKind, sizeof(T)))
                 return thing;
         }
         ArenaHeader *aheader = compartment->arenas[thingKind].getArenaWithFreeList<sizeof(T)>(cx, thingKind);
         if (aheader) {
             JS_ASSERT(sizeof(T) == aheader->getThingSize());
             return compartment->freeLists.populate(aheader, thingKind, sizeof(T));
         }
@@ -1409,17 +1408,17 @@ RefillTypedFreeList(JSContext *cx, unsig
             break;
         runGC = true;
     }
 
     js_ReportOutOfMemory(cx);
     return NULL;
 }
 
-Cell *
+void *
 RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
 {
     switch (thingKind) {
       case FINALIZE_OBJECT0:
      case FINALIZE_OBJECT0_BACKGROUND:
         return RefillTypedFreeList<JSObject>(cx, thingKind);
       case FINALIZE_OBJECT2:
       case FINALIZE_OBJECT2_BACKGROUND:
@@ -1447,17 +1446,17 @@ RefillFinalizableFreeList(JSContext *cx,
       case FINALIZE_SHAPE:
         return RefillTypedFreeList<Shape>(cx, thingKind);
 #if JS_HAS_XML_SUPPORT
       case FINALIZE_XML:
         return RefillTypedFreeList<JSXML>(cx, thingKind);
 #endif
       default:
         JS_NOT_REACHED("bad finalize kind");
-        return NULL;
+        return 0;
     }
 }
 
 } /* namespace gc */
 } /* namespace js */
 
 uint32
 js_GetGCThingTraceKind(void *thing)
@@ -2315,17 +2314,17 @@ MarkAndSweep(JSContext *cx, JSCompartmen
     WeakMapBase::sweepAll(&gcmarker);
 
     js_SweepAtomState(cx);
 
     /* Collect watch points associated with unreachable objects. */
     WatchpointMap::sweepAll(rt);
 
     /*
-     * We finalize objects before other GC things to ensure that object's finalizer 
+     * We finalize objects before other GC things to ensure that object's finalizer
      * can access them even if they will be freed. Sweep the runtime's property trees
      * after finalizing objects, in case any had watchpoints referencing tree nodes.
      * Do this before sweeping compartments, so that we sweep all shapes in
      * unreachable compartments.
     */
     if (comp) {
         comp->sweep(cx, 0);
         comp->finalizeObjectArenaLists(cx);
@@ -2827,28 +2826,28 @@ IterateCompartmentsArenasCells(JSContext
             size_t traceKind = GetFinalizableTraceKind(thingKind);
             size_t thingSize = GCThingSizeMap[thingKind];
             ArenaHeader *aheader = compartment->arenas[thingKind].getHead();
             for (; aheader; aheader = aheader->next) {
                 Arena *arena = aheader->getArena();
                 (*arenaCallback)(cx, data, arena, traceKind, thingSize);
                 FreeSpan firstSpan(aheader->getFirstFreeSpan());
-                FreeSpan *span = &firstSpan;
+                const FreeSpan *span = &firstSpan;
 
                 for (uintptr_t thing = arena->thingsStart(thingSize); ; thing += thingSize) {
                     JS_ASSERT(thing <= arena->thingsEnd());
-                    if (thing == span->start) {
+                    if (thing == span->first) {
                         if (!span->hasNext())
                             break;
-                        thing = span->end;
+                        thing = span->last;
                         span = span->nextSpan();
                     } else {
-                        (*cellCallback)(cx, data, reinterpret_cast<void *>(thing), traceKind,
-                                        thingSize);
+                        void *t = reinterpret_cast<void *>(thing);
+                        (*cellCallback)(cx, data, t, traceKind, thingSize);
                     }
                 }
             }
         }
     }
 }
 
 namespace gc {
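[Editor's note: the InFreeList() walk above depends on spans being linked in ascending address order. A minimal standalone sketch of that loop follows; the Span type and its explicit |next| pointer are illustrative only, since in the real FreeSpan the next span's data lives at the |last| address itself.]

    #include <cstdint>

    // Reduced model of the InFreeList() walk from this changeset.
    struct Span {
        uintptr_t first;   // address of the first free cell in the span
        uintptr_t last;    // address of the last free cell in the span
        const Span *next;  // following span, in ascending address order
    };

    // The caller guarantees |addr| is a valid thing address inside the
    // arena, so for the terminal span addr <= last holds and the loop ends.
    static bool inFreeList(const Span *span, uintptr_t addr)
    {
        for (;;) {
            if (addr < span->first)   // before the span: thing is allocated
                return false;
            if (addr <= span->last)   // inside the span: thing is free
                return true;
            span = span->next;        // past the span: more spans must follow
        }
    }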
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -121,127 +121,245 @@ const size_t ArenaMask = ArenaSize - 1;
  */
 const size_t ArenaCellCount = size_t(1) << (ArenaShift - Cell::CellShift);
 const size_t ArenaBitmapBits = ArenaCellCount;
 const size_t ArenaBitmapBytes = ArenaBitmapBits / 8;
 const size_t ArenaBitmapWords = ArenaBitmapBits / JS_BITS_PER_WORD;
 
 /*
  * A FreeSpan represents a contiguous sequence of free cells in an Arena.
- * |start| is the address of the first free cell in the span. |end| is the
- * address of the last free cell in the span. The last cell (starting at
- * |end|) holds a FreeSpan data structure for the next span. However, the last
- * FreeSpan in an Arena is special: |end| points to the end of the Arena (an
- * unusable address), and no next FreeSpan is stored there.
+ * |first| is the address of the first free cell in the span. |last| is the
+ * address of the last free cell in the span. This last cell holds a FreeSpan
+ * data structure for the next span unless this is the last span on the list
+ * of spans in the arena. For this last span |last| points to the last byte of
+ * the last thing in the arena and no linkage is stored there, so
+ * |last| == arenaStart + ArenaSize - 1. If the space at the arena end is
+ * fully used this last span is empty and |first| == |last| + 1.
  *
- * As things in the arena ends on its boundary that is aligned on ArenaSize,
- * end & ArenaMask is zero if and only if the span is last. Also, since the
- * first thing in the arena comes after the header, start & ArenaSize is zero
- * if and only if the span is the empty span at the end of the arena.
+ * Thus |first| < |last| implies that we have either the last span with at least
+ * one element or that the span is not the last and contains at least 2
+ * elements. In both cases to allocate a thing from this span we need simply
+ * to increment |first| by the allocation size.
  *
- * The type of the start and end fields is uintptr_t, not a pointer type, to
- * minimize the amount of casting when doing mask operations.
+ * |first| == |last| implies that we have a one element span that records the
+ * next span. So to allocate from it we need to update the span list head
+ * with a copy of the span stored at |last| address so the following
+ * allocations will use that span.
+ *
+ * |first| > |last| implies that we have an empty last span and the arena is
+ * fully used.
+ *
+ * Also only for the last span (|last| & 1) != 0 as all allocation sizes are
+ * multiples of Cell::CellSize.
 */
 struct FreeSpan {
-    uintptr_t   start;
-    uintptr_t   end;
+    uintptr_t   first;
+    uintptr_t   last;
 
   public:
-    FreeSpan() { }
+    FreeSpan() {}
+
+    FreeSpan(uintptr_t first, uintptr_t last)
+      : first(first), last(last) {
+        checkSpan();
+    }
+
+    /*
+     * To minimize the size of the arena header the first span is encoded
+     * there as offsets from the arena start.
+     */
+    static size_t encodeOffsets(size_t firstOffset, size_t lastOffset = ArenaSize - 1) {
+        /* Check that we can pack the offsets into uint16. */
+        JS_STATIC_ASSERT(ArenaShift < 16);
+        JS_ASSERT(firstOffset <= ArenaSize);
+        JS_ASSERT(lastOffset < ArenaSize);
+        JS_ASSERT(firstOffset <= ((lastOffset + 1) & ~size_t(1)));
+        return firstOffset | (lastOffset << 16);
+    }
 
-    FreeSpan(uintptr_t start, uintptr_t end)
-      : start(start), end(end) {
-        checkSpan();
+    static const size_t EmptyOffsets = ArenaSize | ((ArenaSize - 1) << 16);
+
+    static FreeSpan decodeOffsets(uintptr_t arenaAddr, size_t offsets) {
+        JS_ASSERT(!(arenaAddr & ArenaMask));
+
+        size_t firstOffset = offsets & 0xFFFF;
+        size_t lastOffset = offsets >> 16;
+        JS_ASSERT(firstOffset <= ArenaSize);
+        JS_ASSERT(lastOffset < ArenaSize);
+
+        /*
+         * We must not use | when calculating first as firstOffset is
+         * ArenaMask + 1 for the empty span.
+         */
+        return FreeSpan(arenaAddr + firstOffset, arenaAddr | lastOffset);
+    }
+
+    void initAsEmpty(uintptr_t arenaAddr = 0) {
+        JS_ASSERT(!(arenaAddr & ArenaMask));
+        first = arenaAddr + ArenaSize;
+        last = arenaAddr | (ArenaSize - 1);
+        JS_ASSERT(isEmpty());
     }
 
     bool isEmpty() const {
         checkSpan();
-        return !(start & ArenaMask);
+        return first > last;
     }
 
     bool hasNext() const {
         checkSpan();
-        return !!(end & ArenaMask);
+        return !(last & uintptr_t(1));
+    }
+
+    const FreeSpan *nextSpan() const {
+        JS_ASSERT(hasNext());
+        return reinterpret_cast<FreeSpan *>(last);
     }
 
-    FreeSpan *nextSpan() const {
-        JS_ASSERT(hasNext());
-        return reinterpret_cast<FreeSpan *>(end);
+    FreeSpan *nextSpanUnchecked(size_t thingSize) const {
+#ifdef DEBUG
+        uintptr_t lastOffset = last & ArenaMask;
+        JS_ASSERT(!(lastOffset & 1));
+        JS_ASSERT((ArenaSize - lastOffset) % thingSize == 0);
+#endif
+        return reinterpret_cast<FreeSpan *>(last);
     }
 
-    FreeSpan *nextSpanUnchecked() const {
-        JS_ASSERT(end & ArenaMask);
-        return reinterpret_cast<FreeSpan *>(end);
+    uintptr_t arenaAddressUnchecked() const {
+        return last & ~ArenaMask;
     }
 
     uintptr_t arenaAddress() const {
+        checkSpan();
+        return arenaAddressUnchecked();
+    }
+
+    ArenaHeader *arenaHeader() const {
+        return reinterpret_cast<ArenaHeader *>(arenaAddress());
+    }
+
+    bool isSameNonEmptySpan(const FreeSpan *another) const {
         JS_ASSERT(!isEmpty());
-        return start & ~ArenaMask;
+        JS_ASSERT(!another->isEmpty());
+        return first == another->first && last == another->last;
+    }
+
+    bool isWithinArena(uintptr_t arenaAddr) const {
+        JS_ASSERT(!(arenaAddr & ArenaMask));
+
+        /* Return true for the last empty span as well. */
+        return arenaAddress() == arenaAddr;
+    }
+
+    size_t encodeAsOffsets() const {
+        /*
+         * We must use first - arenaAddress(), not first & ArenaMask as
+         * first == ArenaMask + 1 for an empty span.
+         */
+        uintptr_t arenaAddr = arenaAddress();
+        return encodeOffsets(first - arenaAddr, last & ArenaMask);
+    }
+
+    /* See comments before FreeSpan for details. */
+    JS_ALWAYS_INLINE void *allocate(size_t thingSize) {
+        JS_ASSERT(thingSize % Cell::CellSize == 0);
+        checkSpan();
+        uintptr_t thing = first;
+        if (thing < last) {
+            /* Bump-allocate from the current span. */
+            first = thing + thingSize;
+        } else if (JS_LIKELY(thing == last)) {
+            /*
+             * Move to the next span. We use JS_LIKELY as without PGO
+             * compilers mis-predict == here as unlikely to succeed.
+             */
+            *this = *reinterpret_cast<FreeSpan *>(thing);
+        } else {
+            return NULL;
+        }
+        checkSpan();
+        return reinterpret_cast<void *>(thing);
     }
 
     void checkSpan() const {
 #ifdef DEBUG
-        JS_ASSERT(start <= end);
-        JS_ASSERT(end - start <= ArenaSize);
-        if (!(start & ArenaMask)) {
-            /* The span is last and empty. */
-            JS_ASSERT(start == end);
+        /* We do not allow spans at the end of the address space. */
+        JS_ASSERT(last != uintptr_t(-1));
+        JS_ASSERT(first);
+        JS_ASSERT(last);
+        JS_ASSERT(first - 1 <= last);
+        uintptr_t arenaAddr = arenaAddressUnchecked();
+        if (last & 1) {
+            /* The span is the last. */
+            JS_ASSERT((last & ArenaMask) == ArenaMask);
+
+            if (first - 1 == last) {
+                /* The span is last and empty. The above start != 0 check
+                 * implies that we are not at the end of the address space.
+                 */
+                return;
+            }
+            size_t spanLength = last - first + 1;
+            JS_ASSERT(spanLength % Cell::CellSize == 0);
+
+            /* Start and end must belong to the same arena. */
+            JS_ASSERT((first & ~ArenaMask) == arenaAddr);
             return;
         }
 
-        JS_ASSERT(start);
-        JS_ASSERT(end);
-        uintptr_t arena = start & ~ArenaMask;
-        if (!(end & ArenaMask)) {
-            /* The last span with few free things at the end of the arena. */
-            JS_ASSERT(arena + ArenaSize == end);
-            return;
-        }
+        /* The span is not the last and we have more spans to follow. */
+        JS_ASSERT(first <= last);
+        size_t spanLengthWithoutOneThing = last - first;
+        JS_ASSERT(spanLengthWithoutOneThing % Cell::CellSize == 0);
+
+        JS_ASSERT((first & ~ArenaMask) == arenaAddr);
 
-        /* The span is not last and we have at least one span that follows it.*/
-        JS_ASSERT(arena == (end & ~ArenaMask));
-        FreeSpan *next = reinterpret_cast<FreeSpan *>(end);
+        /*
+         * If there is not enough space before the arena end to allocate one
+         * more thing, then the span must be marked as the last one to avoid
+         * storing useless empty span reference.
+         */
+        size_t beforeTail = ArenaSize - (last & ArenaMask);
+        JS_ASSERT(beforeTail >= sizeof(FreeSpan) + Cell::CellSize);
+
+        FreeSpan *next = reinterpret_cast<FreeSpan *>(last);
 
         /*
          * The GC things on the list of free spans come from one arena
         * and the spans are linked in ascending address order with
         * at least one non-free thing between spans.
         */
-        JS_ASSERT(end < next->start);
+        JS_ASSERT(last < next->first);
+        JS_ASSERT(arenaAddr == next->arenaAddressUnchecked());
 
-        if (!(next->start & ArenaMask)) {
+        if (next->first > next->last) {
            /*
             * The next span is the empty span that terminates the list for
             * arenas that do not have any free things at the end.
            */
-            JS_ASSERT(next->start == next->end);
-            JS_ASSERT(arena + ArenaSize == next->start);
-        } else {
-            /* The next spans is not empty and must starts inside the arena. */
-            JS_ASSERT(arena == (next->start & ~ArenaMask));
+            JS_ASSERT(next->first - 1 == next->last);
+            JS_ASSERT(arenaAddr + ArenaSize == next->first);
         }
 #endif
     }
+
 };
 
 /* Every arena has a header. */
 struct ArenaHeader {
     JSCompartment   *compartment;
     ArenaHeader     *next;
 
   private:
     /*
      * The first span of free things in the arena. We encode it as the start
      * and end offsets within the arena, not as FreeSpan structure, to
-     * minimize the header size. When the arena has no free things, the span
-     * must be the empty one pointing to the arena's end. For such a span the
-     * start and end offsets must be ArenaSize.
+     * minimize the header size.
     */
-    uint16_t        firstFreeSpanStart;
-    uint16_t        firstFreeSpanEnd;
+    size_t          firstFreeSpanOffsets;
 
     /*
      * One of FinalizeKind constants or FINALIZE_LIMIT when the arena does not
      * contain any GC things and is on the list of empty arenas in the GC
      * chunk. The later allows to quickly check if the arena is allocated
      * during the conservative GC scanning without searching the arena in the
      * list.
    */
@@ -258,46 +376,47 @@ struct ArenaHeader {
     }
 
     bool allocated() const {
         return thingKind < FINALIZE_LIMIT;
     }
 
     inline void init(JSCompartment *comp, unsigned thingKind, size_t thingSize);
 
+    uintptr_t arenaAddress() const {
+        return address();
+    }
+
     Arena *getArena() {
-        return reinterpret_cast<Arena *>(address());
+        return reinterpret_cast<Arena *>(arenaAddress());
     }
 
     unsigned getThingKind() const {
         JS_ASSERT(allocated());
         return thingKind;
     }
 
     bool hasFreeThings() const {
-        return firstFreeSpanStart != ArenaSize;
+        return firstFreeSpanOffsets != FreeSpan::EmptyOffsets;
     }
 
     void setAsFullyUsed() {
-        firstFreeSpanStart = firstFreeSpanEnd = uint16_t(ArenaSize);
+        firstFreeSpanOffsets = FreeSpan::EmptyOffsets;
     }
 
     FreeSpan getFirstFreeSpan() const {
 #ifdef DEBUG
         checkSynchronizedWithFreeList();
 #endif
-        return FreeSpan(address() + firstFreeSpanStart, address() + firstFreeSpanEnd);
+        return FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
     }
 
     void setFirstFreeSpan(const FreeSpan *span) {
-        span->checkSpan();
-        JS_ASSERT(span->start - address() <= ArenaSize);
-        JS_ASSERT(span->end - address() <= ArenaSize);
-        firstFreeSpanStart = uint16_t(span->start - address());
-        firstFreeSpanEnd = uint16_t(span->end - address());
+        JS_ASSERT(span->isWithinArena(arenaAddress()));
+        firstFreeSpanOffsets = span->encodeAsOffsets();
     }
 
     inline MarkingDelay *getMarkingDelay() const;
 
     size_t getThingSize() const {
         return GCThingSizeMap[getThingKind()];
     }
@@ -544,18 +663,17 @@ Cell::isAligned() const
 
 inline void
 ArenaHeader::init(JSCompartment *comp, unsigned kind, size_t thingSize)
 {
     JS_ASSERT(!allocated());
     JS_ASSERT(!getMarkingDelay()->link);
     compartment = comp;
     thingKind = kind;
-    firstFreeSpanStart = uint16_t(Arena::thingsStartOffset(thingSize));
-    firstFreeSpanEnd = uint16_t(ArenaSize);
+    firstFreeSpanOffsets = FreeSpan::encodeOffsets(Arena::thingsStartOffset(thingSize));
 }
 
 inline uintptr_t
 ArenaHeader::address() const
 {
     uintptr_t addr = reinterpret_cast<uintptr_t>(this);
     JS_ASSERT(!(addr & ArenaMask));
     JS_ASSERT(Chunk::withinArenasRange(addr));
@@ -801,113 +919,88 @@ struct FreeLists {
     * header after the initial allocation. When starting the GC We only move
     * the head of the of the list of spans back to the arena only for the
     * arena that was not fully allocated.
     */
     FreeSpan       lists[FINALIZE_LIMIT];
 
     void init() {
         for (size_t i = 0; i != JS_ARRAY_LENGTH(lists); ++i)
-            lists[i].start = lists[i].end = 0;
+            lists[i].initAsEmpty();
     }
 
     /*
      * Return the free list back to the arena so the GC finalization will not
      * run the finalizers over unitialized bytes from free things.
     */
     void purge() {
         for (size_t i = 0; i != size_t(FINALIZE_LIMIT); ++i) {
             FreeSpan *list = &lists[i];
             if (!list->isEmpty()) {
-                ArenaHeader *aheader = reinterpret_cast<Cell *>(list->start)->arenaHeader();
+                ArenaHeader *aheader = list->arenaHeader();
                 JS_ASSERT(!aheader->hasFreeThings());
                 aheader->setFirstFreeSpan(list);
-                list->start = list->end = 0;
+                list->initAsEmpty();
             }
         }
     }
 
     /*
      * Temporarily copy the free list heads to the arenas so the code can see
      * the proper value in ArenaHeader::freeList when accessing the latter
      * outside the GC.
    */
     void copyToArenas() {
         for (size_t i = 0; i != size_t(FINALIZE_LIMIT); ++i) {
             FreeSpan *list = &lists[i];
             if (!list->isEmpty()) {
-                ArenaHeader *aheader = reinterpret_cast<Cell *>(list->start)->arenaHeader();
+                ArenaHeader *aheader = list->arenaHeader();
                 JS_ASSERT(!aheader->hasFreeThings());
                 aheader->setFirstFreeSpan(list);
             }
         }
     }
 
     /*
      * Clear the free lists in arenas that were temporarily set there using
      * copyToArenas.
     */
     void clearInArenas() {
         for (size_t i = 0; i != size_t(FINALIZE_LIMIT); ++i) {
             FreeSpan *list = &lists[i];
             if (!list->isEmpty()) {
-                ArenaHeader *aheader = reinterpret_cast<Cell *>(list->start)->arenaHeader();
-#ifdef DEBUG
-                FreeSpan span(aheader->getFirstFreeSpan());
-                JS_ASSERT(span.start == list->start);
-                JS_ASSERT(span.end == list->end);
-#endif
+                ArenaHeader *aheader = list->arenaHeader();
+                JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(list));
                 aheader->setAsFullyUsed();
             }
         }
     }
 
-    JS_ALWAYS_INLINE Cell *getNext(unsigned thingKind, size_t thingSize) {
-        FreeSpan *list = &lists[thingKind];
-        list->checkSpan();
-        uintptr_t thing = list->start;
-        if (thing != list->end) {
-            /*
-             * We either have at least one thing in the span that ends the
-             * arena list or we have at least two things in the non-last span.
-             * In both cases we just need to bump the start pointer to account
-             * for the allocation.
-             */
-            list->start += thingSize;
-            JS_ASSERT(list->start <= list->end);
-        } else if (thing & ArenaMask) {
-            /*
-             * The thing points to the last thing in the span that has at
-             * least one more span to follow. Return the thing and update
-             * the list with that next span.
-             */
-            *list = *list->nextSpan();
-        } else {
-            return NULL;
-        }
-        return reinterpret_cast<Cell *>(thing);
+    JS_ALWAYS_INLINE void *getNext(unsigned thingKind, size_t thingSize) {
+        return lists[thingKind].allocate(thingSize);
     }
 
-    Cell *populate(ArenaHeader *aheader, unsigned thingKind, size_t thingSize) {
-        lists[thingKind] = aheader->getFirstFreeSpan();
+    void *populate(ArenaHeader *aheader, unsigned thingKind, size_t thingSize) {
+        FreeSpan *list = &lists[thingKind];
+        *list = aheader->getFirstFreeSpan();
         aheader->setAsFullyUsed();
-        Cell *t = getNext(thingKind, thingSize);
+        void *t = list->allocate(thingSize);
         JS_ASSERT(t);
         return t;
    }
 
     void checkEmpty() {
 #ifdef DEBUG
         for (size_t i = 0; i != JS_ARRAY_LENGTH(lists); ++i)
             JS_ASSERT(lists[i].isEmpty());
 #endif
     }
 };
 
-extern Cell *
+extern void *
 RefillFinalizableFreeList(JSContext *cx, unsigned thingKind);
 
 } /* namespace gc */
 
 typedef Vector<gc::Chunk *, 32, SystemAllocPolicy> GCChunks;
 
 struct GCPtrHasher {
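[Editor's note: the header shrink comes from FreeSpan::encodeOffsets()/decodeOffsets(): both ends of the first free span fit in 16 bits each, so ArenaHeader stores one word instead of two addresses. A compilable sketch of the round trip under the same constants (4 KB arenas, ArenaShift = 12); the arena base value and function names here are illustrative, not the changeset's code.]

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    static const size_t ArenaShift = 12;  // 4 KB arenas, as in the patch
    static const size_t ArenaSize = size_t(1) << ArenaShift;
    static const size_t ArenaMask = ArenaSize - 1;

    // Pack |first| and |last| offsets into one word: low 16 bits for first,
    // next 16 bits for last. firstOffset may equal ArenaSize (empty span).
    static size_t encodeOffsets(size_t firstOffset, size_t lastOffset)
    {
        assert(firstOffset <= ArenaSize);
        assert(lastOffset < ArenaSize);
        return firstOffset | (lastOffset << 16);
    }

    // Sentinel for a fully used arena: first == ArenaSize and
    // last == ArenaSize - 1, so the decoded span has first > last.
    static const size_t EmptyOffsets = ArenaSize | ((ArenaSize - 1) << 16);

    static void decodeOffsets(uintptr_t arenaAddr, size_t offsets,
                              uintptr_t *first, uintptr_t *last)
    {
        assert(!(arenaAddr & ArenaMask));
        // '+' rather than '|' for first: its offset can be ArenaSize, which
        // does not fit under ArenaMask. last always stays inside the arena.
        *first = arenaAddr + (offsets & 0xFFFF);
        *last = arenaAddr | (offsets >> 16);
    }

    int main()
    {
        uintptr_t arena = 16 * ArenaSize;  // an arbitrary aligned address
        uintptr_t first, last;

        decodeOffsets(arena, encodeOffsets(64, ArenaSize - 1), &first, &last);
        assert(first == arena + 64 && last == arena + ArenaSize - 1);

        decodeOffsets(arena, EmptyOffsets, &first, &last);
        assert(first == last + 1);  // empty span: first > last

        printf("offset encoding round-trips\n");
        return 0;
    }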
--- a/js/src/jsgcinlines.h +++ b/js/src/jsgcinlines.h @@ -198,18 +198,18 @@ NewGCThing(JSContext *cx, unsigned thing #endif JS_ASSERT(!cx->runtime->gcRunning); #ifdef JS_GC_ZEAL if (cx->runtime->needZealousGC()) js::gc::RunDebugGC(cx); #endif - js::gc::Cell *cell = cx->compartment->freeLists.getNext(thingKind, thingSize); - return static_cast<T *>(cell ? cell : js::gc::RefillFinalizableFreeList(cx, thingKind)); + void *t = cx->compartment->freeLists.getNext(thingKind, thingSize); + return static_cast<T *>(t ? t : js::gc::RefillFinalizableFreeList(cx, thingKind)); } inline JSObject * js_NewGCObject(JSContext *cx, js::gc::FinalizeKind kind) { JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST); JSObject *obj = NewGCThing<JSObject>(cx, kind, js::gc::GCThingSizeMap[kind]); if (obj) {
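[Editor's note: with this change the allocation fast path in NewGCThing reduces to FreeSpan::allocate() with its three cases: bump, span-hop, exhausted. A standalone toy version follows, using a hypothetical 8-thing arena buffer; names and sizes are illustrative only and omit the changeset's debug checks.]

    #include <cstdint>
    #include <cstdio>

    // Toy version of FreeSpan::allocate(). For a non-terminal span |last| is
    // the (cell-aligned) address of the last free cell and that cell holds
    // the next span; for the terminal span |last| is the last *byte* of the
    // arena, so |first| can only pass it, never equal it.
    struct Span {
        uintptr_t first;
        uintptr_t last;

        void *allocate(size_t thingSize) {
            uintptr_t thing = first;
            if (thing < last) {
                first = thing + thingSize;                 // common case: bump
            } else if (thing == last) {
                *this = *reinterpret_cast<Span *>(thing);  // hop to next span
            } else {
                return 0;                                  // first > last: empty
            }
            return reinterpret_cast<void *>(thing);
        }
    };

    int main()
    {
        static const size_t thingSize = 16;
        static uintptr_t buf[8 * thingSize / sizeof(uintptr_t)]; // 8-thing "arena"
        uintptr_t base = reinterpret_cast<uintptr_t>(buf);

        // Free spans: things 0-1, then things 4-7 at the arena tail. The cell
        // of thing 1 stores the link to the terminal span, whose |last| is
        // the final byte of the arena.
        Span head = { base, base + 1 * thingSize };
        Span *link = reinterpret_cast<Span *>(base + 1 * thingSize);
        link->first = base + 4 * thingSize;
        link->last = base + 8 * thingSize - 1;

        for (int i = 0; i < 7; i++)  // six things, then a null pointer
            printf("allocate -> %p\n", head.allocate(thingSize));
        return 0;
    }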