author | Terrence Cole <terrence@mozilla.com> |
date | Wed, 16 Apr 2014 16:26:33 -0700 |
changeset 179624 | 84b4cf605262bff4881bc08859a45f7611482fc3 |
parent 179623 | 39041565bfa9a7f5b9e983df9f8e2fb8a3e6a3c4 |
child 179625 | 098f943ba58dcddc8e469f8ecf430998d1ef1d69 |
push id | 26632 |
push user | kwierso@gmail.com |
push date | Wed, 23 Apr 2014 00:54:42 +0000 |
treeherder | mozilla-central@02515cf4fcfd |
reviewers | jonco |
bugs | 807168 |
milestone | 31.0a1 |
js/public/GCAPI.h
js/src/gc/Tracer.cpp
js/src/gc/Tracer.h
js/src/jsapi.h
js/src/jsarray.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsobj.h
js/src/vm/Runtime.h
--- a/js/public/GCAPI.h +++ b/js/public/GCAPI.h @@ -8,16 +8,30 @@ #define js_GCAPI_h #include "mozilla/NullPtr.h" #include "js/HeapAPI.h" #include "js/RootingAPI.h" #include "js/Value.h" +typedef enum JSGCMode { + /* Perform only global GCs. */ + JSGC_MODE_GLOBAL = 0, + + /* Perform per-compartment GCs until too much garbage has accumulated. */ + JSGC_MODE_COMPARTMENT = 1, + + /* + * Collect in short time slices rather than all at once. Implies + * JSGC_MODE_COMPARTMENT. + */ + JSGC_MODE_INCREMENTAL = 2 +} JSGCMode; + namespace JS { #define GCREASONS(D) \ /* Reasons internal to the JS engine */ \ D(API) \ D(MAYBEGC) \ D(DESTROY_RUNTIME) \ D(DESTROY_CONTEXT) \
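Note: moving JSGCMode into GCAPI.h lets GC-internal headers (gc/Tracer.h below) see the mode without pulling in all of jsapi.h. For context, a minimal sketch of how an embedder selects a mode; the JSGC_MODE parameter key and the helper name are assumptions for illustration, not part of this changeset:

#include <stdint.h>

#include "jsapi.h"
#include "js/GCAPI.h"

/*
 * Hypothetical embedder setup: request incremental collection, which
 * (per the comment on the enum) implies per-compartment GCs as well.
 * JSGC_MODE is assumed to be the JSGCParamKey under which the mode is
 * stored; it is not shown in this diff.
 */
static bool
ConfigureIncrementalGC(JSRuntime *rt)
{
    JS_SetGCParameter(rt, JSGC_MODE, uint32_t(JSGC_MODE_INCREMENTAL));
    return JS_GetGCParameter(rt, JSGC_MODE) == uint32_t(JSGC_MODE_INCREMENTAL);
}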
--- a/js/src/gc/Tracer.cpp +++ b/js/src/gc/Tracer.cpp @@ -1,27 +1,33 @@ /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- * vim: set ts=8 sts=4 et sw=4 tw=99: * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #include "gc/Tracer.h" +#include "mozilla/DebugOnly.h" + #include "jsapi.h" #include "jsfun.h" #include "jsgc.h" #include "jsprf.h" #include "jsscript.h" #include "NamespaceImports.h" +#include "gc/GCInternals.h" #include "gc/Marking.h" +#include "jsgcinlines.h" + using namespace js; using namespace js::gc; +using mozilla::DebugOnly; JS_PUBLIC_API(void) JS_CallValueTracer(JSTracer *trc, Value *valuep, const char *name) { MarkValueUnbarriered(trc, valuep, name); } JS_PUBLIC_API(void) @@ -321,8 +327,253 @@ JSTracer::unsetTracingLocation() } void ** JSTracer::tracingLocation(void **thingp) { return realLocation_ ? (void **)realLocation_ : thingp; } #endif + +/* + * DoNotTraceWeakMaps: the GC is recomputing the liveness of WeakMap entries, + * so we delay visting entries. + */ +GCMarker::GCMarker(JSRuntime *rt) + : JSTracer(rt, nullptr, DoNotTraceWeakMaps), + stack(size_t(-1)), + color(BLACK), + started(false), + unmarkedArenaStackTop(nullptr), + markLaterArenas(0), + grayBufferState(GRAY_BUFFER_UNUSED) +{ +} + +bool +GCMarker::init(JSGCMode gcMode) +{ + return stack.init(gcMode); +} + +void +GCMarker::start() +{ + JS_ASSERT(!started); + started = true; + color = BLACK; + + JS_ASSERT(!unmarkedArenaStackTop); + JS_ASSERT(markLaterArenas == 0); + +} + +void +GCMarker::stop() +{ + JS_ASSERT(isDrained()); + + JS_ASSERT(started); + started = false; + + JS_ASSERT(!unmarkedArenaStackTop); + JS_ASSERT(markLaterArenas == 0); + + /* Free non-ballast stack memory. */ + stack.reset(); + + resetBufferedGrayRoots(); + grayBufferState = GRAY_BUFFER_UNUSED; +} + +void +GCMarker::reset() +{ + color = BLACK; + + stack.reset(); + JS_ASSERT(isMarkStackEmpty()); + + while (unmarkedArenaStackTop) { + ArenaHeader *aheader = unmarkedArenaStackTop; + JS_ASSERT(aheader->hasDelayedMarking); + JS_ASSERT(markLaterArenas); + unmarkedArenaStackTop = aheader->getNextDelayedMarking(); + aheader->unsetDelayedMarking(); + aheader->markOverflow = 0; + aheader->allocatedDuringIncremental = 0; + markLaterArenas--; + } + JS_ASSERT(isDrained()); + JS_ASSERT(!markLaterArenas); +} + +void +GCMarker::markDelayedChildren(ArenaHeader *aheader) +{ + if (aheader->markOverflow) { + bool always = aheader->allocatedDuringIncremental; + aheader->markOverflow = 0; + + for (CellIterUnderGC i(aheader); !i.done(); i.next()) { + Cell *t = i.getCell(); + if (always || t->isMarked()) { + t->markIfUnmarked(); + JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind())); + } + } + } else { + JS_ASSERT(aheader->allocatedDuringIncremental); + PushArena(this, aheader); + } + aheader->allocatedDuringIncremental = 0; + /* + * Note that during an incremental GC we may still be allocating into + * aheader. However, prepareForIncrementalGC sets the + * allocatedDuringIncremental flag if we continue marking. + */ +} + +bool +GCMarker::markDelayedChildren(SliceBudget &budget) +{ + gcstats::MaybeAutoPhase ap; + if (runtime()->gcIncrementalState == MARK) + ap.construct(runtime()->gcStats, gcstats::PHASE_MARK_DELAYED); + + JS_ASSERT(unmarkedArenaStackTop); + do { + /* + * If marking gets delayed at the same arena again, we must repeat + * marking of its things. 
For that we pop arena from the stack and + * clear its hasDelayedMarking flag before we begin the marking. + */ + ArenaHeader *aheader = unmarkedArenaStackTop; + JS_ASSERT(aheader->hasDelayedMarking); + JS_ASSERT(markLaterArenas); + unmarkedArenaStackTop = aheader->getNextDelayedMarking(); + aheader->unsetDelayedMarking(); + markLaterArenas--; + markDelayedChildren(aheader); + + budget.step(150); + if (budget.isOverBudget()) + return false; + } while (unmarkedArenaStackTop); + JS_ASSERT(!markLaterArenas); + + return true; +} + +#ifdef DEBUG +void +GCMarker::checkZone(void *p) +{ + JS_ASSERT(started); + DebugOnly<Cell *> cell = static_cast<Cell *>(p); + JS_ASSERT_IF(cell->isTenured(), cell->tenuredZone()->isCollecting()); +} +#endif + +bool +GCMarker::hasBufferedGrayRoots() const +{ + return grayBufferState == GRAY_BUFFER_OK; +} + +void +GCMarker::startBufferingGrayRoots() +{ + JS_ASSERT(grayBufferState == GRAY_BUFFER_UNUSED); + grayBufferState = GRAY_BUFFER_OK; + for (GCZonesIter zone(runtime()); !zone.done(); zone.next()) + JS_ASSERT(zone->gcGrayRoots.empty()); + + JS_ASSERT(!callback); + callback = GrayCallback; + JS_ASSERT(IS_GC_MARKING_TRACER(this)); +} + +void +GCMarker::endBufferingGrayRoots() +{ + JS_ASSERT(callback == GrayCallback); + callback = nullptr; + JS_ASSERT(IS_GC_MARKING_TRACER(this)); + JS_ASSERT(grayBufferState == GRAY_BUFFER_OK || + grayBufferState == GRAY_BUFFER_FAILED); +} + +void +GCMarker::resetBufferedGrayRoots() +{ + for (GCZonesIter zone(runtime()); !zone.done(); zone.next()) + zone->gcGrayRoots.clearAndFree(); +} + +void +GCMarker::markBufferedGrayRoots(JS::Zone *zone) +{ + JS_ASSERT(grayBufferState == GRAY_BUFFER_OK); + JS_ASSERT(zone->isGCMarkingGray()); + + for (GrayRoot *elem = zone->gcGrayRoots.begin(); elem != zone->gcGrayRoots.end(); elem++) { +#ifdef DEBUG + setTracingDetails(elem->debugPrinter, elem->debugPrintArg, elem->debugPrintIndex); +#endif + void *tmp = elem->thing; + setTracingLocation((void *)&elem->thing); + MarkKind(this, &tmp, elem->kind); + JS_ASSERT(tmp == elem->thing); + } +} + +void +GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind) +{ + JS_ASSERT(started); + + if (grayBufferState == GRAY_BUFFER_FAILED) + return; + + GrayRoot root(thing, kind); +#ifdef DEBUG + root.debugPrinter = debugPrinter(); + root.debugPrintArg = debugPrintArg(); + root.debugPrintIndex = debugPrintIndex(); +#endif + + Zone *zone = static_cast<Cell *>(thing)->tenuredZone(); + if (zone->isCollecting()) { + zone->maybeAlive = true; + if (!zone->gcGrayRoots.append(root)) { + resetBufferedGrayRoots(); + grayBufferState = GRAY_BUFFER_FAILED; + } + } +} + +void +GCMarker::GrayCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind) +{ + JS_ASSERT(thingp); + JS_ASSERT(*thingp); + GCMarker *gcmarker = static_cast<GCMarker *>(trc); + gcmarker->appendGrayRoot(*thingp, kind); +} + +size_t +GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const +{ + size_t size = stack.sizeOfExcludingThis(mallocSizeOf); + for (ZonesIter zone(runtime(), WithAtoms); !zone.done(); zone.next()) + size += zone->gcGrayRoots.sizeOfExcludingThis(mallocSizeOf); + return size; +} + +void +js::SetMarkStackLimit(JSRuntime *rt, size_t limit) +{ + JS_ASSERT(!rt->isHeapBusy()); + AutoStopVerifyingBarriers pauseVerification(rt, false); + rt->gcMarker.setMaxCapacity(limit); +} +
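Note: the delayed-marking code moved into Tracer.cpp above avoids unbounded recursion; when the C stack runs low, a cell's arena is pushed onto an intrusive stack headed by unmarkedArenaStackTop and its children are traced later. A self-contained sketch of that pattern, with toy types standing in for the real ArenaHeader and GCMarker machinery:

#include <cassert>
#include <cstddef>

// Toy stand-ins for ArenaHeader and its delayed-marking link field.
struct Arena {
    Arena *nextDelayed;      // like markingDelay->link
    bool hasDelayedMarking;  // set while the arena sits on the stack
    bool markOverflow;       // children still need tracing
    Arena() : nextDelayed(nullptr), hasDelayedMarking(false), markOverflow(false) {}
};

struct ToyMarker {
    Arena *unmarkedArenaStackTop;
    size_t markLaterArenas;
    ToyMarker() : unmarkedArenaStackTop(nullptr), markLaterArenas(0) {}

    // Mirrors GCMarker::delayMarkingArena: push each arena at most once.
    void delayMarkingArena(Arena *a) {
        if (a->hasDelayedMarking)
            return;                        // already scheduled
        a->hasDelayedMarking = true;
        a->nextDelayed = unmarkedArenaStackTop;
        unmarkedArenaStackTop = a;
        markLaterArenas++;
    }

    // Mirrors the pop loop in GCMarker::markDelayedChildren(SliceBudget &):
    // clear the flag before tracing so a re-delay pushes the arena again.
    void markDelayedChildren() {
        while (Arena *a = unmarkedArenaStackTop) {
            unmarkedArenaStackTop = a->nextDelayed;
            a->hasDelayedMarking = false;
            markLaterArenas--;
            a->markOverflow = false;       // the real code re-traces cells here
        }
        assert(markLaterArenas == 0);
    }
};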
--- a/js/src/gc/Tracer.h +++ b/js/src/gc/Tracer.h @@ -2,11 +2,372 @@ * vim: set ts=8 sts=4 et sw=4 tw=99: * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #ifndef js_Tracer_h #define js_Tracer_h +#include "mozilla/DebugOnly.h" + +#include "jsutil.h" + +#include "js/GCAPI.h" +#include "js/SliceBudget.h" #include "js/TracingAPI.h" +namespace js { +class ObjectImpl; +namespace gc { +class ArenaHeader; +} +namespace jit { +class JitCode; +} +namespace types { +class TypeObject; +} + +static const size_t NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY = 4096; +static const size_t INCREMENTAL_MARK_STACK_BASE_CAPACITY = 32768; + +/* + * When the native stack is low, the GC does not call JS_TraceChildren to mark + * the reachable "children" of the thing. Rather the thing is put aside and + * JS_TraceChildren is called later with more space on the C stack. + * + * To implement such delayed marking of the children with minimal overhead for + * the normal case of sufficient native stack, the code adds a field per arena. + * The field markingDelay->link links all arenas with delayed things into a + * stack list with the pointer to stack top in GCMarker::unmarkedArenaStackTop. + * GCMarker::delayMarkingChildren adds arenas to the stack as necessary while + * markDelayedChildren pops the arenas from the stack until it empties. + */ +template<class T> +struct MarkStack { + T *stack_; + T *tos_; + T *end_; + + // The capacity we start with and reset() to. + size_t baseCapacity_; + size_t maxCapacity_; + + MarkStack(size_t maxCapacity) + : stack_(nullptr), + tos_(nullptr), + end_(nullptr), + baseCapacity_(0), + maxCapacity_(maxCapacity) + {} + + ~MarkStack() { + js_free(stack_); + } + + size_t capacity() { return end_ - stack_; } + + ptrdiff_t position() const { return tos_ - stack_; } + + void setStack(T *stack, size_t tosIndex, size_t capacity) { + stack_ = stack; + tos_ = stack + tosIndex; + end_ = stack + capacity; + } + + void setBaseCapacity(JSGCMode mode) { + switch (mode) { + case JSGC_MODE_GLOBAL: + case JSGC_MODE_COMPARTMENT: + baseCapacity_ = NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY; + break; + case JSGC_MODE_INCREMENTAL: + baseCapacity_ = INCREMENTAL_MARK_STACK_BASE_CAPACITY; + break; + default: + MOZ_ASSUME_UNREACHABLE("bad gc mode"); + } + + if (baseCapacity_ > maxCapacity_) + baseCapacity_ = maxCapacity_; + } + + bool init(JSGCMode gcMode) { + setBaseCapacity(gcMode); + + JS_ASSERT(!stack_); + T *newStack = js_pod_malloc<T>(baseCapacity_); + if (!newStack) + return false; + + setStack(newStack, 0, baseCapacity_); + return true; + } + + void setMaxCapacity(size_t maxCapacity) { + JS_ASSERT(isEmpty()); + maxCapacity_ = maxCapacity; + if (baseCapacity_ > maxCapacity_) + baseCapacity_ = maxCapacity_; + + reset(); + } + + bool push(T item) { + if (tos_ == end_) { + if (!enlarge(1)) + return false; + } + JS_ASSERT(tos_ < end_); + *tos_++ = item; + return true; + } + + bool push(T item1, T item2, T item3) { + T *nextTos = tos_ + 3; + if (nextTos > end_) { + if (!enlarge(3)) + return false; + nextTos = tos_ + 3; + } + JS_ASSERT(nextTos <= end_); + tos_[0] = item1; + tos_[1] = item2; + tos_[2] = item3; + tos_ = nextTos; + return true; + } + + bool isEmpty() const { + return tos_ == stack_; + } + + T pop() { + JS_ASSERT(!isEmpty()); + return *--tos_; + } + + void reset() { + if (capacity() == baseCapacity_) { + // No size change; keep the current stack. 
+ setStack(stack_, 0, baseCapacity_); + return; + } + + T *newStack = (T *)js_realloc(stack_, sizeof(T) * baseCapacity_); + if (!newStack) { + // If the realloc fails, just keep using the existing stack; it's + // not ideal but better than failing. + newStack = stack_; + baseCapacity_ = capacity(); + } + setStack(newStack, 0, baseCapacity_); + } + + /* Grow the stack, ensuring there is space for at least count elements. */ + bool enlarge(unsigned count) { + size_t newCapacity = Min(maxCapacity_, capacity() * 2); + if (newCapacity < capacity() + count) + return false; + + size_t tosIndex = position(); + + T *newStack = (T *)js_realloc(stack_, sizeof(T) * newCapacity); + if (!newStack) + return false; + + setStack(newStack, tosIndex, newCapacity); + return true; + } + + void setGCMode(JSGCMode gcMode) { + // The mark stack won't be resized until the next call to reset(), but + // that will happen at the end of the next GC. + setBaseCapacity(gcMode); + } + + size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const { + return mallocSizeOf(stack_); + } +}; + +struct GCMarker : public JSTracer { + private: + /* + * We use a common mark stack to mark GC things of different types and use + * the explicit tags to distinguish them when it cannot be deduced from + * the context of push or pop operation. + */ + enum StackTag { + ValueArrayTag, + ObjectTag, + TypeTag, + XmlTag, + SavedValueArrayTag, + JitCodeTag, + LastTag = JitCodeTag + }; + + static const uintptr_t StackTagMask = 7; + + static void staticAsserts() { + JS_STATIC_ASSERT(StackTagMask >= uintptr_t(LastTag)); + JS_STATIC_ASSERT(StackTagMask <= gc::CellMask); + } + + public: + explicit GCMarker(JSRuntime *rt); + bool init(JSGCMode gcMode); + + void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); } + size_t maxCapacity() const { return stack.maxCapacity_; } + + void start(); + void stop(); + void reset(); + + void pushObject(ObjectImpl *obj) { + pushTaggedPtr(ObjectTag, obj); + } + + void pushType(types::TypeObject *type) { + pushTaggedPtr(TypeTag, type); + } + + void pushJitCode(jit::JitCode *code) { + pushTaggedPtr(JitCodeTag, code); + } + + uint32_t getMarkColor() const { + return color; + } + + /* + * Care must be taken changing the mark color from gray to black. The cycle + * collector depends on the invariant that there are no black to gray edges + * in the GC heap. This invariant lets the CC not trace through black + * objects. If this invariant is violated, the cycle collector may free + * objects that are still reachable. + */ + void setMarkColorGray() { + JS_ASSERT(isDrained()); + JS_ASSERT(color == gc::BLACK); + color = gc::GRAY; + } + + void setMarkColorBlack() { + JS_ASSERT(isDrained()); + JS_ASSERT(color == gc::GRAY); + color = gc::BLACK; + } + + inline void delayMarkingArena(gc::ArenaHeader *aheader); + void delayMarkingChildren(const void *thing); + void markDelayedChildren(gc::ArenaHeader *aheader); + bool markDelayedChildren(SliceBudget &budget); + bool hasDelayedChildren() const { + return !!unmarkedArenaStackTop; + } + + bool isDrained() { + return isMarkStackEmpty() && !unmarkedArenaStackTop; + } + + bool drainMarkStack(SliceBudget &budget); + + /* + * Gray marking must be done after all black marking is complete. However, + * we do not have write barriers on XPConnect roots. Therefore, XPConnect + * roots must be accumulated in the first slice of incremental GC. 
We + * accumulate these roots in the each compartment's gcGrayRoots vector and + * then mark them later, after black marking is complete for each + * compartment. This accumulation can fail, but in that case we switch to + * non-incremental GC. + */ + bool hasBufferedGrayRoots() const; + void startBufferingGrayRoots(); + void endBufferingGrayRoots(); + void resetBufferedGrayRoots(); + void markBufferedGrayRoots(JS::Zone *zone); + + static void GrayCallback(JSTracer *trc, void **thing, JSGCTraceKind kind); + + void setGCMode(JSGCMode mode) { stack.setGCMode(mode); } + + size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const; + + MarkStack<uintptr_t> stack; + + private: +#ifdef DEBUG + void checkZone(void *p); +#else + void checkZone(void *p) {} +#endif + + void pushTaggedPtr(StackTag tag, void *ptr) { + checkZone(ptr); + uintptr_t addr = reinterpret_cast<uintptr_t>(ptr); + JS_ASSERT(!(addr & StackTagMask)); + if (!stack.push(addr | uintptr_t(tag))) + delayMarkingChildren(ptr); + } + + void pushValueArray(JSObject *obj, void *start, void *end) { + checkZone(obj); + + JS_ASSERT(start <= end); + uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag; + uintptr_t startAddr = reinterpret_cast<uintptr_t>(start); + uintptr_t endAddr = reinterpret_cast<uintptr_t>(end); + + /* + * Push in the reverse order so obj will be on top. If we cannot push + * the array, we trigger delay marking for the whole object. + */ + if (!stack.push(endAddr, startAddr, tagged)) + delayMarkingChildren(obj); + } + + bool isMarkStackEmpty() { + return stack.isEmpty(); + } + + bool restoreValueArray(JSObject *obj, void **vpp, void **endp); + void saveValueRanges(); + inline void processMarkStackTop(SliceBudget &budget); + void processMarkStackOther(uintptr_t tag, uintptr_t addr); + + void appendGrayRoot(void *thing, JSGCTraceKind kind); + + /* The color is only applied to objects and functions. */ + uint32_t color; + + mozilla::DebugOnly<bool> started; + + /* Pointer to the top of the stack of arenas we are delaying marking on. */ + js::gc::ArenaHeader *unmarkedArenaStackTop; + /* Count of arenas that are currently in the stack. */ + mozilla::DebugOnly<size_t> markLaterArenas; + + enum GrayBufferState + { + GRAY_BUFFER_UNUSED, + GRAY_BUFFER_OK, + GRAY_BUFFER_FAILED + }; + + GrayBufferState grayBufferState; +}; + +void +SetMarkStackLimit(JSRuntime *rt, size_t limit); + +} /* namespace js */ + +/* + * Macro to test if a traversal is the marking phase of the GC. + */ +#define IS_GC_MARKING_TRACER(trc) \ + ((trc)->callback == nullptr || (trc)->callback == GCMarker::GrayCallback) + #endif /* js_Tracer_h */
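Note: GCMarker's single uintptr_t mark stack distinguishes entry kinds by storing a small tag in the low bits of each cell-aligned pointer; the StackTagMask static asserts above guarantee the tag fits under the cell alignment. A minimal illustration of that encoding and decoding, using only a toy subset of the tags rather than the full StackTag enum:

#include <cassert>
#include <cstdint>

enum ToyTag { ObjectTag = 1, TypeTag = 2, JitCodeTag = 5 };
static const uintptr_t ToyTagMask = 7;  // low 3 bits are free on 8-byte-aligned cells

// Pack a tag into the low bits; the real pushTaggedPtr asserts the
// address has no tag bits set before OR-ing the tag in.
inline uintptr_t
TagPointer(void *ptr, ToyTag tag)
{
    uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
    assert((addr & ToyTagMask) == 0);
    return addr | uintptr_t(tag);
}

// Unpack on pop, as processMarkStackTop does before dispatching on the tag.
inline void
UntagPointer(uintptr_t word, void **ptr, ToyTag *tag)
{
    *tag = ToyTag(word & ToyTagMask);
    *ptr = reinterpret_cast<void *>(word & ~ToyTagMask);
}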
--- a/js/src/jsapi.h +++ b/js/src/jsapi.h @@ -2115,30 +2115,16 @@ typedef enum JSGCParamKey { /* * We decommit memory lazily. If more than this number of megabytes is * available to be decommitted, then JS_MaybeGC will trigger a shrinking GC * to decommit it. */ JSGC_DECOMMIT_THRESHOLD = 20 } JSGCParamKey; -typedef enum JSGCMode { - /* Perform only global GCs. */ - JSGC_MODE_GLOBAL = 0, - - /* Perform per-compartment GCs until too much garbage has accumulated. */ - JSGC_MODE_COMPARTMENT = 1, - - /* - * Collect in short time slices rather than all at once. Implies - * JSGC_MODE_COMPARTMENT. - */ - JSGC_MODE_INCREMENTAL = 2 -} JSGCMode; - extern JS_PUBLIC_API(void) JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value); extern JS_PUBLIC_API(uint32_t) JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key); extern JS_PUBLIC_API(void) JS_SetGCParameterForThread(JSContext *cx, JSGCParamKey key, uint32_t value);
--- a/js/src/jsarray.cpp +++ b/js/src/jsarray.cpp @@ -18,16 +18,17 @@ #include "jsfun.h" #include "jsiter.h" #include "jsnum.h" #include "jsobj.h" #include "jstypes.h" #include "jsutil.h" #include "ds/Sort.h" +#include "gc/Heap.h" #include "vm/ArgumentsObject.h" #include "vm/ForkJoin.h" #include "vm/Interpreter.h" #include "vm/NumericConversions.h" #include "vm/Shape.h" #include "vm/StringBuffer.h" #include "jsatominlines.h"
--- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -1334,16 +1334,36 @@ Zone::reduceGCTriggerBytes(size_t amount gcTriggerBytes -= amount; } Allocator::Allocator(Zone *zone) : zone_(zone) {} inline void +GCMarker::delayMarkingArena(ArenaHeader *aheader) +{ + if (aheader->hasDelayedMarking) { + /* Arena already scheduled to be marked later */ + return; + } + aheader->setNextDelayedMarking(unmarkedArenaStackTop); + unmarkedArenaStackTop = aheader; + markLaterArenas++; +} + +void +GCMarker::delayMarkingChildren(const void *thing) +{ + const Cell *cell = reinterpret_cast<const Cell *>(thing); + cell->arenaHeader()->markOverflow = 1; + delayMarkingArena(cell->arenaHeader()); +} + +inline void ArenaLists::prepareForIncrementalGC(JSRuntime *rt) { for (size_t i = 0; i != FINALIZE_LIMIT; ++i) { FreeSpan *headSpan = &freeLists[i]; if (!headSpan->isEmpty()) { ArenaHeader *aheader = headSpan->arenaHeader(); aheader->allocatedDuringIncremental = true; rt->gcMarker.delayMarkingArena(aheader); @@ -1844,294 +1864,16 @@ bool SliceBudget::checkOverBudget() { bool over = PRMJ_Now() > deadline; if (!over) counter = CounterReset; return over; } -/* - * DoNotTraceWeakMaps: the GC is recomputing the liveness of WeakMap entries, - * so we delay visting entries. - */ -GCMarker::GCMarker(JSRuntime *rt) - : JSTracer(rt, nullptr, DoNotTraceWeakMaps), - stack(size_t(-1)), - color(BLACK), - started(false), - unmarkedArenaStackTop(nullptr), - markLaterArenas(0), - grayBufferState(GRAY_BUFFER_UNUSED) -{ -} - -bool -GCMarker::init(JSGCMode gcMode) -{ - return stack.init(gcMode); -} - -void -GCMarker::start() -{ - JS_ASSERT(!started); - started = true; - color = BLACK; - - JS_ASSERT(!unmarkedArenaStackTop); - JS_ASSERT(markLaterArenas == 0); - -} - -void -GCMarker::stop() -{ - JS_ASSERT(isDrained()); - - JS_ASSERT(started); - started = false; - - JS_ASSERT(!unmarkedArenaStackTop); - JS_ASSERT(markLaterArenas == 0); - - /* Free non-ballast stack memory. */ - stack.reset(); - - resetBufferedGrayRoots(); - grayBufferState = GRAY_BUFFER_UNUSED; -} - -void -GCMarker::reset() -{ - color = BLACK; - - stack.reset(); - JS_ASSERT(isMarkStackEmpty()); - - while (unmarkedArenaStackTop) { - ArenaHeader *aheader = unmarkedArenaStackTop; - JS_ASSERT(aheader->hasDelayedMarking); - JS_ASSERT(markLaterArenas); - unmarkedArenaStackTop = aheader->getNextDelayedMarking(); - aheader->unsetDelayedMarking(); - aheader->markOverflow = 0; - aheader->allocatedDuringIncremental = 0; - markLaterArenas--; - } - JS_ASSERT(isDrained()); - JS_ASSERT(!markLaterArenas); -} - -/* - * When the native stack is low, the GC does not call JS_TraceChildren to mark - * the reachable "children" of the thing. Rather the thing is put aside and - * JS_TraceChildren is called later with more space on the C stack. - * - * To implement such delayed marking of the children with minimal overhead for - * the normal case of sufficient native stack, the code adds a field per - * arena. The field markingDelay->link links all arenas with delayed things - * into a stack list with the pointer to stack top in - * GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds - * arenas to the stack as necessary while markDelayedChildren pops the arenas - * from the stack until it empties. 
- */ - -inline void -GCMarker::delayMarkingArena(ArenaHeader *aheader) -{ - if (aheader->hasDelayedMarking) { - /* Arena already scheduled to be marked later */ - return; - } - aheader->setNextDelayedMarking(unmarkedArenaStackTop); - unmarkedArenaStackTop = aheader; - markLaterArenas++; -} - -void -GCMarker::delayMarkingChildren(const void *thing) -{ - const Cell *cell = reinterpret_cast<const Cell *>(thing); - cell->arenaHeader()->markOverflow = 1; - delayMarkingArena(cell->arenaHeader()); -} - -void -GCMarker::markDelayedChildren(ArenaHeader *aheader) -{ - if (aheader->markOverflow) { - bool always = aheader->allocatedDuringIncremental; - aheader->markOverflow = 0; - - for (CellIterUnderGC i(aheader); !i.done(); i.next()) { - Cell *t = i.getCell(); - if (always || t->isMarked()) { - t->markIfUnmarked(); - JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind())); - } - } - } else { - JS_ASSERT(aheader->allocatedDuringIncremental); - PushArena(this, aheader); - } - aheader->allocatedDuringIncremental = 0; - /* - * Note that during an incremental GC we may still be allocating into - * aheader. However, prepareForIncrementalGC sets the - * allocatedDuringIncremental flag if we continue marking. - */ -} - -bool -GCMarker::markDelayedChildren(SliceBudget &budget) -{ - gcstats::MaybeAutoPhase ap; - if (runtime()->gcIncrementalState == MARK) - ap.construct(runtime()->gcStats, gcstats::PHASE_MARK_DELAYED); - - JS_ASSERT(unmarkedArenaStackTop); - do { - /* - * If marking gets delayed at the same arena again, we must repeat - * marking of its things. For that we pop arena from the stack and - * clear its hasDelayedMarking flag before we begin the marking. - */ - ArenaHeader *aheader = unmarkedArenaStackTop; - JS_ASSERT(aheader->hasDelayedMarking); - JS_ASSERT(markLaterArenas); - unmarkedArenaStackTop = aheader->getNextDelayedMarking(); - aheader->unsetDelayedMarking(); - markLaterArenas--; - markDelayedChildren(aheader); - - budget.step(150); - if (budget.isOverBudget()) - return false; - } while (unmarkedArenaStackTop); - JS_ASSERT(!markLaterArenas); - - return true; -} - -#ifdef DEBUG -void -GCMarker::checkZone(void *p) -{ - JS_ASSERT(started); - DebugOnly<Cell *> cell = static_cast<Cell *>(p); - JS_ASSERT_IF(cell->isTenured(), cell->tenuredZone()->isCollecting()); -} -#endif - -bool -GCMarker::hasBufferedGrayRoots() const -{ - return grayBufferState == GRAY_BUFFER_OK; -} - -void -GCMarker::startBufferingGrayRoots() -{ - JS_ASSERT(grayBufferState == GRAY_BUFFER_UNUSED); - grayBufferState = GRAY_BUFFER_OK; - for (GCZonesIter zone(runtime()); !zone.done(); zone.next()) - JS_ASSERT(zone->gcGrayRoots.empty()); - - JS_ASSERT(!callback); - callback = GrayCallback; - JS_ASSERT(IS_GC_MARKING_TRACER(this)); -} - -void -GCMarker::endBufferingGrayRoots() -{ - JS_ASSERT(callback == GrayCallback); - callback = nullptr; - JS_ASSERT(IS_GC_MARKING_TRACER(this)); - JS_ASSERT(grayBufferState == GRAY_BUFFER_OK || - grayBufferState == GRAY_BUFFER_FAILED); -} - -void -GCMarker::resetBufferedGrayRoots() -{ - for (GCZonesIter zone(runtime()); !zone.done(); zone.next()) - zone->gcGrayRoots.clearAndFree(); -} - -void -GCMarker::markBufferedGrayRoots(JS::Zone *zone) -{ - JS_ASSERT(grayBufferState == GRAY_BUFFER_OK); - JS_ASSERT(zone->isGCMarkingGray()); - - for (GrayRoot *elem = zone->gcGrayRoots.begin(); elem != zone->gcGrayRoots.end(); elem++) { -#ifdef DEBUG - setTracingDetails(elem->debugPrinter, elem->debugPrintArg, elem->debugPrintIndex); -#endif - void *tmp = elem->thing; - setTracingLocation((void 
*)&elem->thing); - MarkKind(this, &tmp, elem->kind); - JS_ASSERT(tmp == elem->thing); - } -} - -void -GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind) -{ - JS_ASSERT(started); - - if (grayBufferState == GRAY_BUFFER_FAILED) - return; - - GrayRoot root(thing, kind); -#ifdef DEBUG - root.debugPrinter = debugPrinter(); - root.debugPrintArg = debugPrintArg(); - root.debugPrintIndex = debugPrintIndex(); -#endif - - Zone *zone = static_cast<Cell *>(thing)->tenuredZone(); - if (zone->isCollecting()) { - zone->maybeAlive = true; - if (!zone->gcGrayRoots.append(root)) { - resetBufferedGrayRoots(); - grayBufferState = GRAY_BUFFER_FAILED; - } - } -} - -void -GCMarker::GrayCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind) -{ - JS_ASSERT(thingp); - JS_ASSERT(*thingp); - GCMarker *gcmarker = static_cast<GCMarker *>(trc); - gcmarker->appendGrayRoot(*thingp, kind); -} - -size_t -GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const -{ - size_t size = stack.sizeOfExcludingThis(mallocSizeOf); - for (ZonesIter zone(runtime(), WithAtoms); !zone.done(); zone.next()) - size += zone->gcGrayRoots.sizeOfExcludingThis(mallocSizeOf); - return size; -} - -void -js::SetMarkStackLimit(JSRuntime *rt, size_t limit) -{ - JS_ASSERT(!rt->isHeapBusy()); - AutoStopVerifyingBarriers pauseVerification(rt, false); - rt->gcMarker.setMaxCapacity(limit); -} - void js::MarkCompartmentActive(InterpreterFrame *fp) { fp->script()->compartment()->zone()->active = true; } static void RequestInterrupt(JSRuntime *rt, JS::gcreason::Reason reason)
--- a/js/src/jsgc.h +++ b/js/src/jsgc.h @@ -10,17 +10,16 @@ #define jsgc_h #include "mozilla/DebugOnly.h" #include "mozilla/MemoryReporting.h" #include "jslock.h" #include "jsobj.h" -#include "gc/Tracer.h" #include "js/GCAPI.h" #include "js/SliceBudget.h" #include "js/Vector.h" class JSAtom; struct JSCompartment; class JSFlatString; class JSLinearString; @@ -925,352 +924,29 @@ struct GCChunkHasher { JS_ASSERT(!(uintptr_t(k) & gc::ChunkMask)); JS_ASSERT(!(uintptr_t(l) & gc::ChunkMask)); return k == l; } }; typedef HashSet<js::gc::Chunk *, GCChunkHasher, SystemAllocPolicy> GCChunkSet; -static const size_t NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY = 4096; -static const size_t INCREMENTAL_MARK_STACK_BASE_CAPACITY = 32768; - -template<class T> -struct MarkStack { - T *stack_; - T *tos_; - T *end_; - - // The capacity we start with and reset() to. - size_t baseCapacity_; - size_t maxCapacity_; - - MarkStack(size_t maxCapacity) - : stack_(nullptr), - tos_(nullptr), - end_(nullptr), - baseCapacity_(0), - maxCapacity_(maxCapacity) - {} - - ~MarkStack() { - js_free(stack_); - } - - size_t capacity() { return end_ - stack_; } - - ptrdiff_t position() const { return tos_ - stack_; } - - void setStack(T *stack, size_t tosIndex, size_t capacity) { - stack_ = stack; - tos_ = stack + tosIndex; - end_ = stack + capacity; - } - - void setBaseCapacity(JSGCMode mode) { - switch (mode) { - case JSGC_MODE_GLOBAL: - case JSGC_MODE_COMPARTMENT: - baseCapacity_ = NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY; - break; - case JSGC_MODE_INCREMENTAL: - baseCapacity_ = INCREMENTAL_MARK_STACK_BASE_CAPACITY; - break; - default: - MOZ_ASSUME_UNREACHABLE("bad gc mode"); - } - - if (baseCapacity_ > maxCapacity_) - baseCapacity_ = maxCapacity_; - } - - bool init(JSGCMode gcMode) { - setBaseCapacity(gcMode); - - JS_ASSERT(!stack_); - T *newStack = js_pod_malloc<T>(baseCapacity_); - if (!newStack) - return false; - - setStack(newStack, 0, baseCapacity_); - return true; - } - - void setMaxCapacity(size_t maxCapacity) { - JS_ASSERT(isEmpty()); - maxCapacity_ = maxCapacity; - if (baseCapacity_ > maxCapacity_) - baseCapacity_ = maxCapacity_; - - reset(); - } - - bool push(T item) { - if (tos_ == end_) { - if (!enlarge(1)) - return false; - } - JS_ASSERT(tos_ < end_); - *tos_++ = item; - return true; - } - - bool push(T item1, T item2, T item3) { - T *nextTos = tos_ + 3; - if (nextTos > end_) { - if (!enlarge(3)) - return false; - nextTos = tos_ + 3; - } - JS_ASSERT(nextTos <= end_); - tos_[0] = item1; - tos_[1] = item2; - tos_[2] = item3; - tos_ = nextTos; - return true; - } - - bool isEmpty() const { - return tos_ == stack_; - } - - T pop() { - JS_ASSERT(!isEmpty()); - return *--tos_; - } - - void reset() { - if (capacity() == baseCapacity_) { - // No size change; keep the current stack. - setStack(stack_, 0, baseCapacity_); - return; - } - - T *newStack = (T *)js_realloc(stack_, sizeof(T) * baseCapacity_); - if (!newStack) { - // If the realloc fails, just keep using the existing stack; it's - // not ideal but better than failing. - newStack = stack_; - baseCapacity_ = capacity(); - } - setStack(newStack, 0, baseCapacity_); - } - - /* Grow the stack, ensuring there is space for at least count elements. 
*/ - bool enlarge(unsigned count) { - size_t newCapacity = Min(maxCapacity_, capacity() * 2); - if (newCapacity < capacity() + count) - return false; - - size_t tosIndex = position(); - - T *newStack = (T *)js_realloc(stack_, sizeof(T) * newCapacity); - if (!newStack) - return false; - - setStack(newStack, tosIndex, newCapacity); - return true; - } - - void setGCMode(JSGCMode gcMode) { - // The mark stack won't be resized until the next call to reset(), but - // that will happen at the end of the next GC. - setBaseCapacity(gcMode); - } - - size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const { - return mallocSizeOf(stack_); - } -}; - struct GrayRoot { void *thing; JSGCTraceKind kind; #ifdef DEBUG JSTraceNamePrinter debugPrinter; const void *debugPrintArg; size_t debugPrintIndex; #endif GrayRoot(void *thing, JSGCTraceKind kind) : thing(thing), kind(kind) {} }; -struct GCMarker : public JSTracer { - private: - /* - * We use a common mark stack to mark GC things of different types and use - * the explicit tags to distinguish them when it cannot be deduced from - * the context of push or pop operation. - */ - enum StackTag { - ValueArrayTag, - ObjectTag, - TypeTag, - XmlTag, - SavedValueArrayTag, - JitCodeTag, - LastTag = JitCodeTag - }; - - static const uintptr_t StackTagMask = 7; - - static void staticAsserts() { - JS_STATIC_ASSERT(StackTagMask >= uintptr_t(LastTag)); - JS_STATIC_ASSERT(StackTagMask <= gc::CellMask); - } - - public: - explicit GCMarker(JSRuntime *rt); - bool init(JSGCMode gcMode); - - void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); } - size_t maxCapacity() const { return stack.maxCapacity_; } - - void start(); - void stop(); - void reset(); - - void pushObject(ObjectImpl *obj) { - pushTaggedPtr(ObjectTag, obj); - } - - void pushType(types::TypeObject *type) { - pushTaggedPtr(TypeTag, type); - } - - void pushJitCode(jit::JitCode *code) { - pushTaggedPtr(JitCodeTag, code); - } - - uint32_t getMarkColor() const { - return color; - } - - /* - * Care must be taken changing the mark color from gray to black. The cycle - * collector depends on the invariant that there are no black to gray edges - * in the GC heap. This invariant lets the CC not trace through black - * objects. If this invariant is violated, the cycle collector may free - * objects that are still reachable. - */ - void setMarkColorGray() { - JS_ASSERT(isDrained()); - JS_ASSERT(color == gc::BLACK); - color = gc::GRAY; - } - - void setMarkColorBlack() { - JS_ASSERT(isDrained()); - JS_ASSERT(color == gc::GRAY); - color = gc::BLACK; - } - - inline void delayMarkingArena(gc::ArenaHeader *aheader); - void delayMarkingChildren(const void *thing); - void markDelayedChildren(gc::ArenaHeader *aheader); - bool markDelayedChildren(SliceBudget &budget); - bool hasDelayedChildren() const { - return !!unmarkedArenaStackTop; - } - - bool isDrained() { - return isMarkStackEmpty() && !unmarkedArenaStackTop; - } - - bool drainMarkStack(SliceBudget &budget); - - /* - * Gray marking must be done after all black marking is complete. However, - * we do not have write barriers on XPConnect roots. Therefore, XPConnect - * roots must be accumulated in the first slice of incremental GC. We - * accumulate these roots in the each compartment's gcGrayRoots vector and - * then mark them later, after black marking is complete for each - * compartment. This accumulation can fail, but in that case we switch to - * non-incremental GC. 
- */ - bool hasBufferedGrayRoots() const; - void startBufferingGrayRoots(); - void endBufferingGrayRoots(); - void resetBufferedGrayRoots(); - void markBufferedGrayRoots(JS::Zone *zone); - - static void GrayCallback(JSTracer *trc, void **thing, JSGCTraceKind kind); - - void setGCMode(JSGCMode mode) { stack.setGCMode(mode); } - - size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const; - - MarkStack<uintptr_t> stack; - - private: -#ifdef DEBUG - void checkZone(void *p); -#else - void checkZone(void *p) {} -#endif - - void pushTaggedPtr(StackTag tag, void *ptr) { - checkZone(ptr); - uintptr_t addr = reinterpret_cast<uintptr_t>(ptr); - JS_ASSERT(!(addr & StackTagMask)); - if (!stack.push(addr | uintptr_t(tag))) - delayMarkingChildren(ptr); - } - - void pushValueArray(JSObject *obj, void *start, void *end) { - checkZone(obj); - - JS_ASSERT(start <= end); - uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag; - uintptr_t startAddr = reinterpret_cast<uintptr_t>(start); - uintptr_t endAddr = reinterpret_cast<uintptr_t>(end); - - /* - * Push in the reverse order so obj will be on top. If we cannot push - * the array, we trigger delay marking for the whole object. - */ - if (!stack.push(endAddr, startAddr, tagged)) - delayMarkingChildren(obj); - } - - bool isMarkStackEmpty() { - return stack.isEmpty(); - } - - bool restoreValueArray(JSObject *obj, void **vpp, void **endp); - void saveValueRanges(); - inline void processMarkStackTop(SliceBudget &budget); - void processMarkStackOther(uintptr_t tag, uintptr_t addr); - - void appendGrayRoot(void *thing, JSGCTraceKind kind); - - /* The color is only applied to objects and functions. */ - uint32_t color; - - mozilla::DebugOnly<bool> started; - - /* Pointer to the top of the stack of arenas we are delaying marking on. */ - js::gc::ArenaHeader *unmarkedArenaStackTop; - /* Count of arenas that are currently in the stack. */ - mozilla::DebugOnly<size_t> markLaterArenas; - - enum GrayBufferState - { - GRAY_BUFFER_UNUSED, - GRAY_BUFFER_OK, - GRAY_BUFFER_FAILED - }; - - GrayBufferState grayBufferState; -}; - -void -SetMarkStackLimit(JSRuntime *rt, size_t limit); - void MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end); typedef void (*IterateChunkCallback)(JSRuntime *rt, void *data, gc::Chunk *chunk); typedef void (*IterateZoneCallback)(JSRuntime *rt, void *data, JS::Zone *zone); typedef void (*IterateArenaCallback)(JSRuntime *rt, void *data, gc::Arena *arena, JSGCTraceKind traceKind, size_t thingSize); typedef void (*IterateCellCallback)(JSRuntime *rt, void *data, void *thing, @@ -1315,22 +991,16 @@ extern void IterateScripts(JSRuntime *rt, JSCompartment *compartment, void *data, IterateScriptCallback scriptCallback); } /* namespace js */ extern void js_FinalizeStringRT(JSRuntime *rt, JSString *str); -/* - * Macro to test if a traversal is the marking phase of the GC. - */ -#define IS_GC_MARKING_TRACER(trc) \ - ((trc)->callback == nullptr || (trc)->callback == GCMarker::GrayCallback) - namespace js { JSCompartment * NewCompartment(JSContext *cx, JS::Zone *zone, JSPrincipals *principals, const JS::CompartmentOptions &options); namespace gc {
--- a/js/src/jsobj.h +++ b/js/src/jsobj.h @@ -13,16 +13,17 @@ * A JS object consists of a possibly-shared object descriptor containing * ordered property names, called the map; and a dense vector of property * values, called slots. The map/slot pointer pair is GC'ed, while the map * is reference counted and the slot vector is malloc'ed. */ #include "mozilla/MemoryReporting.h" +#include "gc/Barrier.h" #include "gc/Marking.h" #include "js/GCAPI.h" #include "vm/ObjectImpl.h" #include "vm/Shape.h" #include "vm/Xdr.h" namespace JS { struct ObjectsExtraSizes;
--- a/js/src/vm/Runtime.h +++ b/js/src/vm/Runtime.h @@ -29,16 +29,17 @@ #include "frontend/ParseMaps.h" #ifdef JSGC_GENERATIONAL # include "gc/Nursery.h" #endif #include "gc/Statistics.h" #ifdef JSGC_GENERATIONAL # include "gc/StoreBuffer.h" #endif +#include "gc/Tracer.h" #ifdef XP_MACOSX # include "jit/AsmJSSignalHandlers.h" #endif #include "js/HashTable.h" #include "js/Vector.h" #include "vm/CommonPropertyNames.h" #include "vm/DateTime.h" #include "vm/MallocProvider.h"