--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -848,19 +848,20 @@ class GCRuntime
void startVerifyPreBarriers();
void endVerifyPreBarriers();
void finishVerifier();
bool isVerifyPreBarriersEnabled() const { return !!verifyPreData; }
#else
bool isVerifyPreBarriersEnabled() const { return false; }
#endif
- // Free certain LifoAlloc blocks from the background sweep thread.
+ // Free certain LifoAlloc blocks when it is safe to do so.
void freeUnusedLifoBlocksAfterSweeping(LifoAlloc* lifo);
void freeAllLifoBlocksAfterSweeping(LifoAlloc* lifo);
+ void freeAllLifoBlocksAfterMinorGC(LifoAlloc* lifo);
// Public here for ReleaseArenaLists and FinalizeTypedArenas.
void releaseArena(Arena* arena, const AutoLockGC& lock);
void releaseHeldRelocatedArenas();
void releaseHeldRelocatedArenasWithoutUnlocking(const AutoLockGC& lock);
// Allocator
@@ -1164,19 +1165,25 @@ class GCRuntime
/* Whether any black->gray edges were found during marking. */
bool foundBlackGrayEdges;
/* Singly linekd list of zones to be swept in the background. */
ZoneList backgroundSweepZones;
/*
* Free LIFO blocks are transferred to this allocator before being freed on
- * the background GC thread.
+ * the background GC thread after sweeping.
*/
- LifoAlloc freeLifoAlloc;
+ LifoAlloc blocksToFreeAfterSweeping;
+
+ /*
+ * Free LIFO blocks are transferred to this allocator before being freed
+ * after minor GC.
+ */
+ LifoAlloc blocksToFreeAfterMinorGC;
/* Index of current zone group (for stats). */
unsigned zoneGroupIndex;
/*
* Incremental sweep state.
*/
JS::Zone* zoneGroups;
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -214,22 +214,16 @@ Zone::discardJitCode(FreeOp* fop)
#endif
/* Mark baseline scripts on the stack as active. */
jit::MarkActiveBaselineScripts(this);
/* Only mark OSI points if code is being discarded. */
jit::InvalidateAll(fop, this);
- /* The storebuffer may contain pointers into data owned by BaselineScript. */
- JSRuntime* rt = runtimeFromMainThread();
- if (!rt->isHeapCollecting())
- rt->gc.evictNursery();
- MOZ_ASSERT(rt->gc.nursery.isEmpty());
-
for (ZoneCellIter i(this, AllocKind::SCRIPT); !i.done(); i.next()) {
JSScript* script = i.get<JSScript>();
jit::FinishInvalidation(fop, script);
/*
* Discard baseline script if it's not marked as active. Note that
* this also resets the active flag.
*/
@@ -238,17 +232,25 @@ Zone::discardJitCode(FreeOp* fop)
/*
* Warm-up counter for scripts are reset on GC. After discarding code we
* need to let it warm back up to get information such as which
* opcodes are setting array holes or accessing getter properties.
*/
script->resetWarmUpCounter();
}
- jitZone()->optimizedStubSpace()->free();
+ /*
+ * When scripts contain pointers to nursery things, the store buffer
+ * can contain entries that point into the optimized stub space. Since
+ * this method can be called outside the context of a GC, this situation
+ * could result in us trying to mark invalid store buffer entries.
+ *
+ * Defer freeing any allocated blocks until after the next minor GC.
+ */
+ jitZone()->optimizedStubSpace()->freeAllAfterMinorGC(fop->runtime());
}
}
#ifdef JSGC_HASH_TABLE_CHECKS
void
JS::Zone::checkUniqueIdTableAfterMovingGC()
{
for (UniqueIdMap::Enum e(uniqueIds_); !e.empty(); e.popFront())
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -203,17 +203,18 @@ BaselineCompiler::compile()
profilerExitFrameToggleOffset_.offset(),
traceLoggerEnterToggleOffset_.offset(),
traceLoggerExitToggleOffset_.offset(),
postDebugPrologueOffset_.offset(),
icEntries_.length(),
pcMappingIndexEntries.length(),
pcEntries.length(),
bytecodeTypeMapEntries,
- yieldOffsets_.length()));
+ yieldOffsets_.length()),
+ JS::DeletePolicy<BaselineScript>(cx->runtime()));
if (!baselineScript) {
ReportOutOfMemory(cx);
return Method_Error;
}
baselineScript->setMethod(code);
baselineScript->setTemplateScope(templateScope);
--- a/js/src/jit/BaselineDebugModeOSR.cpp
+++ b/js/src/jit/BaselineDebugModeOSR.cpp
@@ -866,19 +866,16 @@ jit::RecompileOnStackBaselineScriptsForD
if (!CollectInterpreterStackScripts(cx, obs, iter, entries))
return false;
}
}
if (entries.empty())
return true;
- // Scripts can entrain nursery things. See note in js::ReleaseAllJITCode.
- cx->runtime()->gc.evictNursery();
-
// When the profiler is enabled, we need to have suppressed sampling,
// since the basline jit scripts are in a state of flux.
MOZ_ASSERT(!cx->runtime()->isProfilerSamplingEnabled());
// Invalidate all scripts we are recompiling.
if (Zone* zone = obs.singleZone()) {
if (!InvalidateScriptsInZone(cx, zone, entries))
return false;
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -40,16 +40,22 @@ PCMappingSlotInfo::ToSlotLocation(const
return SlotInR0;
MOZ_ASSERT(stackVal->reg() == R1);
return SlotInR1;
}
MOZ_ASSERT(stackVal->kind() != StackValue::Stack);
return SlotIgnore;
}
+void
+ICStubSpace::freeAllAfterMinorGC(JSRuntime* rt)
+{
+ rt->gc.freeAllLifoBlocksAfterMinorGC(&allocator_);
+}
+
BaselineScript::BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
uint32_t profilerEnterToggleOffset,
uint32_t profilerExitToggleOffset,
uint32_t traceLoggerEnterToggleOffset,
uint32_t traceLoggerExitToggleOffset,
uint32_t postDebugPrologueOffset)
: method_(nullptr),
templateScope_(nullptr),
@@ -477,33 +483,40 @@ void
BaselineScript::Trace(JSTracer* trc, BaselineScript* script)
{
script->trace(trc);
}
void
BaselineScript::Destroy(FreeOp* fop, BaselineScript* script)
{
- /*
- * When the script contains pointers to nursery things, the store buffer
- * will contain entries refering to the referenced things. Since we can
- * destroy scripts outside the context of a GC, this situation can result
- * in invalid store buffer entries. Assert that if we do destroy scripts
- * outside of a GC that we at least emptied the nursery first.
- */
- MOZ_ASSERT(fop->runtime()->gc.nursery.isEmpty());
-
MOZ_ASSERT(!script->hasPendingIonBuilder());
script->unlinkDependentWasmModules(fop);
+ /*
+ * When the script contains pointers to nursery things, the store buffer can
+ * contain entries that point into the fallback stub space. Since we can
+ * destroy scripts outside the context of a GC, this situation could result
+ * in us trying to mark invalid store buffer entries.
+ *
+ * Defer freeing any allocated blocks until after the next minor GC.
+ */
+ script->fallbackStubSpace_.freeAllAfterMinorGC(fop->runtime());
+
fop->delete_(script);
}
void
+JS::DeletePolicy<js::jit::BaselineScript>::operator()(const js::jit::BaselineScript* script)
+{
+ BaselineScript::Destroy(rt_->defaultFreeOp(), const_cast<BaselineScript*>(script));
+}
+
+void
BaselineScript::clearDependentWasmModules()
{
// Remove any links from wasm::Modules that contain optimized import calls into
// this BaselineScript.
if (dependentWasmModules_) {
for (DependentWasmModuleImport dep : *dependentWasmModules_)
dep.module->deoptimizeImportExit(dep.importIndex);
dependentWasmModules_->clear();
--- a/js/src/jit/BaselineJIT.h
+++ b/js/src/jit/BaselineJIT.h
@@ -240,16 +240,22 @@ struct BaselineScript
// Do not call directly, use BaselineScript::New. This is public for cx->new_.
BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
uint32_t profilerEnterToggleOffset,
uint32_t profilerExitToggleOffset,
uint32_t traceLoggerEnterToggleOffset,
uint32_t traceLoggerExitToggleOffset,
uint32_t postDebugPrologueOffset);
+ ~BaselineScript() {
+ // The contents of the fallback stub space are removed and freed
+ // separately after the next minor GC. See BaselineScript::Destroy.
+ MOZ_ASSERT(fallbackStubSpace_.isEmpty());
+ }
+
static BaselineScript* New(JSScript* jsscript, uint32_t prologueOffset,
uint32_t epilogueOffset, uint32_t postDebugPrologueOffset,
uint32_t profilerEnterToggleOffset,
uint32_t profilerExitToggleOffset,
uint32_t traceLoggerEnterToggleOffset,
uint32_t traceLoggerExitToggleOffset,
size_t icEntries, size_t pcMappingIndexEntries,
size_t pcMappingSize,
@@ -594,9 +600,23 @@ void
MarkActiveBaselineScripts(Zone* zone);
MethodStatus
BaselineCompile(JSContext* cx, JSScript* script, bool forceDebugInstrumentation = false);
} // namespace jit
} // namespace js
+namespace JS {
+
+template <>
+struct DeletePolicy<js::jit::BaselineScript>
+{
+ explicit DeletePolicy(JSRuntime* rt) : rt_(rt) {}
+ void operator()(const js::jit::BaselineScript* script);
+
+ private:
+ JSRuntime* rt_;
+};
+
+} // namespace JS
+
#endif /* jit_BaselineJIT_h */
--- a/js/src/jit/ICStubSpace.h
+++ b/js/src/jit/ICStubSpace.h
@@ -30,35 +30,39 @@ class ICStubSpace
public:
inline void* alloc(size_t size) {
return allocator_.alloc(size);
}
JS_DECLARE_NEW_METHODS(allocate, alloc, inline)
+ void freeAllAfterMinorGC(JSRuntime* rt);
+
+#ifdef DEBUG
+ bool isEmpty() const {
+ return allocator_.isEmpty();
+ }
+#endif
+
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
return allocator_.sizeOfExcludingThis(mallocSizeOf);
}
};
// Space for optimized stubs. Every JitCompartment has a single
// OptimizedICStubSpace.
struct OptimizedICStubSpace : public ICStubSpace
{
static const size_t STUB_DEFAULT_CHUNK_SIZE = 4096;
public:
OptimizedICStubSpace()
: ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
{}
-
- void free() {
- allocator_.freeAll();
- }
};
// Space for fallback stubs. Every BaselineScript has a
// FallbackICStubSpace.
struct FallbackICStubSpace : public ICStubSpace
{
static const size_t STUB_DEFAULT_CHUNK_SIZE = 4096;
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -1215,20 +1215,37 @@ IonScript::Trace(JSTracer* trc, IonScrip
if (script != ION_DISABLED_SCRIPT)
script->trace(trc);
}
void
IonScript::Destroy(FreeOp* fop, IonScript* script)
{
script->unlinkFromRuntime(fop);
+
+ /*
+ * When the script contains pointers to nursery things, the store buffer can
+ * contain entries that point into the fallback stub space. Since we can
+ * destroy scripts outside the context of a GC, this situation could result
+ * in us trying to mark invalid store buffer entries.
+ *
+ * Defer freeing any allocated blocks until after the next minor GC.
+ */
+ script->fallbackStubSpace_.freeAllAfterMinorGC(fop->runtime());
+
fop->delete_(script);
}
void
+JS::DeletePolicy<js::jit::IonScript>::operator()(const js::jit::IonScript* script)
+{
+ IonScript::Destroy(rt_->defaultFreeOp(), const_cast<IonScript*>(script));
+}
+
+void
IonScript::toggleBarriers(bool enabled, ReprotectCode reprotect)
{
method()->togglePreBarriers(enabled, reprotect);
}
void
IonScript::purgeOptimizedStubs(Zone* zone)
{
--- a/js/src/jit/IonCode.h
+++ b/js/src/jit/IonCode.h
@@ -326,16 +326,22 @@ struct IonScript
private:
void trace(JSTracer* trc);
public:
// Do not call directly, use IonScript::New. This is public for cx->new_.
IonScript();
+ ~IonScript() {
+ // The contents of the fallback stub space are removed and freed
+ // separately after the next minor GC. See IonScript::Destroy.
+ MOZ_ASSERT(fallbackStubSpace_.isEmpty());
+ }
+
static IonScript* New(JSContext* cx, RecompileInfo recompileInfo,
uint32_t frameSlots, uint32_t argumentSlots, uint32_t frameSize,
size_t snapshotsListSize, size_t snapshotsRVATableSize,
size_t recoversSize, size_t bailoutEntries,
size_t constants, size_t safepointIndexEntries,
size_t osiIndexEntries, size_t cacheEntries,
size_t runtimeSize, size_t safepointsSize,
size_t backedgeEntries, size_t sharedStubEntries,
@@ -793,11 +799,22 @@ struct Concrete<js::jit::JitCode> : Trac
protected:
explicit Concrete(js::jit::JitCode *ptr) : TracerConcrete<js::jit::JitCode>(ptr) { }
public:
static void construct(void *storage, js::jit::JitCode *ptr) { new (storage) Concrete(ptr); }
};
} // namespace ubi
+
+template <>
+struct DeletePolicy<js::jit::IonScript>
+{
+ explicit DeletePolicy(JSRuntime* rt) : rt_(rt) {}
+ void operator()(const js::jit::IonScript* script);
+
+ private:
+ JSRuntime* rt_;
+};
+
} // namespace JS
#endif /* jit_IonCode_h */
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1032,17 +1032,18 @@ GCRuntime::GCRuntime(JSRuntime* rt) :
isFull(false),
#ifdef DEBUG
disableStrictProxyCheckingCount(0),
#endif
incrementalState(gc::NO_INCREMENTAL),
lastMarkSlice(false),
sweepOnBackgroundThread(false),
foundBlackGrayEdges(false),
- freeLifoAlloc(JSRuntime::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
+ blocksToFreeAfterSweeping(JSRuntime::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
+ blocksToFreeAfterMinorGC(JSRuntime::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
zoneGroupIndex(0),
zoneGroups(nullptr),
currentZoneGroup(nullptr),
sweepZone(nullptr),
sweepKind(AllocKind::FIRST),
abortSweepAfterCurrentGroup(false),
arenasAllocatedDuringSweep(nullptr),
startedCompacting(false),
@@ -2756,17 +2757,17 @@ GCRuntime::updatePointersToRelocatedCell
// Sweep everything to fix up weak pointers
WatchpointMap::sweepAll(rt);
Debugger::sweepAll(rt->defaultFreeOp());
jit::JitRuntime::SweepJitcodeGlobalTable(rt);
rt->gc.sweepZoneAfterCompacting(zone);
// Type inference may put more blocks here to free.
- freeLifoAlloc.freeAll();
+ blocksToFreeAfterSweeping.freeAll();
// Call callbacks to get the rest of the system to fixup other untraced pointers.
callWeakPointerZoneGroupCallbacks();
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
callWeakPointerCompartmentCallbacks(comp);
if (rt->sweepZoneCallback)
rt->sweepZoneCallback(zone);
}
@@ -3437,17 +3438,17 @@ GCRuntime::assertBackgroundSweepingFinis
#ifdef DEBUG
MOZ_ASSERT(backgroundSweepZones.isEmpty());
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
for (auto i : AllAllocKinds()) {
MOZ_ASSERT(!zone->arenas.arenaListsToSweep[i]);
MOZ_ASSERT(zone->arenas.doneBackgroundFinalize(i));
}
}
- MOZ_ASSERT(freeLifoAlloc.computedSizeOfExcludingThis() == 0);
+ MOZ_ASSERT(blocksToFreeAfterSweeping.computedSizeOfExcludingThis() == 0);
#endif
}
unsigned
js::GetCPUCount()
{
static unsigned ncpus = 0;
if (ncpus == 0) {
@@ -3598,25 +3599,32 @@ GCRuntime::queueZonesForBackgroundSweep(
helperState.maybeStartBackgroundSweep(lock);
}
void
GCRuntime::freeUnusedLifoBlocksAfterSweeping(LifoAlloc* lifo)
{
MOZ_ASSERT(rt->isHeapBusy());
AutoLockGC lock(rt);
- freeLifoAlloc.transferUnusedFrom(lifo);
+ blocksToFreeAfterSweeping.transferUnusedFrom(lifo);
}
void
GCRuntime::freeAllLifoBlocksAfterSweeping(LifoAlloc* lifo)
{
MOZ_ASSERT(rt->isHeapBusy());
AutoLockGC lock(rt);
- freeLifoAlloc.transferFrom(lifo);
+ blocksToFreeAfterSweeping.transferFrom(lifo);
+}
+
+void
+GCRuntime::freeAllLifoBlocksAfterMinorGC(LifoAlloc* lifo)
+{
+ MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
+ blocksToFreeAfterMinorGC.transferFrom(lifo);
}
void
GCHelperState::maybeStartBackgroundSweep(const AutoLockGC& lock)
{
MOZ_ASSERT(CanUseExtraThreads());
if (state() == IDLE)
@@ -3659,17 +3667,17 @@ GCHelperState::doSweep(AutoLockGC& lock)
do {
while (!rt->gc.backgroundSweepZones.isEmpty()) {
AutoSetThreadIsSweeping threadIsSweeping;
ZoneList zones;
zones.transferFrom(rt->gc.backgroundSweepZones);
LifoAlloc freeLifoAlloc(JSRuntime::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE);
- freeLifoAlloc.transferFrom(&rt->gc.freeLifoAlloc);
+ freeLifoAlloc.transferFrom(&rt->gc.blocksToFreeAfterSweeping);
AutoUnlockGC unlock(lock);
rt->gc.sweepBackgroundThings(zones, freeLifoAlloc, BackgroundThread);
}
bool shrinking = shrinkFlag;
shrinkFlag = false;
rt->gc.expireChunksAndArenas(shrinking, lock);
@@ -5388,17 +5396,17 @@ GCRuntime::endSweepingZoneGroup()
/* Start background thread to sweep zones if required. */
ZoneList zones;
for (GCZoneGroupIter zone(rt); !zone.done(); zone.next())
zones.append(zone);
if (sweepOnBackgroundThread)
queueZonesForBackgroundSweep(zones);
else
- sweepBackgroundThings(zones, freeLifoAlloc, MainThread);
+ sweepBackgroundThings(zones, blocksToFreeAfterSweeping, MainThread);
/* Reset the list of arenas marked as being allocated during sweep phase. */
while (Arena* arena = arenasAllocatedDuringSweep) {
arenasAllocatedDuringSweep = arena->getNextAllocDuringSweep();
arena->unsetAllocDuringSweep();
}
}
@@ -5897,17 +5905,17 @@ GCRuntime::resetIncrementalGC(const char
ResetGrayList(c);
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
MOZ_ASSERT(zone->isGCMarking());
zone->setNeedsIncrementalBarrier(false, Zone::UpdateJit);
zone->setGCState(Zone::NoGC);
}
- freeLifoAlloc.freeAll();
+ blocksToFreeAfterSweeping.freeAll();
incrementalState = NO_INCREMENTAL;
MOZ_ASSERT(!marker.shouldCheckCompartments());
break;
}
@@ -6738,16 +6746,18 @@ GCRuntime::minorGCImpl(JS::gcreason::Rea
return;
minorGCTriggerReason = JS::gcreason::NO_REASON;
TraceLoggerThread* logger = TraceLoggerForMainThread(rt);
AutoTraceLog logMinorGC(logger, TraceLogger_MinorGC);
nursery.collect(rt, reason, pretenureGroups);
MOZ_ASSERT(nursery.isEmpty());
+ blocksToFreeAfterMinorGC.freeAll();
+
AutoLockGC lock(rt);
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next())
maybeAllocTriggerZoneGC(zone, lock);
}
// Alternate to the runtime-taking form that allows marking object groups as
// needing pretenuring.
void
@@ -7092,22 +7102,16 @@ void PreventGCDuringInteractiveDebug()
TlsPerThreadData.get()->suppressGC++;
}
#endif
void
js::ReleaseAllJITCode(FreeOp* fop)
{
- /*
- * Scripts can entrain nursery things, inserting references to the script
- * into the store buffer. Clear the store buffer before discarding scripts.
- */
- fop->runtime()->gc.evictNursery();
-
for (ZonesIter zone(fop->runtime(), SkipAtoms); !zone.done(); zone.next()) {
zone->setPreservingCode(false);
zone->discardJitCode(fop);
}
}
void
js::PurgeJITCaches(Zone* zone)
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -2262,19 +2262,16 @@ AppendAndInvalidateScript(JSContext* cx,
static bool
UpdateExecutionObservabilityOfScriptsInZone(JSContext* cx, Zone* zone,
const Debugger::ExecutionObservableSet& obs,
Debugger::IsObserving observing)
{
using namespace js::jit;
- // See note in js::ReleaseAllJITCode.
- cx->runtime()->gc.evictNursery();
-
AutoSuppressProfilerSampling suppressProfilerSampling(cx);
JSRuntime* rt = cx->runtime();
FreeOp* fop = cx->runtime()->defaultFreeOp();
Vector<JSScript*> scripts(cx);
// Iterate through observable scripts, invalidating their Ion scripts and