--- a/js/src/builtin/WeakMapObject.cpp
+++ b/js/src/builtin/WeakMapObject.cpp
@@ -126,18 +126,17 @@ TryPreserveReflector(JSContext* cx, Hand
}
static MOZ_ALWAYS_INLINE bool
SetWeakMapEntryInternal(JSContext* cx, Handle<WeakMapObject*> mapObj,
HandleObject key, HandleValue value)
{
ObjectValueMap* map = mapObj->getMap();
if (!map) {
- AutoInitGCManagedObject<ObjectValueMap> newMap(
- cx->make_unique<ObjectValueMap>(cx, mapObj.get()));
+ auto newMap = cx->make_unique<ObjectValueMap>(cx, mapObj.get());
if (!newMap)
return false;
if (!newMap->init()) {
JS_ReportOutOfMemory(cx);
return false;
}
map = newMap.release();
mapObj->setPrivate(map);
--- a/js/src/gc/Barrier.cpp
+++ b/js/src/gc/Barrier.cpp
@@ -60,20 +60,20 @@ CurrentThreadIsIonCompilingSafeForMinorG
bool
CurrentThreadIsGCSweeping()
{
return TlsPerThreadData.get()->gcSweeping;
}
bool
-CurrentThreadIsHandlingInitFailure()
+CurrentThreadCanSkipPostBarrier(bool inNursery)
{
- JSRuntime* rt = TlsPerThreadData.get()->runtimeIfOnOwnerThread();
- return rt && rt->handlingInitFailure;
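+    // Destroying a GCPtr without running a post barrier is allowed off the
+    // runtime's owner thread, provided the pointer is not into the nursery.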
+ bool onMainThread = TlsPerThreadData.get()->runtimeIfOnOwnerThread() != nullptr;
+ return !onMainThread && !inNursery;
}
#endif // DEBUG
template <typename S>
template <typename T>
void
ReadBarrierFunctor<S>::operator()(T* t)
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -238,17 +238,17 @@ CurrentThreadIsIonCompiling();
bool
CurrentThreadIsIonCompilingSafeForMinorGC();
bool
CurrentThreadIsGCSweeping();
bool
-CurrentThreadIsHandlingInitFailure();
+CurrentThreadCanSkipPostBarrier(bool inNursery);
#endif
namespace gc {
// Marking.h depends on these barrier definitions, so we need a separate
// entry point for marking to implement the pre-barrier.
void MarkValueForBarrier(JSTracer* trc, Value* v, const char* name);
void MarkIdForBarrier(JSTracer* trc, jsid* idp, const char* name);
@@ -265,16 +265,18 @@ struct InternalBarrierMethods<T*>
static bool isMarkableTaggedPointer(T* v) { return !IsNullTaggedPointer(v); }
static void preBarrier(T* v) { T::writeBarrierPre(v); }
static void postBarrier(T** vp, T* prev, T* next) { T::writeBarrierPost(vp, prev, next); }
static void readBarrier(T* v) { T::readBarrier(v); }
+
+ static bool isInsideNursery(T* v) { return IsInsideNursery(v); }
};
template <typename S> struct PreBarrierFunctor : public VoidDefaultAdaptor<S> {
template <typename T> void operator()(T* t);
};
template <typename S> struct ReadBarrierFunctor : public VoidDefaultAdaptor<S> {
template <typename T> void operator()(T* t);
@@ -309,26 +311,32 @@ struct InternalBarrierMethods<Value>
// Remove the prev entry if the new value does not need it.
if (prev.isObject() && (sb = reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer()))
sb->unputValue(vp);
}
static void readBarrier(const Value& v) {
DispatchTyped(ReadBarrierFunctor<Value>(), v);
}
+
+ static bool isInsideNursery(const Value& v) {
+ return v.isMarkable() && IsInsideNursery(v.toGCThing());
+ }
};
template <>
struct InternalBarrierMethods<jsid>
{
static bool isMarkable(jsid id) { return JSID_IS_STRING(id) || JSID_IS_SYMBOL(id); }
static bool isMarkableTaggedPointer(jsid id) { return isMarkable(id); }
static void preBarrier(jsid id) { DispatchTyped(PreBarrierFunctor<jsid>(), id); }
static void postBarrier(jsid* idp, jsid prev, jsid next) {}
+
+ static bool isInsideNursery(jsid id) { return false; }
};
// Barrier classes can use Mixins to add methods to a set of barrier
// instantiations, to make the barriered thing look and feel more like the
// thing itself.
template <typename T>
class BarrieredBaseMixins {};
@@ -438,18 +446,21 @@ class GCPtr : public WriteBarrieredBase<
this->post(JS::GCPolicy<T>::initial(), v);
}
explicit GCPtr(const GCPtr<T>& v) : WriteBarrieredBase<T>(v) {
this->post(JS::GCPolicy<T>::initial(), v);
}
#ifdef DEBUG
~GCPtr() {
// No prebarrier necessary as this only happens when we are sweeping or
- // before the containing object becomes part of the GC graph.
- MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadIsHandlingInitFailure());
+ // after we have just collected the nursery.
+ bool inNursery = InternalBarrierMethods<T>::isInsideNursery(this->value);
+ MOZ_ASSERT(CurrentThreadIsGCSweeping() ||
+ CurrentThreadCanSkipPostBarrier(inNursery));
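+        // Poison the storage so that any later use of this GCPtr is detectable.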
+ Poison(this, JS_FREED_HEAP_PTR_PATTERN, sizeof(*this));
}
#endif
void init(T v) {
this->value = v;
this->post(JS::GCPolicy<T>::initial(), v);
}
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -872,16 +872,21 @@ class GCRuntime
bool isVerifyPreBarriersEnabled() const { return false; }
#endif
// Free certain LifoAlloc blocks when it is safe to do so.
void freeUnusedLifoBlocksAfterSweeping(LifoAlloc* lifo);
void freeAllLifoBlocksAfterSweeping(LifoAlloc* lifo);
void freeAllLifoBlocksAfterMinorGC(LifoAlloc* lifo);
+ // Queue a thunk to run after the next minor GC.
+ void callAfterMinorGC(void (*thunk)(void* data), void* data) {
+ nursery.queueSweepAction(thunk, data);
+ }
+
// Public here for ReleaseArenaLists and FinalizeTypedArenas.
void releaseArena(Arena* arena, const AutoLockGC& lock);
void releaseHeldRelocatedArenas();
void releaseHeldRelocatedArenasWithoutUnlocking(const AutoLockGC& lock);
// Allocator
template <AllowGC allowGC>
@@ -1446,11 +1451,12 @@ GCRuntime::needZealousGC() {
#else
inline bool GCRuntime::hasZealMode(ZealMode mode) { return false; }
inline void GCRuntime::clearZealMode(ZealMode mode) { }
inline bool GCRuntime::upcomingZealousGC() { return false; }
inline bool GCRuntime::needZealousGC() { return false; }
#endif
} /* namespace gc */
+
} /* namespace js */
#endif
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -53,31 +53,48 @@ struct js::Nursery::FreeMallocedBuffersT
private:
FreeOp* fop_;
MallocedBuffersSet buffers_;
virtual void run() override;
};
+struct js::Nursery::SweepAction
+{
+ SweepAction(SweepThunk thunk, void* data, SweepAction* next)
+ : thunk(thunk), data(data), next(next)
+ {}
+
+ SweepThunk thunk;
+ void* data;
+ SweepAction* next;
+
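+    // Pad so that sizeof(SweepAction) stays a multiple of the GC cell size on
+    // 32-bit platforms (see the static_assert in queueSweepAction).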
+#if JS_BITS_PER_WORD == 32
+ protected:
+ uint32_t padding;
+#endif
+};
+
js::Nursery::Nursery(JSRuntime* rt)
: runtime_(rt)
, position_(0)
, currentStart_(0)
, currentEnd_(0)
, heapStart_(0)
, heapEnd_(0)
, currentChunk_(0)
, numActiveChunks_(0)
, numNurseryChunks_(0)
, previousPromotionRate_(0)
, profileThreshold_(0)
, enableProfiling_(false)
, minorGcCount_(0)
, freeMallocedBuffersTask(nullptr)
+ , sweepActions_(nullptr)
#ifdef JS_GC_ZEAL
, lastCanary_(nullptr)
#endif
{}
bool
js::Nursery::init(uint32_t maxNurseryBytes)
{
@@ -718,16 +735,18 @@ js::Nursery::sweep()
JSObject* obj = static_cast<JSObject*>(e.front());
if (!IsForwarded(obj))
obj->zone()->removeUniqueId(obj);
else
MOZ_ASSERT(Forwarded(obj)->zone()->hasUniqueId(Forwarded(obj)));
}
cellsWithUid_.clear();
+ runSweepActions();
+
#ifdef JS_GC_ZEAL
/* Poison the nursery contents so touching a freed object will crash. */
JS_POISON((void*)start(), JS_SWEPT_NURSERY_PATTERN, nurserySize());
for (int i = 0; i < numNurseryChunks_; ++i)
initChunk(i);
if (runtime()->hasZealMode(ZealMode::GenerationalGC)) {
MOZ_ASSERT(numActiveChunks_ == numNurseryChunks_);
@@ -788,8 +807,42 @@ js::Nursery::updateNumActiveChunks(int n
uintptr_t decommitSize = chunk(priorChunks - 1).start() + ChunkSize - decommitStart;
MOZ_ASSERT(decommitSize != 0);
MOZ_ASSERT(decommitStart == AlignBytes(decommitStart, Alignment));
MOZ_ASSERT(decommitSize == AlignBytes(decommitSize, Alignment));
MarkPagesUnused((void*)decommitStart, decommitSize);
}
#endif // !defined(JS_GC_ZEAL)
}
+
+void
+js::Nursery::queueSweepAction(SweepThunk thunk, void* data)
+{
+ static_assert(sizeof(SweepAction) % CellSize == 0,
+ "SweepAction size must be a multiple of cell size");
+ MOZ_ASSERT(!runtime()->mainThread.suppressGC);
+
+ SweepAction* action = nullptr;
+ if (isEnabled() && !js::oom::ShouldFailWithOOM())
+ action = reinterpret_cast<SweepAction*>(allocate(sizeof(SweepAction)));
+
+ if (!action) {
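+        // We couldn't allocate the action in the nursery: evict the nursery so
+        // that it is safe to run the thunk immediately instead.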
+ runtime()->gc.evictNursery();
+ AutoSetThreadIsSweeping threadIsSweeping;
+ thunk(data);
+ return;
+ }
+
+ new (action) SweepAction(thunk, data, sweepActions_);
+ sweepActions_ = action;
+}
+
+void
+js::Nursery::runSweepActions()
+{
+ // The hazard analysis doesn't know whether the thunks can GC.
+ JS::AutoSuppressGCAnalysis nogc;
+
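+    // Run the actions with the thread marked as sweeping so that GCPtr
+    // destructors invoked by the thunks pass their assertions.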
+ AutoSetThreadIsSweeping threadIsSweeping;
+ for (auto action = sweepActions_; action; action = action->next)
+ action->thunk(action->data);
+ sweepActions_ = nullptr;
+}
--- a/js/src/gc/Nursery.h
+++ b/js/src/gc/Nursery.h
@@ -203,16 +203,19 @@ class Nursery
MOZ_MUST_USE bool addedUniqueIdToCell(gc::Cell* cell) {
if (!IsInsideNursery(cell) || !isEnabled())
return true;
MOZ_ASSERT(cellsWithUid_.initialized());
MOZ_ASSERT(!cellsWithUid_.has(cell));
return cellsWithUid_.put(cell);
}
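+    // Queue an action to run at the end of the next minor GC. If the action
+    // cannot be allocated in the nursery it is run immediately instead.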
+    using SweepThunk = void (*)(void* data);
+ void queueSweepAction(SweepThunk thunk, void* data);
+
size_t sizeOfHeapCommitted() const {
return numActiveChunks_ * gc::ChunkSize;
}
size_t sizeOfHeapDecommitted() const {
return (numNurseryChunks_ - numActiveChunks_) * gc::ChunkSize;
}
size_t sizeOfMallocedBuffers(mozilla::MallocSizeOf mallocSizeOf) const {
size_t total = 0;
@@ -330,16 +333,20 @@ class Nursery
*
* Note: we store the pointers as Cell* here, resulting in an ugly cast in
* sweep. This is because this structure is used to help implement
* stable object hashing and we have to break the cycle somehow.
*/
using CellsWithUniqueIdSet = HashSet<gc::Cell*, PointerHasher<gc::Cell*, 3>, SystemAllocPolicy>;
CellsWithUniqueIdSet cellsWithUid_;
+ struct SweepAction;
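+    // Singly-linked list of actions to run after the next minor GC, with the
+    // nodes allocated in the nursery itself; see queueSweepAction.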
+ SweepAction* sweepActions_;
+
#ifdef JS_GC_ZEAL
struct Canary
{
uintptr_t magicValue;
Canary* next;
};
Canary* lastCanary_;
@@ -424,16 +431,18 @@ class Nursery
void freeMallocedBuffers();
/*
* Frees all non-live nursery-allocated things at the end of a minor
* collection.
*/
void sweep();
+ void runSweepActions();
+
/* Change the allocable space provided by the nursery. */
void growAllocableSpace();
void shrinkAllocableSpace();
/* Profile recording and printing. */
void startProfile(ProfileKey key);
void endProfile(ProfileKey key);
void maybeStartProfile(ProfileKey key);
--- a/js/src/jsapi-tests/testGCHeapPostBarriers.cpp
+++ b/js/src/jsapi-tests/testGCHeapPostBarriers.cpp
@@ -4,88 +4,157 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "mozilla/UniquePtr.h"
#include "js/RootingAPI.h"
#include "jsapi-tests/tests.h"
+#include "vm/Runtime.h"
+
+template <typename T>
+static T* CreateGCThing(JSContext* cx)
+{
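+    // The generic version should never be called; only the specializations
+    // below are used.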
+ MOZ_CRASH();
+ return nullptr;
+}
+
+template <>
+JSObject* CreateGCThing(JSContext* cx)
+{
+ JS::RootedObject obj(cx, JS_NewPlainObject(cx));
+ if (!obj)
+ return nullptr;
+ JS_DefineProperty(cx, obj, "x", 42, 0);
+ return obj;
+}
+
+template <>
+JSFunction* CreateGCThing(JSContext* cx)
+{
+ /*
+ * We don't actually use the function as a function, so here we cheat and
+ * cast a JSObject.
+ */
+ return static_cast<JSFunction*>(CreateGCThing<JSObject>(cx));
+}
BEGIN_TEST(testGCHeapPostBarriers)
{
#ifdef JS_GC_ZEAL
AutoLeaveZeal nozeal(cx);
#endif /* JS_GC_ZEAL */
/* Sanity check - objects start in the nursery and then become tenured. */
JS_GC(cx);
- JS::RootedObject obj(cx, NurseryObject());
+ JS::RootedObject obj(cx, CreateGCThing<JSObject>(cx));
CHECK(js::gc::IsInsideNursery(obj.get()));
JS_GC(cx);
CHECK(!js::gc::IsInsideNursery(obj.get()));
JS::RootedObject tenuredObject(cx, obj);
/* Currently JSObject and JSFunction objects are nursery allocated. */
- CHECK(TestHeapPostBarriers(NurseryObject()));
- CHECK(TestHeapPostBarriers(NurseryFunction()));
+ CHECK(TestHeapPostBarriersForType<JSObject>());
+ CHECK(TestHeapPostBarriersForType<JSFunction>());
return true;
}
MOZ_NEVER_INLINE bool
Passthrough(bool value)
{
/* Work around a Win64 optimization bug in VS2010. (Bug 1033146) */
return value;
}
+bool
+CanAccessObject(JSObject* obj)
+{
+ JS::RootedObject rootedObj(cx, obj);
+ JS::RootedValue value(cx);
+ CHECK(JS_GetProperty(cx, rootedObj, "x", &value));
+ CHECK(value.isInt32());
+ CHECK(value.toInt32() == 42);
+ return true;
+}
+
template <typename T>
bool
-TestHeapPostBarriers(T initialObj)
+TestHeapPostBarriersForType()
+{
+ CHECK((TestHeapPostBarriersForWrapper<T, JS::Heap<T*>>()));
+ CHECK((TestHeapPostBarriersForWrapper<T, js::GCPtr<T*>>()));
+ CHECK((TestHeapPostBarriersForWrapper<T, js::HeapPtr<T*>>()));
+ return true;
+}
+
+template <typename T, typename W>
+bool
+TestHeapPostBarriersForWrapper()
{
+ CHECK((TestHeapPostBarrierUpdate<T, W>()));
+ CHECK((TestHeapPostBarrierInitFailure<T, W>()));
+ return true;
+}
+
+template <typename T, typename W>
+bool
+TestHeapPostBarrierUpdate()
+{
+ // Normal case - allocate a heap object, write a nursery pointer into it and
+ // check that it gets updated on minor GC.
+
+ T* initialObj = CreateGCThing<T>(cx);
CHECK(initialObj != nullptr);
CHECK(js::gc::IsInsideNursery(initialObj));
-
- /* Construct Heap<> wrapper. */
- auto heapDataStorage = mozilla::MakeUnique<char[]>(sizeof(JS::Heap<T>));
- auto* heapData = new (heapDataStorage.get()) JS::Heap<T>();
- CHECK(heapData);
- CHECK(Passthrough(*heapData == nullptr));
- *heapData = initialObj;
-
- /* Store the pointer as an integer so that the hazard analysis will miss it. */
uintptr_t initialObjAsInt = uintptr_t(initialObj);
- /* Perform minor GC and check heap wrapper is udated with new pointer. */
- cx->minorGC(JS::gcreason::API);
- CHECK(uintptr_t(heapData) != initialObjAsInt);
- CHECK(!js::gc::IsInsideNursery(*heapData));
+ W* ptr = nullptr;
+
+ {
+ auto heapPtr = cx->make_unique<W>();
+ CHECK(heapPtr);
- /* Check object is definitely still alive. */
- JS::Rooted<T> obj(cx, *heapData);
- JS::RootedValue value(cx);
- CHECK(JS_GetProperty(cx, obj, "x", &value));
- CHECK(value.isInt32());
- CHECK(value.toInt32() == 42);
+ W& wrapper = *heapPtr;
+ CHECK(Passthrough(wrapper.get() == nullptr));
+ wrapper = initialObj;
+ CHECK(Passthrough(wrapper == initialObj));
+
+ ptr = heapPtr.release();
+ }
+
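+    // Collect the nursery. The post barrier should update the stored pointer
+    // to the object's tenured location.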
+ cx->minorGC(JS::gcreason::API);
+
+ CHECK(uintptr_t(ptr->get()) != initialObjAsInt);
+ CHECK(!js::gc::IsInsideNursery(ptr->get()));
+ CHECK(CanAccessObject(ptr->get()));
return true;
}
-JSObject* NurseryObject()
+template <typename T, typename W>
+bool
+TestHeapPostBarrierInitFailure()
{
- JS::RootedObject obj(cx, JS_NewPlainObject(cx));
- if (!obj)
- return nullptr;
- JS_DefineProperty(cx, obj, "x", 42, 0);
- return obj;
-}
+ // Failure case - allocate a heap object, write a nursery pointer into it
+ // and fail to complete initialization.
+
+ T* initialObj = CreateGCThing<T>(cx);
+ CHECK(initialObj != nullptr);
+ CHECK(js::gc::IsInsideNursery(initialObj));
-JSFunction* NurseryFunction()
-{
- /*
- * We don't actually use the function as a function, so here we cheat and
- * cast a JSObject.
- */
- return static_cast<JSFunction*>(NurseryObject());
+ {
+ auto heapPtr = cx->make_unique<W>();
+ CHECK(heapPtr);
+
+ W& wrapper = *heapPtr;
+ CHECK(Passthrough(wrapper.get() == nullptr));
+ wrapper = initialObj;
+ CHECK(Passthrough(wrapper == initialObj));
+ }
+
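+    // Collect the nursery. Any deletion deferred by GCManagedDeletePolicy (as
+    // used for GCPtr wrappers) runs during the nursery sweep.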
+ cx->minorGC(JS::gcreason::API);
+
+ return true;
}
END_TEST(testGCHeapPostBarriers)
--- a/js/src/jsutil.h
+++ b/js/src/jsutil.h
@@ -323,16 +323,17 @@ PodSet(T* aDst, T aSrc, size_t aNElem)
*/
#define JS_FRESH_NURSERY_PATTERN 0x2F
#define JS_SWEPT_NURSERY_PATTERN 0x2B
#define JS_ALLOCATED_NURSERY_PATTERN 0x2D
#define JS_FRESH_TENURED_PATTERN 0x4F
#define JS_MOVED_TENURED_PATTERN 0x49
#define JS_SWEPT_TENURED_PATTERN 0x4B
#define JS_ALLOCATED_TENURED_PATTERN 0x4D
+#define JS_FREED_HEAP_PTR_PATTERN 0x6B
/*
* Ensure JS_SWEPT_CODE_PATTERN is a byte pattern that will crash immediately
* when executed, so either an undefined instruction or an instruction that's
* illegal in user mode.
*/
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64) || defined(JS_CODEGEN_NONE)
# define JS_SWEPT_CODE_PATTERN 0xED // IN instruction, crashes in user mode.
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -27,17 +27,17 @@ WeakMapBase::WeakMapBase(JSObject* memOf
zone(zone),
marked(false)
{
MOZ_ASSERT_IF(memberOf, memberOf->compartment()->zone() == zone);
}
WeakMapBase::~WeakMapBase()
{
- MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadIsHandlingInitFailure());
+ MOZ_ASSERT(CurrentThreadIsGCSweeping());
}
void
WeakMapBase::unmarkZone(JS::Zone* zone)
{
for (WeakMapBase* m : zone->gcWeakMapList)
m->marked = false;
}
--- a/js/src/jsweakmap.h
+++ b/js/src/jsweakmap.h
@@ -407,9 +407,17 @@ class ObjectWeakMap
#ifdef JSGC_HASH_TABLE_CHECKS
void checkAfterMovingGC();
#endif
};
} /* namespace js */
+namespace JS {
+
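+// ObjectValueMap may only be destroyed while the GC is sweeping (see
+// ~WeakMapBase), so deletion through a UniquePtr is deferred to a safe time.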
+template <>
+struct DeletePolicy<js::ObjectValueMap> : public js::GCManagedDeletePolicy<js::ObjectValueMap>
+{};
+
+} /* namespace JS */
+
#endif /* jsweakmap_h */
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -642,17 +642,16 @@ Debugger::Debugger(JSContext* cx, Native
traceLoggerLastDrainedSize(0),
traceLoggerLastDrainedIteration(0),
#endif
traceLoggerScriptedCallsLastDrainedSize(0),
traceLoggerScriptedCallsLastDrainedIteration(0)
{
assertSameCompartment(cx, dbg);
- cx->runtime()->debuggerList.insertBack(this);
JS_INIT_CLIST(&breakpoints);
JS_INIT_CLIST(&onNewGlobalObjectWatchersLink);
}
Debugger::~Debugger()
{
MOZ_ASSERT_IF(debuggees.initialized(), debuggees.empty());
allocationsLog.clear();
@@ -665,29 +664,33 @@ Debugger::~Debugger()
* background finalized.
*/
JS_REMOVE_LINK(&onNewGlobalObjectWatchersLink);
}
bool
Debugger::init(JSContext* cx)
{
- bool ok = debuggees.init() &&
- debuggeeZones.init() &&
- frames.init() &&
- scripts.init() &&
- sources.init() &&
- objects.init() &&
- observedGCs.init() &&
- environments.init() &&
- wasmInstanceScripts.init() &&
- wasmInstanceSources.init();
- if (!ok)
+ if (!debuggees.init() ||
+ !debuggeeZones.init() ||
+ !frames.init() ||
+ !scripts.init() ||
+ !sources.init() ||
+ !objects.init() ||
+ !observedGCs.init() ||
+ !environments.init() ||
+ !wasmInstanceScripts.init() ||
+ !wasmInstanceSources.init())
+ {
ReportOutOfMemory(cx);
- return ok;
+ return false;
+ }
+
+ cx->runtime()->debuggerList.insertBack(this);
+ return true;
}
JS_STATIC_ASSERT(unsigned(JSSLOT_DEBUGFRAME_OWNER) == unsigned(JSSLOT_DEBUGSCRIPT_OWNER));
JS_STATIC_ASSERT(unsigned(JSSLOT_DEBUGFRAME_OWNER) == unsigned(JSSLOT_DEBUGSOURCE_OWNER));
JS_STATIC_ASSERT(unsigned(JSSLOT_DEBUGFRAME_OWNER) == unsigned(JSSLOT_DEBUGOBJECT_OWNER));
JS_STATIC_ASSERT(unsigned(JSSLOT_DEBUGFRAME_OWNER) == unsigned(DebuggerEnvironment::OWNER_SLOT));
/* static */ Debugger*
@@ -3683,17 +3686,17 @@ Debugger::construct(JSContext* cx, unsig
return false;
for (unsigned slot = JSSLOT_DEBUG_PROTO_START; slot < JSSLOT_DEBUG_PROTO_STOP; slot++)
obj->setReservedSlot(slot, proto->getReservedSlot(slot));
obj->setReservedSlot(JSSLOT_DEBUG_MEMORY_INSTANCE, NullValue());
Debugger* debugger;
{
/* Construct the underlying C++ object. */
- AutoInitGCManagedObject<Debugger> dbg(cx->make_unique<Debugger>(cx, obj.get()));
+ auto dbg = cx->make_unique<Debugger>(cx, obj.get());
if (!dbg || !dbg->init(cx))
return false;
debugger = dbg.release();
obj->setPrivate(debugger); // owns the released pointer
}
/* Add the initial debuggees, if any. */
--- a/js/src/vm/Debugger.h
+++ b/js/src/vm/Debugger.h
@@ -1532,10 +1532,17 @@ Debugger::onLogAllocationSite(JSContext*
RootedObject hobj(cx, obj);
return Debugger::slowPathOnLogAllocationSite(cx, hobj, frame, when, *dbgs);
}
MOZ_MUST_USE bool ReportObjectRequired(JSContext* cx);
} /* namespace js */
+namespace JS {
+
+template <>
+struct DeletePolicy<js::Debugger> : public js::GCManagedDeletePolicy<js::Debugger>
+{};
+
+} /* namespace JS */
#endif /* vm_Debugger_h */
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -196,19 +196,16 @@ JSRuntime::JSRuntime(JSRuntime* parentRu
NaNValue(DoubleNaNValue()),
negativeInfinityValue(DoubleValue(NegativeInfinity<double>())),
positiveInfinityValue(DoubleValue(PositiveInfinity<double>())),
emptyString(nullptr),
spsProfiler(thisFromCtor()),
profilingScripts(false),
suppressProfilerSampling(false),
hadOutOfMemory(false),
-#ifdef DEBUG
- handlingInitFailure(false),
-#endif
#if defined(DEBUG) || defined(JS_OOM_BREAKPOINT)
runningOOMTest(false),
#endif
allowRelazificationForTesting(false),
defaultFreeOp_(thisFromCtor()),
debuggerMutations(0),
securityCallbacks(&NullSecurityCallbacks),
DOMcallbacks(nullptr),
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -936,21 +936,16 @@ struct JSRuntime : public JS::shadow::Ru
}
void enableProfilerSampling() {
suppressProfilerSampling = false;
}
/* Had an out-of-memory error which did not populate an exception. */
bool hadOutOfMemory;
-#ifdef DEBUG
- /* We are currently deleting an object due to an initialization failure. */
- bool handlingInitFailure;
-#endif
-
#if defined(DEBUG) || defined(JS_OOM_BREAKPOINT)
/* We are currently running a simulated OOM test. */
bool runningOOMTest;
#endif
/*
* Allow relazifying functions in compartments that are active. This is
* only used by the relazifyFunctions() testing function.
@@ -1715,83 +1710,51 @@ class MOZ_RAII AutoEnterIonCompilation
pt->ionCompilingSafeForMinorGC = false;
#endif
}
MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
};
/*
- * AutoInitGCManagedObject is a wrapper for use when initializing a object whose
- * lifetime is managed by the GC. It ensures that the object is destroyed if
- * initialization fails but also allows us to assert the invariant that such
- * objects are only destroyed in this way or by the GC.
- *
- * It has a limited interface but is a drop-in replacement for UniquePtr<T> is
- * this situation. For example:
+ * Provides a delete policy that can be used for objects which have their
+ * lifetime managed by the GC and can only safely be destroyed while the nursery
+ * is empty.
*
- * AutoInitGCManagedObject<MyClass> ptr(cx->make_unique<MyClass>());
- * if (!ptr) {
- * ReportOutOfMemory(cx);
- * return nullptr;
- * }
- *
- * if (!ptr->init(cx))
- * return nullptr; // Object destroyed here if init() failed.
- *
- * object->setPrivate(ptr.release());
- * // Initialization successful, ptr is now owned through another object.
+ * This is necessary when initializing such an object may fail after the initial
+ * allocation. The partially-initialized object must be destroyed, but it may
+ * not be safe to do so at the current time. This policy puts the object on a
+ * queue to be destroyed at a safe time.
*/
template <typename T>
-class MOZ_STACK_CLASS AutoInitGCManagedObject
+struct GCManagedDeletePolicy
{
- typedef UniquePtr<T> UniquePtrT;
-
- UniquePtrT ptr_;
-
- public:
- explicit AutoInitGCManagedObject(UniquePtrT&& ptr)
- : ptr_(mozilla::Move(ptr))
- {}
-
- ~AutoInitGCManagedObject() {
-#ifdef DEBUG
- if (ptr_) {
- JSRuntime* rt = TlsPerThreadData.get()->runtimeFromMainThread();
- MOZ_ASSERT(!rt->handlingInitFailure);
- rt->handlingInitFailure = true;
- ptr_.reset(nullptr);
- rt->handlingInitFailure = false;
+ void operator()(const T* ptr) {
+ if (ptr) {
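+            // Defer deletion until after the next minor GC when on the
+            // runtime's owner thread; otherwise delete immediately.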
+ JSRuntime* rt = TlsPerThreadData.get()->runtimeIfOnOwnerThread();
+ if (rt)
+ rt->gc.callAfterMinorGC(deletePtr, const_cast<T*>(ptr));
+ else
+ js_delete(const_cast<T*>(ptr));
}
-#endif
}
- T& operator*() const {
- return *get();
- }
-
- T* operator->() const {
- return get();
- }
-
- explicit operator bool() const {
- return get() != nullptr;
+ private:
+ static void deletePtr(void* data) {
+ js_delete(reinterpret_cast<T*>(data));
}
-
- T* get() const {
- return ptr_.get();
- }
-
- T* release() {
- return ptr_.release();
- }
-
- AutoInitGCManagedObject(const AutoInitGCManagedObject<T>& other) = delete;
- AutoInitGCManagedObject& operator=(const AutoInitGCManagedObject<T>& other) = delete;
};
} /* namespace js */
+namespace JS {
+
+template <typename T>
+struct DeletePolicy<js::GCPtr<T>> : public js::GCManagedDeletePolicy<js::GCPtr<T>>
+{};
+
+} /* namespace JS */
+
#ifdef _MSC_VER
#pragma warning(pop)
#endif
#endif /* vm_Runtime_h */
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -2588,17 +2588,17 @@ CanUseDebugScopeMaps(JSContext* cx)
DebugScopes*
DebugScopes::ensureCompartmentData(JSContext* cx)
{
JSCompartment* c = cx->compartment();
if (c->debugScopes)
return c->debugScopes;
- AutoInitGCManagedObject<DebugScopes> debugScopes(cx->make_unique<DebugScopes>(cx));
+ auto debugScopes = cx->make_unique<DebugScopes>(cx);
if (!debugScopes || !debugScopes->init()) {
ReportOutOfMemory(cx);
return nullptr;
}
c->debugScopes = debugScopes.release();
return c->debugScopes;
}
--- a/js/src/vm/ScopeObject.h
+++ b/js/src/vm/ScopeObject.h
@@ -1606,9 +1606,23 @@ bool CheckEvalDeclarationConflicts(JSCon
void DumpStaticScopeChain(JSScript* script);
void DumpStaticScopeChain(JSObject* staticScope);
bool
AnalyzeEntrainedVariables(JSContext* cx, HandleScript script);
#endif
} // namespace js
+namespace JS {
+
+template <>
+struct DeletePolicy<js::DebugScopeObject>
+{
+ explicit DeletePolicy(JSRuntime* rt) : rt_(rt) {}
+ void operator()(const js::DebugScopeObject* ptr);
+
+ private:
+ JSRuntime* rt_;
+};
+
+} // namespace JS
+
#endif /* vm_ScopeObject_h */
--- a/js/src/vm/TaggedProto.h
+++ b/js/src/vm/TaggedProto.h
@@ -67,16 +67,21 @@ struct InternalBarrierMethods<TaggedProt
static bool isMarkableTaggedPointer(TaggedProto proto) {
return proto.isObject();
}
static bool isMarkable(TaggedProto proto) {
return proto.isObject();
}
+
+ static bool isInsideNursery(TaggedProto proto) {
+ return proto.isObject() &&
+ gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(proto.toObject()));
+ }
};
template<class Outer>
class TaggedProtoOperations
{
const TaggedProto& value() const {
return static_cast<const Outer*>(this)->get();
}
--- a/js/src/vm/TypeInference-inl.h
+++ b/js/src/vm/TypeInference-inl.h
@@ -271,17 +271,21 @@ TypeIdString(jsid id)
* intermediate types (i.e. JITs) can use this to ensure that intermediate
* information is not collected and does not change.
*
* Ensures that GC cannot occur. Does additional sanity checking that inference
* is not reentrant and that recompilations occur properly.
*/
struct AutoEnterAnalysis
{
- /* Prevent GC activity in the middle of analysis. */
+ // For use when initializing an UnboxedLayout. The UniquePtr's destructor
+ // must run when GC is not suppressed.
+ UniquePtr<UnboxedLayout> unboxedLayoutToCleanUp;
+
+ // Prevent GC activity in the middle of analysis.
gc::AutoSuppressGC suppressGC;
// Allow clearing inference info on OOM during incremental sweeping.
AutoClearTypeInferenceStateOnOOM oom;
// Pending recompilations to perform before execution of JIT code can resume.
RecompileInfoVector pendingRecompiles;
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -3508,17 +3508,17 @@ PreliminaryObjectArrayWithTemplate::mayb
if (obj->inDictionaryMode() || !OnlyHasDataProperties(obj->lastProperty()))
return;
if (CommonPrefix(obj->lastProperty(), shape()) != shape())
return;
}
}
- TryConvertToUnboxedLayout(cx, shape(), group, preliminaryObjects);
+ TryConvertToUnboxedLayout(cx, enter, shape(), group, preliminaryObjects);
if (group->maybeUnboxedLayout())
return;
if (shape()) {
// We weren't able to use an unboxed layout, but since the preliminary
// objects still reflect the template object's properties, and all
// objects in the future will be created with those properties, the
// properties can be marked as definite for objects in the group.
@@ -3789,17 +3789,17 @@ TypeNewScript::maybeAnalyze(JSContext* c
if (!initializerList) {
ReportOutOfMemory(cx);
return false;
}
PodCopy(initializerList, initializerVector.begin(), initializerVector.length());
}
// Try to use an unboxed representation for the group.
- if (!TryConvertToUnboxedLayout(cx, templateObject()->lastProperty(), group, preliminaryObjects))
+ if (!TryConvertToUnboxedLayout(cx, enter, templateObject()->lastProperty(), group, preliminaryObjects))
return false;
js_delete(preliminaryObjects);
preliminaryObjects = nullptr;
if (group->maybeUnboxedLayout()) {
// An unboxed layout was constructed for the group, and this has already
// been hooked into it.
--- a/js/src/vm/UnboxedObject.cpp
+++ b/js/src/vm/UnboxedObject.cpp
@@ -1915,17 +1915,17 @@ UnboxedPlainObject::fillAfterConvert(Exc
{
initExpando();
memset(data(), 0, layout().size());
for (size_t i = 0; i < layout().properties().length(); i++)
JS_ALWAYS_TRUE(setValue(cx, layout().properties()[i], NextValue(values, valueCursor)));
}
bool
-js::TryConvertToUnboxedLayout(ExclusiveContext* cx, Shape* templateShape,
+js::TryConvertToUnboxedLayout(ExclusiveContext* cx, AutoEnterAnalysis& enter, Shape* templateShape,
ObjectGroup* group, PreliminaryObjectArray* objects)
{
bool isArray = !templateShape;
// Unboxed arrays are nightly only for now. The getenv() call will be
// removed when they are on by default. See bug 1153266.
if (isArray) {
#ifdef NIGHTLY_BUILD
@@ -2010,17 +2010,19 @@ js::TryConvertToUnboxedLayout(ExclusiveC
layoutSize = ComputePlainObjectLayout(cx, templateShape, properties);
// The entire object must be allocatable inline.
if (UnboxedPlainObject::offsetOfData() + layoutSize > JSObject::MAX_BYTE_SIZE)
return true;
}
- AutoInitGCManagedObject<UnboxedLayout> layout(group->zone()->make_unique<UnboxedLayout>());
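+    // Keep the layout alive on the AutoEnterAnalysis so that, on failure, it
+    // is destroyed only after the analysis (and its GC suppression) has ended.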
+ UniquePtr<UnboxedLayout>& layout = enter.unboxedLayoutToCleanUp;
+ MOZ_ASSERT(!layout);
+ layout = group->zone()->make_unique<UnboxedLayout>();
if (!layout)
return false;
if (isArray) {
layout->initArray(elementType);
} else {
if (!layout->initProperties(properties, layoutSize))
return false;
@@ -2084,17 +2086,16 @@ js::TryConvertToUnboxedLayout(ExclusiveC
if (isArray)
obj->as<UnboxedArrayObject>().fillAfterConvert(cx, values, &valueCursor);
else
obj->as<UnboxedPlainObject>().fillAfterConvert(cx, values, &valueCursor);
}
MOZ_ASSERT(valueCursor == values.length());
- layout.release();
return true;
}
DefineBoxedOrUnboxedFunctor6(SetOrExtendBoxedOrUnboxedDenseElements,
ExclusiveContext*, JSObject*, uint32_t, const Value*, uint32_t,
ShouldUpdateTypes);
DenseElementResult
--- a/js/src/vm/UnboxedObject.h
+++ b/js/src/vm/UnboxedObject.h
@@ -5,16 +5,17 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef vm_UnboxedObject_h
#define vm_UnboxedObject_h
#include "jsgc.h"
#include "jsobj.h"
+#include "vm/Runtime.h"
#include "vm/TypeInference.h"
namespace js {
// Memory required for an unboxed value of a given type. Returns zero for types
// which can't be used for unboxed objects.
static inline size_t
UnboxedTypeSize(JSValueType type)
@@ -315,17 +316,17 @@ class UnboxedPlainObject : public JSObje
return offsetof(UnboxedPlainObject, data_[0]);
}
};
// Try to construct an UnboxedLayout for each of the preliminary objects,
// provided they all match the template shape. If successful, converts the
// preliminary objects and their group to the new unboxed representation.
bool
-TryConvertToUnboxedLayout(ExclusiveContext* cx, Shape* templateShape,
+TryConvertToUnboxedLayout(ExclusiveContext* cx, AutoEnterAnalysis& enter, Shape* templateShape,
ObjectGroup* group, PreliminaryObjectArray* objects);
inline gc::AllocKind
UnboxedLayout::getAllocKind() const
{
MOZ_ASSERT(size());
return gc::GetGCObjectKindForBytes(UnboxedPlainObject::offsetOfData() + size());
}
@@ -514,9 +515,17 @@ class UnboxedArrayObject : public JSObje
MOZ_ASSERT(index <= (CapacityMask >> CapacityShift));
capacityIndexAndInitializedLength_ =
(index << CapacityShift) | initializedLength();
}
};
} // namespace js
+namespace JS {
+
+template <>
+struct DeletePolicy<js::UnboxedLayout> : public js::GCManagedDeletePolicy<js::UnboxedLayout>
+{};
+
+} /* namespace JS */
+
#endif /* vm_UnboxedObject_h */