Begin to separate out runtime fields that should be per-thread,
so as to be compatible with Parallel JS (a.k.a. River Trail).
Currently I am focusing on the GC infrastructure, but there are
other fields that should eventually be refactored in a similar
fashion.
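
As a rough sketch of the shape this moves towards: the TLS slot now
holds a PerThreadData pointer rather than the JSRuntime itself; the
main thread's PerThreadData is embedded in the runtime, while parallel
workers stack-allocate their own for the duration of their work. The
following stand-alone illustration is simplified and uses placeholder
Runtime/thread_local types rather than the real SpiderMonkey/mfbt ones:

    #include <cassert>

    struct Runtime;

    // Per-thread state that used to live directly on the runtime.
    struct PerThreadData {
        Runtime *runtime;        // backpointer to the shared runtime
        int gcAssertNoGCDepth;   // debug-only nesting counter in the real code

        explicit PerThreadData(Runtime *rt)
          : runtime(rt), gcAssertNoGCDepth(0) {}
    };

    // Stand-in for mozilla::ThreadLocal<PerThreadData *>.
    thread_local PerThreadData *TlsPerThreadData = nullptr;

    struct Runtime {
        PerThreadData mainThread;  // main thread's slot is embedded in the runtime
        Runtime() : mainThread(this) {}
    };

    // The main thread publishes the runtime-embedded slot...
    void initMainThread(Runtime *rt) {
        TlsPerThreadData = &rt->mainThread;
    }

    // ...while a worker stack-allocates its own slot, as
    // executeFromWorker() does in the patch below.
    void workerBody(Runtime *rt) {
        PerThreadData thisThread(rt);
        TlsPerThreadData = &thisThread;
        // ... do work; GC bookkeeping now touches thread-local state ...
        assert(TlsPerThreadData->runtime == rt);
        TlsPerThreadData = nullptr;
    }

    int main() {
        Runtime rt;
        initMainThread(&rt);
        workerBody(&rt);
    }
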
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -671,54 +671,62 @@ JS_IsBuiltinFunctionConstructor(JSFuncti
* that should happen only once across all runtimes.
*/
static JSBool js_NewRuntimeWasCalled = JS_FALSE;
/*
* Thread Local Storage slot for storing the runtime for a thread.
*/
namespace JS {
-mozilla::ThreadLocal<JSRuntime *> TlsRuntime;
+mozilla::ThreadLocal<PerThreadData *> TlsPerThreadData;
#ifdef DEBUG
JS_FRIEND_API(void)
EnterAssertNoGCScope()
{
- ++TlsRuntime.get()->gcAssertNoGCDepth;
+ ++TlsPerThreadData.get()->gcAssertNoGCDepth;
}
JS_FRIEND_API(void)
LeaveAssertNoGCScope()
{
- --TlsRuntime.get()->gcAssertNoGCDepth;
- JS_ASSERT(TlsRuntime.get()->gcAssertNoGCDepth >= 0);
+ --TlsPerThreadData.get()->gcAssertNoGCDepth;
+ JS_ASSERT(TlsPerThreadData.get()->gcAssertNoGCDepth >= 0);
}
JS_FRIEND_API(bool)
InNoGCScope()
{
- return TlsRuntime.get()->gcAssertNoGCDepth > 0;
+ return TlsPerThreadData.get()->gcAssertNoGCDepth > 0;
}
JS_FRIEND_API(bool)
NeedRelaxedRootChecks()
{
- return TlsRuntime.get()->gcRelaxRootChecks;
+ return TlsPerThreadData.get()->gcRelaxRootChecks;
}
#else
JS_FRIEND_API(void) EnterAssertNoGCScope() {}
JS_FRIEND_API(void) LeaveAssertNoGCScope() {}
JS_FRIEND_API(bool) InNoGCScope() { return false; }
JS_FRIEND_API(bool) NeedRelaxedRootChecks() { return false; }
#endif
} /* namespace JS */
static const JSSecurityCallbacks NullSecurityCallbacks = { };
+JS::PerThreadData::PerThreadData(JSRuntime *runtime)
+ : runtime(runtime)
+#ifdef DEBUG
+ , gcRelaxRootChecks(false)
+ , gcAssertNoGCDepth(0)
+#endif
+{}
+
JSRuntime::JSRuntime(JSUseHelperThreads useHelperThreads)
: atomsCompartment(NULL),
#ifdef JS_THREADSAFE
ownerThread_(NULL),
#endif
tempLifoAlloc(TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
freeLifoAlloc(TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
execAlloc_(NULL),
@@ -782,20 +790,16 @@ JSRuntime::JSRuntime(JSUseHelperThreads
gcSweepPhase(0),
gcSweepCompartmentIndex(0),
gcSweepKindIndex(0),
gcArenasAllocatedDuringSweep(NULL),
gcInterFrameGC(0),
gcSliceBudget(SliceBudget::Unlimited),
gcIncrementalEnabled(true),
gcExactScanningEnabled(true),
-#ifdef DEBUG
- gcRelaxRootChecks(false),
- gcAssertNoGCDepth(0),
-#endif
gcPoke(false),
heapState(Idle),
#ifdef JS_GC_ZEAL
gcZeal_(0),
gcZealFrequency(0),
gcNextScheduled(0),
gcDeterministicOnly(false),
gcIncrementalLimit(0),
@@ -856,16 +860,17 @@ JSRuntime::JSRuntime(JSUseHelperThreads
jitHardening(false),
ionTop(NULL),
ionJSContext(NULL),
ionStackLimit(0),
ionActivation(NULL),
ionPcScriptCache(NULL),
threadPool(this),
omtc(),
+ mainThread(this),
ionReturnOverride_(MagicValue(JS_ARG_POISON)),
useHelperThreads_(useHelperThreads)
{
/* Initialize infallibly first, so we can goto bad and JS_DestroyRuntime. */
JS_INIT_CLIST(&contextList);
JS_INIT_CLIST(&debuggerList);
JS_INIT_CLIST(&onNewGlobalObjectWatchers);
@@ -879,17 +884,17 @@ JSRuntime::JSRuntime(JSUseHelperThreads
bool
JSRuntime::init(uint32_t maxbytes)
{
#ifdef JS_THREADSAFE
ownerThread_ = PR_GetCurrentThread();
#endif
- JS::TlsRuntime.set(this);
+ JS::TlsPerThreadData.set(&mainThread);
#ifdef JS_METHODJIT_SPEW
JMCheckLogging();
#endif
#if defined(JSGC_ROOT_ANALYSIS) || defined(JSGC_USE_EXACT_ROOTING)
PodArrayZero(thingGCRooters);
#endif
@@ -1014,54 +1019,54 @@ JSRuntime::~JSRuntime()
#ifdef JS_THREADSAFE
void
JSRuntime::setOwnerThread()
{
JS_ASSERT(ownerThread_ == (void *)0xc1ea12); /* "clear" */
JS_ASSERT(requestDepth == 0);
JS_ASSERT(js_NewRuntimeWasCalled);
- JS_ASSERT(JS::TlsRuntime.get() == NULL);
+ JS_ASSERT(JS::TlsPerThreadData.get() == NULL);
ownerThread_ = PR_GetCurrentThread();
- JS::TlsRuntime.set(this);
+ JS::TlsPerThreadData.set(&mainThread);
nativeStackBase = GetNativeStackBase();
if (nativeStackQuota)
JS_SetNativeStackQuota(this, nativeStackQuota);
}
void
JSRuntime::clearOwnerThread()
{
assertValidThread();
JS_ASSERT(requestDepth == 0);
JS_ASSERT(js_NewRuntimeWasCalled);
ownerThread_ = (void *)0xc1ea12; /* "clear" */
- JS::TlsRuntime.set(NULL);
+ JS::TlsPerThreadData.set(NULL);
nativeStackBase = 0;
#if JS_STACK_GROWTH_DIRECTION > 0
nativeStackLimit = UINTPTR_MAX;
#else
nativeStackLimit = 0;
#endif
}
JS_FRIEND_API(void)
JSRuntime::abortIfWrongThread() const
{
if (ownerThread_ != PR_GetCurrentThread())
MOZ_CRASH();
- if (this != JS::TlsRuntime.get())
+ if (this != JS::TlsPerThreadData.get()->runtime)
MOZ_CRASH();
}
JS_FRIEND_API(void)
JSRuntime::assertValidThread() const
{
JS_ASSERT(ownerThread_ == PR_GetCurrentThread());
- JS_ASSERT(this == JS::TlsRuntime.get());
+ JS_ASSERT(this == JS::TlsPerThreadData.get()->runtime);
}
#endif /* JS_THREADSAFE */
JS_PUBLIC_API(JSRuntime *)
JS_NewRuntime(uint32_t maxbytes, JSUseHelperThreads useHelperThreads)
{
if (!js_NewRuntimeWasCalled) {
#ifdef DEBUG
@@ -1088,17 +1093,17 @@ JS_NewRuntime(uint32_t maxbytes, JSUseHe
JS_ASSERT(count == numfmtspecs); \
JS_END_MACRO;
#include "js.msg"
#undef MSG_DEF
#endif /* DEBUG */
InitMemorySubsystem();
- if (!JS::TlsRuntime.init())
+ if (!JS::TlsPerThreadData.init())
return NULL;
js_NewRuntimeWasCalled = JS_TRUE;
}
JSRuntime *rt = js_new<JSRuntime>(useHelperThreads);
if (!rt)
return NULL;
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -2965,17 +2965,18 @@ JS_EndRequest(JSContext *cx);
extern JS_PUBLIC_API(JSBool)
JS_IsInRequest(JSRuntime *rt);
#ifdef __cplusplus
JS_END_EXTERN_C
namespace JS {
-extern mozilla::ThreadLocal<JSRuntime *> TlsRuntime;
+struct PerThreadData;
+extern mozilla::ThreadLocal<PerThreadData *> TlsPerThreadData;
inline bool
IsPoisonedId(jsid iden)
{
if (JSID_IS_STRING(iden))
return JS::IsPoisonedPtr(JSID_TO_STRING(iden));
if (JSID_IS_OBJECT(iden))
return JS::IsPoisonedPtr(JSID_TO_OBJECT(iden));
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -386,16 +386,44 @@ struct JSAtomState
#define PROPERTYNAME_FIELD(name, code, init) js::FixedHeapPtr<js::PropertyName> name;
JS_FOR_EACH_PROTOTYPE(PROPERTYNAME_FIELD)
#undef PROPERTYNAME_FIELD
};
#define NAME_OFFSET(name) offsetof(JSAtomState, name)
#define OFFSET_TO_NAME(rt,off) (*(js::FixedHeapPtr<js::PropertyName>*)((char*)&(rt)->atomState + (off)))
+namespace JS {
+struct PerThreadData
+{
+ JSRuntime *runtime; // backpointer to the full shared runtime
+
+ /*
+ * We save all conservative scanned roots in this vector so that
+ * conservative scanning can be "replayed" deterministically. In DEBUG mode,
+ * this allows us to run a non-incremental GC after every incremental GC to
+ * ensure that no objects were missed.
+ */
+#ifdef DEBUG
+ struct SavedGCRoot {
+ void *thing;
+ JSGCTraceKind kind;
+
+ SavedGCRoot(void *thing, JSGCTraceKind kind) : thing(thing), kind(kind) {}
+ };
+ js::Vector<SavedGCRoot, 0, js::SystemAllocPolicy> gcSavedRoots;
+
+ bool gcRelaxRootChecks;
+ int gcAssertNoGCDepth;
+#endif
+
+ PerThreadData(JSRuntime *runtime);
+};
+}
+
struct JSRuntime : js::RuntimeFriendFields
{
/* Default compartment. */
JSCompartment *atomsCompartment;
/* List of compartments (protected by the GC lock). */
js::CompartmentVector compartments;
@@ -651,35 +679,16 @@ struct JSRuntime : js::RuntimeFriendFiel
/*
* Whether exact stack scanning is enabled for this runtime. This is
* currently only used for dynamic root analysis. Exact scanning starts out
* enabled, and is disabled if e4x has been used.
*/
bool gcExactScanningEnabled;
- /*
- * We save all conservative scanned roots in this vector so that
- * conservative scanning can be "replayed" deterministically. In DEBUG mode,
- * this allows us to run a non-incremental GC after every incremental GC to
- * ensure that no objects were missed.
- */
-#ifdef DEBUG
- struct SavedGCRoot {
- void *thing;
- JSGCTraceKind kind;
-
- SavedGCRoot(void *thing, JSGCTraceKind kind) : thing(thing), kind(kind) {}
- };
- js::Vector<SavedGCRoot, 0, js::SystemAllocPolicy> gcSavedRoots;
-
- bool gcRelaxRootChecks;
- int gcAssertNoGCDepth;
-#endif
-
bool gcPoke;
enum HeapState {
Idle, // doing nothing with the GC heap
Tracing, // tracing the GC heap without collecting, e.g. IterateCompartments()
Collecting // doing a GC of the heap
};
@@ -947,31 +956,32 @@ struct JSRuntime : js::RuntimeFriendFiel
// This points to the most recent Ion activation running on the thread.
js::ion::IonActivation *ionActivation;
// Cache for ion::GetPcScript().
js::ion::PcScriptCache *ionPcScriptCache;
js::ThreadPool threadPool;
js::OffMainThreadCompiler omtc;
+ JS::PerThreadData mainThread;
private:
// In certain cases, we want to optimize certain opcodes to typed instructions,
// to avoid carrying an extra register to feed into an unbox. Unfortunately,
// that's not always possible. For example, a GetPropertyCacheT could return a
// typed double, but if it takes its out-of-line path, it could return an
// object, and trigger invalidation. The invalidation bailout will consider the
// return value to be a double, and create a garbage Value.
//
// To allow the GetPropertyCacheT optimization, we allow the ability for
// GetPropertyCache to override the return value at the top of the stack - the
// value that will be temporarily corrupt. This special override value is set
// only in callVM() targets that are about to return *and* have invalidated
// their callee.
- js::Value ionReturnOverride_;
+ js::Value ionReturnOverride_;
public:
bool hasIonReturnOverride() const {
return !ionReturnOverride_.isMagic();
}
js::Value takeIonReturnOverride() {
js::Value v = ionReturnOverride_;
ionReturnOverride_ = js::MagicValue(JS_ARG_POISON);
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1129,17 +1129,18 @@ MarkIfGCThingWord(JSTracer *trc, uintptr
#endif
JS_SET_TRACING_LOCATION(trc, (void *)w);
void *tmp = thing;
MarkKind(trc, &tmp, traceKind);
JS_ASSERT(tmp == thing);
#ifdef DEBUG
if (trc->runtime->gcIncrementalState == MARK_ROOTS)
- trc->runtime->gcSavedRoots.append(JSRuntime::SavedGCRoot(thing, traceKind));
+ trc->runtime->mainThread.gcSavedRoots.append(
+ JS::PerThreadData::SavedGCRoot(thing, traceKind));
#endif
return CGCT_VALID;
}
static void
MarkWordConservatively(JSTracer *trc, uintptr_t w)
{
@@ -1190,28 +1191,28 @@ MarkRangeConservativelyAndSkipIon(JSTrac
static JS_NEVER_INLINE void
MarkConservativeStackRoots(JSTracer *trc, bool useSavedRoots)
{
JSRuntime *rt = trc->runtime;
#ifdef DEBUG
if (useSavedRoots) {
- for (JSRuntime::SavedGCRoot *root = rt->gcSavedRoots.begin();
- root != rt->gcSavedRoots.end();
+ for (JS::PerThreadData::SavedGCRoot *root = rt->mainThread.gcSavedRoots.begin();
+ root != rt->mainThread.gcSavedRoots.end();
root++)
{
JS_SET_TRACING_NAME(trc, "cstack");
MarkKind(trc, &root->thing, root->kind);
}
return;
}
if (rt->gcIncrementalState == MARK_ROOTS)
- rt->gcSavedRoots.clearAndFree();
+ rt->mainThread.gcSavedRoots.clearAndFree();
#endif
ConservativeGCData *cgcd = &rt->conservativeGC;
if (!cgcd->hasStackToScan()) {
#ifdef JS_THREADSAFE
JS_ASSERT(!rt->requestDepth);
#endif
return;
--- a/js/src/jstaskset.cpp
+++ b/js/src/jstaskset.cpp
@@ -283,19 +283,20 @@ TaskSetSharedContext::transferArenasToCo
}
}
void
TaskSetSharedContext::executeFromWorker(size_t workerId, uintptr_t stackLimit)
{
JS_ASSERT(workerId < numThreads_ - 1);
- JS::TlsRuntime.set(cx_->runtime);
+ JS::PerThreadData thisThread(cx_->runtime);
+ JS::TlsPerThreadData.set(&thisThread);
executePortion(workerId, stackLimit);
- JS::TlsRuntime.set(NULL);
+ JS::TlsPerThreadData.set(NULL);
AutoLockMonitor lock(*this);
uncompleted_ -= 1;
if (blocked_ == uncompleted_) {
// Signal the main thread that we have terminated. It will be
// either working, arranging a rendezvous, or waiting for
// workers to complete.
lock.notify();
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -3412,17 +3412,17 @@ RelaxRootChecks(JSContext *cx, unsigned
{
if (argc > 0) {
JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL, JSSMSG_INVALID_ARGS,
"relaxRootChecks");
return false;
}
#ifdef DEBUG
- cx->runtime->gcRelaxRootChecks = true;
+ cx->runtime->mainThread.gcRelaxRootChecks = true;
#endif
return true;
}
static JSBool
GetMaxArgs(JSContext *cx, unsigned arg, jsval *vp)
{
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -13,16 +13,17 @@
#include "ion/Bailouts.h"
#include "Stack.h"
#include "jsgcinlines.h"
#include "jsobjinlines.h"
#include "jsinterpinlines.h"
#include "jsopcode.h"
+#include "jstaskset.h"
#include "Stack-inl.h"
/* Includes to get to low-level memory-mapping functionality. */
#ifdef XP_WIN
# include "jswin.h"
#elif defined(XP_OS2)
# define INCL_DOSMEMMGR
@@ -1436,25 +1437,37 @@ StackIter::StackIter(JSRuntime *rt, Stac
CompartmentVector &v = rt->compartments;
for (size_t i = 0; i < v.length(); i++)
mjit::ExpandInlineFrames(v[i]);
#endif
startOnSegment(&seg);
settleOnNewState();
}
+/*static*/ JSRuntime *
+StackIter::GetRuntime(const StackIter &other)
+{
+ // Note: this code is likely not safe to execute in parallel worker
+ // threads at the moment.
+ JS_ASSERT(!InParallelSection());
+ if (other.maybecx_) {
+ return other.maybecx_->runtime;
+ }
+ return TlsPerThreadData.get()->runtime;
+}
+
StackIter::StackIter(const StackIter &other)
: maybecx_(other.maybecx_),
savedOption_(other.savedOption_),
state_(other.state_),
fp_(other.fp_),
calls_(other.calls_),
seg_(other.seg_),
pc_(other.pc_),
- script_(other.maybecx_ ? other.maybecx_->runtime : TlsRuntime.get(), other.script_),
+ script_(GetRuntime(other), other.script_),
args_(other.args_)
#ifdef JS_ION
, ionActivations_(other.ionActivations_),
ionFrames_(other.ionFrames_),
ionInlineFrames_(other.ionInlineFrames_)
#endif
{
}
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -1730,16 +1730,17 @@ class StackIter
CallArgs args_;
#ifdef JS_ION
ion::IonActivationIterator ionActivations_;
ion::IonFrameIterator ionFrames_;
ion::InlineFrameIterator ionInlineFrames_;
#endif
+ static JSRuntime *GetRuntime(const StackIter &other);
void poisonRegs();
void popFrame();
void popCall();
#ifdef JS_ION
void popIonFrame();
#endif
void settleOnNewSegment();
void settleOnNewState();