author | Robert Sayre <sayrer@gmail.com> |
Wed, 29 Sep 2010 02:01:12 -0400 | |
changeset 54756 | d7e659b4f80c5ff0f7bf62ab4882759f8d2b8906 |
parent 54706 | a9d1ad0bc3860d77cfd95ee21f85a3fb96621967 (current diff) |
parent 54755 | eba54c4edd6fe8109a287cb8bb1e75f6c1d6e492 (diff) |
child 54757 | 6e9809698df7ee0089eaa43b343b171e1f35aba4 |
child 54768 | 20cc21ddb44fae2efa420ffce0f743c46d6ecca1 |
push id | 16011 |
push user | rsayre@mozilla.com |
push date | Wed, 29 Sep 2010 06:01:57 +0000 |
treeherder | mozilla-central@d7e659b4f80c [default view] [failures only] |
perfherder | [talos] [build metrics] [platform microbench] (compared to previous push) |
reviewers | blockers |
milestone | 2.0b7pre |
first release with | nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
|
last release without | nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
|
configure.in | file | annotate | diff | comparison | revisions |
--- a/configure.in +++ b/configure.in @@ -8067,16 +8067,21 @@ MOZ_ARG_ENABLE_BOOL(static, BUILD_STATIC_LIBS=1, BUILD_STATIC_LIBS=) MOZ_ARG_ENABLE_BOOL(libxul, [ --enable-libxul Enable building of libxul], MOZ_ENABLE_LIBXUL=1, MOZ_ENABLE_LIBXUL=) +# split JS out by default to avoid VS2005 PGO crash (bug 591836). +if test "$OS_ARCH" = "WINNT"; then + ENABLE_SHARED_JS=1 +fi + MOZ_ARG_ENABLE_BOOL(shared-js, [ --enable-shared-js Create a shared JavaScript library.], ENABLE_SHARED_JS=1, ENABLE_SHARED_JS=) if test -z "$MOZ_ENABLE_LIBXUL"; then dnl --disable-libxul implies shared js
--- a/dom/base/nsJSEnvironment.cpp +++ b/dom/base/nsJSEnvironment.cpp @@ -1481,18 +1481,18 @@ nsJSContext::EvaluateStringWithValue(con // SecurityManager said "ok", but don't compile if aVersion is unknown. // Since the caller is responsible for parsing the version strings, we just // check it isn't JSVERSION_UNKNOWN. if (ok && ((JSVersion)aVersion) != JSVERSION_UNKNOWN) { JSAutoRequest ar(mContext); - JSAutoCrossCompartmentCall accc; - if (!accc.enter(mContext, (JSObject *)aScopeObject)) { + JSAutoEnterCompartment ac; + if (!ac.enter(mContext, (JSObject *)aScopeObject)) { JSPRINCIPALS_DROP(mContext, jsprin); stack->Pop(nsnull); return NS_ERROR_FAILURE; } ++mExecuteDepth; ok = ::JS_EvaluateUCScriptForPrincipalsVersion(mContext, @@ -1681,18 +1681,18 @@ nsJSContext::EvaluateString(const nsAStr ++mExecuteDepth; // SecurityManager said "ok", but don't compile if aVersion is unknown. // Since the caller is responsible for parsing the version strings, we just // check it isn't JSVERSION_UNKNOWN. 
if (ok && ((JSVersion)aVersion) != JSVERSION_UNKNOWN) { JSAutoRequest ar(mContext); - JSAutoCrossCompartmentCall accc; - if (!accc.enter(mContext, (JSObject *)aScopeObject)) { + JSAutoEnterCompartment ac; + if (!ac.enter(mContext, (JSObject *)aScopeObject)) { stack->Pop(nsnull); JSPRINCIPALS_DROP(mContext, jsprin); return NS_ERROR_FAILURE; } ok = ::JS_EvaluateUCScriptForPrincipalsVersion(mContext, (JSObject *)aScopeObject, jsprin, @@ -2124,18 +2124,18 @@ nsJSContext::CallEventHandler(nsISupport rv = ConvertSupportsTojsvals(aargv, target, &argc, &argv, poolRelease, tvr); if (NS_FAILED(rv)) { stack->Pop(nsnull); return rv; } jsval funval = OBJECT_TO_JSVAL(static_cast<JSObject *>(aHandler)); - JSAutoCrossCompartmentCall accc; - if (!accc.enter(mContext, target)) { + JSAutoEnterCompartment ac; + if (!ac.enter(mContext, target)) { stack->Pop(nsnull); return NS_ERROR_FAILURE; } ++mExecuteDepth; PRBool ok = ::JS_CallFunctionValue(mContext, target, funval, argc, argv, &rval); --mExecuteDepth;
--- a/dom/src/threads/nsDOMThreadService.cpp +++ b/dom/src/threads/nsDOMThreadService.cpp @@ -392,33 +392,33 @@ public: // Go ahead and trigger the operation callback for this context before we // try to run any JS. That way we'll be sure to cancel or suspend as soon as // possible if the compilation takes too long. JS_TriggerOperationCallback(cx); PRBool killWorkerWhenDone; { nsLazyAutoRequest ar; - JSAutoCrossCompartmentCall axcc; + JSAutoEnterCompartment ac; // Tell the worker which context it will be using - if (mWorker->SetGlobalForContext(cx, &ar, &axcc)) { + if (mWorker->SetGlobalForContext(cx, &ar, &ac)) { NS_ASSERTION(ar.entered(), "SetGlobalForContext must enter request on success"); - NS_ASSERTION(axcc.entered(), "SetGlobalForContext must enter xcc on success"); + NS_ASSERTION(ac.entered(), "SetGlobalForContext must enter compartment on success"); RunQueue(cx, &killWorkerWhenDone); // Remove the global object from the context so that it might be garbage // collected. JS_SetGlobalObject(cx, NULL); JS_SetContextPrivate(cx, NULL); } else { NS_ASSERTION(!ar.entered(), "SetGlobalForContext must not enter request on failure"); - NS_ASSERTION(!axcc.entered(), "SetGlobalForContext must not enter xcc on failure"); + NS_ASSERTION(!ac.entered(), "SetGlobalForContext must not enter compartment on failure"); { // Code in XPConnect assumes that the context's global object won't be // replaced outside of a request. JSAutoRequest ar2(cx); // This is usually due to a parse error in the worker script... JS_SetGlobalObject(cx, NULL);
--- a/dom/src/threads/nsDOMWorker.cpp +++ b/dom/src/threads/nsDOMWorker.cpp @@ -1583,50 +1583,50 @@ nsDOMWorker::PostMessageInternal(PRBool NS_ENSURE_SUCCESS(rv, rv); } return NS_OK; } PRBool nsDOMWorker::SetGlobalForContext(JSContext* aCx, nsLazyAutoRequest *aRequest, - JSAutoCrossCompartmentCall *aCall) + JSAutoEnterCompartment *aComp) { NS_ASSERTION(!NS_IsMainThread(), "Wrong thread!"); - if (!CompileGlobalObject(aCx, aRequest, aCall)) { + if (!CompileGlobalObject(aCx, aRequest, aComp)) { return PR_FALSE; } JS_SetGlobalObject(aCx, mGlobal); return PR_TRUE; } PRBool nsDOMWorker::CompileGlobalObject(JSContext* aCx, nsLazyAutoRequest *aRequest, - JSAutoCrossCompartmentCall *aCall) + JSAutoEnterCompartment *aComp) { NS_ASSERTION(!NS_IsMainThread(), "Wrong thread!"); // On success, we enter a request and a cross-compartment call that both // belong to the caller. But on failure, we must not remain in a request or // cross-compartment call. So we enter both only locally at first. On // failure, the local request and call will automatically get cleaned // up. Once success is certain, we swap them into *aRequest and *aCall. nsLazyAutoRequest localRequest; - JSAutoCrossCompartmentCall localCall; + JSAutoEnterCompartment localAutoCompartment; localRequest.enter(aCx); PRBool success; if (mGlobal) { - success = localCall.enter(aCx, mGlobal); + success = localAutoCompartment.enter(aCx, mGlobal); NS_ENSURE_TRUE(success, PR_FALSE); aRequest->swap(localRequest); - aCall->swap(localCall); + aComp->swap(localAutoCompartment); return PR_TRUE; } if (mCompileAttempted) { // Don't try to recompile a bad script. 
return PR_FALSE; } mCompileAttempted = PR_TRUE; @@ -1657,17 +1657,17 @@ nsDOMWorker::CompileGlobalObject(JSConte NS_ENSURE_SUCCESS(rv, PR_FALSE); JSObject* global; rv = globalWrapper->GetJSObject(&global); NS_ENSURE_SUCCESS(rv, PR_FALSE); NS_ASSERTION(JS_GetGlobalObject(aCx) == global, "Global object mismatch!"); - success = localCall.enter(aCx, global); + success = localAutoCompartment.enter(aCx, global); NS_ENSURE_TRUE(success, PR_FALSE); #ifdef DEBUG { jsval components; if (JS_GetProperty(aCx, global, "Components", &components)) { NS_ASSERTION(components == JSVAL_VOID, "Components property still defined!"); @@ -1729,17 +1729,17 @@ nsDOMWorker::CompileGlobalObject(JSConte mInnerScope = nsnull; mScopeWN = nsnull; return PR_FALSE; } NS_ASSERTION(mPrincipal && mURI, "Script loader didn't set our principal!"); aRequest->swap(localRequest); - aCall->swap(localCall); + aComp->swap(localAutoCompartment); return PR_TRUE; } void nsDOMWorker::SetPool(nsDOMWorkerPool* aPool) { NS_ASSERTION(!mPool, "Shouldn't ever set pool more than once!"); mPool = aPool;
--- a/dom/src/threads/nsDOMWorker.h +++ b/dom/src/threads/nsDOMWorker.h @@ -196,17 +196,17 @@ public: void Resume(); // This just calls IsCanceledNoLock with an autolock around the call. PRBool IsCanceled(); PRBool IsClosing(); PRBool IsSuspended(); - PRBool SetGlobalForContext(JSContext* aCx, nsLazyAutoRequest *aRequest, JSAutoCrossCompartmentCall *aCall); + PRBool SetGlobalForContext(JSContext* aCx, nsLazyAutoRequest *aRequest, JSAutoEnterCompartment *aComp); void SetPool(nsDOMWorkerPool* aPool); nsDOMWorkerPool* Pool() { return mPool; } PRLock* Lock() { @@ -280,17 +280,17 @@ public: eKilled }; private: ~nsDOMWorker(); nsresult PostMessageInternal(PRBool aToInner); - PRBool CompileGlobalObject(JSContext* aCx, nsLazyAutoRequest *aRequest, JSAutoCrossCompartmentCall *aCall); + PRBool CompileGlobalObject(JSContext* aCx, nsLazyAutoRequest *aRequest, JSAutoEnterCompartment *aComp); PRUint32 NextTimeoutId() { return ++mNextTimeoutId; } nsresult AddFeature(nsDOMWorkerFeature* aFeature, JSContext* aCx); void RemoveFeature(nsDOMWorkerFeature* aFeature,
--- a/js/jetpack/JetpackChild.cpp +++ b/js/jetpack/JetpackChild.cpp @@ -426,17 +426,17 @@ JetpackChild::CreateSandbox(JSContext* c JS_ReportError(cx, "createSandbox takes zero arguments"); return JS_FALSE; } JSObject* obj = JS_NewCompartmentAndGlobalObject(cx, const_cast<JSClass*>(&sGlobalClass), NULL); if (!obj) return JS_FALSE; - JSAutoCrossCompartmentCall ac; + JSAutoEnterCompartment ac; if (!ac.enter(cx, obj)) return JS_FALSE; JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(obj)); return JS_InitStandardClasses(cx, obj); } JSBool @@ -457,17 +457,17 @@ JetpackChild::EvalInSandbox(JSContext* c JS_ReportError(cx, "The first argument to evalInSandbox must be a global object created using createSandbox."); return JS_FALSE; } JSString* str = JS_ValueToString(cx, argv[1]); if (!str) return JS_FALSE; - JSAutoCrossCompartmentCall ac; + JSAutoEnterCompartment ac; if (!ac.enter(cx, obj)) return JS_FALSE; js::AutoValueRooter ignored(cx); return JS_EvaluateUCScript(cx, obj, JS_GetStringChars(str), JS_GetStringLength(str), "", 1, ignored.jsval_addr()); }
--- a/js/jsd/jsd_high.c +++ b/js/jsd/jsd_high.c @@ -85,17 +85,17 @@ static JSBool static JSDContext* _newJSDContext(JSRuntime* jsrt, JSD_UserCallbacks* callbacks, void* user, JSObject* scopeobj) { JSDContext* jsdc = NULL; - JSCompartment *compartment; + JSCrossCompartmentCall *call = NULL; if( ! jsrt ) return NULL; if( ! _validateUserCallbacks(callbacks) ) return NULL; jsdc = (JSDContext*) calloc(1, sizeof(JSDContext)); @@ -135,20 +135,20 @@ static JSDContext* jsdc->dumbContext = JS_NewContext(jsdc->jsrt, 256); if( ! jsdc->dumbContext ) goto label_newJSDContext_failure; JS_BeginRequest(jsdc->dumbContext); if( scopeobj ) - compartment = js_SwitchToObjectCompartment(jsdc->dumbContext, scopeobj); + call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, scopeobj); jsdc->glob = JS_NewGlobalObject(jsdc->dumbContext, &global_class); - if( scopeobj ) - js_SwitchToCompartment(jsdc->dumbContext, compartment); + if( call ) + JS_LeaveCrossCompartmentCall(call); if( ! jsdc->glob ) goto label_newJSDContext_failure; if( ! JS_InitStandardClasses(jsdc->dumbContext, jsdc->glob) ) goto label_newJSDContext_failure; JS_EndRequest(jsdc->dumbContext);
--- a/js/jsd/jsd_val.c +++ b/js/jsd/jsd_val.c @@ -148,26 +148,35 @@ jsd_IsValueFunction(JSDContext* jsdc, JS JSBool jsd_IsValueNative(JSDContext* jsdc, JSDValue* jsdval) { JSContext* cx = jsdc->dumbContext; jsval val = jsdval->val; JSFunction* fun; JSExceptionState* exceptionState; + JSCrossCompartmentCall *call = NULL; if(jsd_IsValueFunction(jsdc, jsdval)) { JSBool ok = JS_FALSE; JS_BeginRequest(cx); + call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob); + if(!call) { + JS_EndRequest(cx); + + return JS_FALSE; + } + exceptionState = JS_SaveExceptionState(cx); fun = JS_ValueToFunction(cx, val); JS_RestoreExceptionState(cx, exceptionState); if(fun) ok = JS_GetFunctionScript(cx, fun) ? JS_FALSE : JS_TRUE; + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(cx); JS_ASSERT(fun); return ok; } return !JSVAL_IS_PRIMITIVE(val); } /***************************************************************************/ @@ -198,75 +207,104 @@ jsd_GetValueDouble(JSDContext* jsdc, JSD return JSVAL_TO_DOUBLE(jsdval->val); } JSString* jsd_GetValueString(JSDContext* jsdc, JSDValue* jsdval) { JSContext* cx = jsdc->dumbContext; JSExceptionState* exceptionState; + JSCrossCompartmentCall *call = NULL; if(!jsdval->string) { /* if the jsval is a string, then we don't need to double root it */ if(JSVAL_IS_STRING(jsdval->val)) jsdval->string = JSVAL_TO_STRING(jsdval->val); else { JS_BeginRequest(cx); + call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob); + if(!call) { + JS_EndRequest(cx); + + return NULL; + } + exceptionState = JS_SaveExceptionState(cx); jsdval->string = JS_ValueToString(cx, jsdval->val); JS_RestoreExceptionState(cx, exceptionState); if(jsdval->string) { if(!JS_AddNamedStringRoot(cx, &jsdval->string, "ValueString")) jsdval->string = NULL; } + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(cx); } } return jsdval->string; } const char* jsd_GetValueFunctionName(JSDContext* jsdc, JSDValue* jsdval) { JSContext* cx = jsdc->dumbContext; JSFunction* fun; 
JSExceptionState* exceptionState; + JSCrossCompartmentCall *call = NULL; if(!jsdval->funName && jsd_IsValueFunction(jsdc, jsdval)) { JS_BeginRequest(cx); + + call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob); + if(!call) { + JS_EndRequest(cx); + + return NULL; + } + exceptionState = JS_SaveExceptionState(cx); fun = JS_ValueToFunction(cx, jsdval->val); JS_RestoreExceptionState(cx, exceptionState); + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(cx); if(!fun) return NULL; jsdval->funName = JS_GetFunctionName(fun); } return jsdval->funName; } /***************************************************************************/ JSDValue* jsd_NewValue(JSDContext* jsdc, jsval val) { JSDValue* jsdval; + JSCrossCompartmentCall *call = NULL; if(!(jsdval = (JSDValue*) calloc(1, sizeof(JSDValue)))) return NULL; if(JSVAL_IS_GCTHING(val)) { JSBool ok = JS_FALSE; JS_BeginRequest(jsdc->dumbContext); + + call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob); + if(!call) { + JS_EndRequest(jsdc->dumbContext); + + return NULL; + } + ok = JS_AddNamedValueRoot(jsdc->dumbContext, &jsdval->val, "JSDValue"); + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(jsdc->dumbContext); if(!ok) { free(jsdval); return NULL; } } jsdval->val = val; @@ -274,24 +312,34 @@ jsd_NewValue(JSDContext* jsdc, jsval val JS_INIT_CLIST(&jsdval->props); return jsdval; } void jsd_DropValue(JSDContext* jsdc, JSDValue* jsdval) { + JSCrossCompartmentCall *call = NULL; + JS_ASSERT(jsdval->nref > 0); if(0 == --jsdval->nref) { jsd_RefreshValue(jsdc, jsdval); if(JSVAL_IS_GCTHING(jsdval->val)) { JS_BeginRequest(jsdc->dumbContext); + call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob); + if(!call) { + JS_EndRequest(jsdc->dumbContext); + + return; + } + JS_RemoveValueRoot(jsdc->dumbContext, &jsdval->val); + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(jsdc->dumbContext); } free(jsdval); } } jsval jsd_GetValueWrappedJSVal(JSDContext* jsdc, JSDValue* jsdval) @@ -357,62 +405,81 @@ 
static void _freeProps(JSDContext* jsdc, CLEAR_BIT_FLAG(jsdval->flags, GOT_PROPS); } static JSBool _buildProps(JSDContext* jsdc, JSDValue* jsdval) { JSContext* cx = jsdc->dumbContext; JSPropertyDescArray pda; uintN i; + JSCrossCompartmentCall *call = NULL; JS_ASSERT(JS_CLIST_IS_EMPTY(&jsdval->props)); JS_ASSERT(!(CHECK_BIT_FLAG(jsdval->flags, GOT_PROPS))); JS_ASSERT(JSVAL_IS_OBJECT(jsdval->val)); if(!JSVAL_IS_OBJECT(jsdval->val) || JSVAL_IS_NULL(jsdval->val)) return JS_FALSE; JS_BeginRequest(cx); + call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob); + if(!call) { + JS_EndRequest(jsdc->dumbContext); + + return JS_FALSE; + } + if(!JS_GetPropertyDescArray(cx, JSVAL_TO_OBJECT(jsdval->val), &pda)) { JS_EndRequest(cx); + JS_LeaveCrossCompartmentCall(call); return JS_FALSE; } for(i = 0; i < pda.length; i++) { JSDProperty* prop = _newProperty(jsdc, &pda.array[i], 0); if(!prop) { _freeProps(jsdc, jsdval); break; } JS_APPEND_LINK(&prop->links, &jsdval->props); } JS_PutPropertyDescArray(cx, &pda); + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(cx); SET_BIT_FLAG(jsdval->flags, GOT_PROPS); return !JS_CLIST_IS_EMPTY(&jsdval->props); } #undef DROP_CLEAR_VALUE #define DROP_CLEAR_VALUE(jsdc, x) if(x){jsd_DropValue(jsdc,x); x = NULL;} void jsd_RefreshValue(JSDContext* jsdc, JSDValue* jsdval) { JSContext* cx = jsdc->dumbContext; + JSCrossCompartmentCall *call = NULL; if(jsdval->string) { /* if the jsval is a string, then we didn't need to root the string */ if(!JSVAL_IS_STRING(jsdval->val)) { JS_BeginRequest(cx); + call = JS_EnterCrossCompartmentCall(cx, jsdc->glob); + if(!call) { + JS_EndRequest(cx); + + return; + } + JS_RemoveStringRoot(cx, &jsdval->string); + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(cx); } jsdval->string = NULL; } jsdval->funName = NULL; jsdval->className = NULL; DROP_CLEAR_VALUE(jsdc, jsdval->proto); @@ -474,16 +541,17 @@ jsd_GetValueProperty(JSDContext* jsdc, J JSObject* obj; uintN attrs = 0; JSBool found; JSPropertyDesc pd; const 
jschar * nameChars; size_t nameLen; jsval val, nameval; jsid nameid; + JSCrossCompartmentCall *call = NULL; if(!jsd_IsValueObject(jsdc, jsdval)) return NULL; /* If we already have the prop, then return it */ while(NULL != (jsdprop = jsd_IterateProperties(jsdc, jsdval, &iter))) { JSString* propName = jsd_GetValueString(jsdc, jsdprop->name); @@ -495,48 +563,57 @@ jsd_GetValueProperty(JSDContext* jsdc, J if(!(obj = JSVAL_TO_OBJECT(jsdval->val))) return NULL; nameChars = JS_GetStringChars(name); nameLen = JS_GetStringLength(name); JS_BeginRequest(cx); + call = JS_EnterCrossCompartmentCall(cx, jsdc->glob); + if(!call) { + JS_EndRequest(cx); + + return NULL; + } JS_GetUCPropertyAttributes(cx, obj, nameChars, nameLen, &attrs, &found); if (!found) { + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(cx); return NULL; } JS_ClearPendingException(cx); if(!JS_GetUCProperty(cx, obj, nameChars, nameLen, &val)) { if (JS_IsExceptionPending(cx)) { if (!JS_GetPendingException(cx, &pd.value)) { + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(cx); return NULL; } pd.flags = JSPD_EXCEPTION; } else { pd.flags = JSPD_ERROR; pd.value = JSVAL_VOID; } } else { pd.value = val; } + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(cx); nameval = STRING_TO_JSVAL(name); if (!JS_ValueToId(cx, nameval, &nameid) || !JS_IdToValue(cx, nameid, &pd.id)) { return NULL; } @@ -548,131 +625,176 @@ jsd_GetValueProperty(JSDContext* jsdc, J return _newProperty(jsdc, &pd, JSDPD_HINTED); } JSDValue* jsd_GetValuePrototype(JSDContext* jsdc, JSDValue* jsdval) { + JSCrossCompartmentCall *call = NULL; + if(!(CHECK_BIT_FLAG(jsdval->flags, GOT_PROTO))) { JSObject* obj; JSObject* proto; JS_ASSERT(!jsdval->proto); SET_BIT_FLAG(jsdval->flags, GOT_PROTO); if(!JSVAL_IS_OBJECT(jsdval->val)) return NULL; if(!(obj = JSVAL_TO_OBJECT(jsdval->val))) return NULL; JS_BeginRequest(jsdc->dumbContext); + call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob); + if(!call) { + JS_EndRequest(jsdc->dumbContext); + + 
return NULL; + } proto = JS_GetPrototype(jsdc->dumbContext, obj); + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(jsdc->dumbContext); if(!proto) return NULL; jsdval->proto = jsd_NewValue(jsdc, OBJECT_TO_JSVAL(proto)); } if(jsdval->proto) jsdval->proto->nref++; return jsdval->proto; } JSDValue* jsd_GetValueParent(JSDContext* jsdc, JSDValue* jsdval) { + JSCrossCompartmentCall *call = NULL; + if(!(CHECK_BIT_FLAG(jsdval->flags, GOT_PARENT))) { JSObject* obj; JSObject* parent; JS_ASSERT(!jsdval->parent); SET_BIT_FLAG(jsdval->flags, GOT_PARENT); if(!JSVAL_IS_OBJECT(jsdval->val)) return NULL; if(!(obj = JSVAL_TO_OBJECT(jsdval->val))) return NULL; JS_BeginRequest(jsdc->dumbContext); + call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob); + if(!call) { + JS_EndRequest(jsdc->dumbContext); + + return NULL; + } parent = JS_GetParent(jsdc->dumbContext,obj); + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(jsdc->dumbContext); if(!parent) return NULL; jsdval->parent = jsd_NewValue(jsdc, OBJECT_TO_JSVAL(parent)); } if(jsdval->parent) jsdval->parent->nref++; return jsdval->parent; } JSDValue* jsd_GetValueConstructor(JSDContext* jsdc, JSDValue* jsdval) { + JSCrossCompartmentCall *call = NULL; + if(!(CHECK_BIT_FLAG(jsdval->flags, GOT_CTOR))) { JSObject* obj; JSObject* proto; JSObject* ctor; JS_ASSERT(!jsdval->ctor); SET_BIT_FLAG(jsdval->flags, GOT_CTOR); if(!JSVAL_IS_OBJECT(jsdval->val)) return NULL; if(!(obj = JSVAL_TO_OBJECT(jsdval->val))) return NULL; JS_BeginRequest(jsdc->dumbContext); + call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob); + if(!call) { + JS_EndRequest(jsdc->dumbContext); + + return NULL; + } proto = JS_GetPrototype(jsdc->dumbContext,obj); if(!proto) { + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(jsdc->dumbContext); return NULL; } ctor = JS_GetConstructor(jsdc->dumbContext,proto); + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(jsdc->dumbContext); if(!ctor) return NULL; jsdval->ctor = jsd_NewValue(jsdc, 
OBJECT_TO_JSVAL(ctor)); } if(jsdval->ctor) jsdval->ctor->nref++; return jsdval->ctor; } const char* jsd_GetValueClassName(JSDContext* jsdc, JSDValue* jsdval) { jsval val = jsdval->val; + JSCrossCompartmentCall *call = NULL; + if(!jsdval->className && JSVAL_IS_OBJECT(val)) { JSObject* obj; if(!(obj = JSVAL_TO_OBJECT(val))) return NULL; JS_BeginRequest(jsdc->dumbContext); + call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob); + if(!call) { + JS_EndRequest(jsdc->dumbContext); + + return NULL; + } if(JS_GET_CLASS(jsdc->dumbContext, obj)) jsdval->className = JS_GET_CLASS(jsdc->dumbContext, obj)->name; + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(jsdc->dumbContext); } return jsdval->className; } JSDScript* jsd_GetScriptForValue(JSDContext* jsdc, JSDValue* jsdval) { JSContext* cx = jsdc->dumbContext; jsval val = jsdval->val; - JSFunction* fun; + JSFunction* fun = NULL; JSExceptionState* exceptionState; JSScript* script = NULL; JSDScript* jsdscript; + JSCrossCompartmentCall *call = NULL; if (!jsd_IsValueFunction(jsdc, jsdval)) return NULL; JS_BeginRequest(cx); + call = JS_EnterCrossCompartmentCall(cx, JSVAL_TO_OBJECT(val)); + if (!call) { + JS_EndRequest(cx); + + return NULL; + } exceptionState = JS_SaveExceptionState(cx); fun = JS_ValueToFunction(cx, val); JS_RestoreExceptionState(cx, exceptionState); if (fun) script = JS_GetFunctionScript(cx, fun); + JS_LeaveCrossCompartmentCall(call); JS_EndRequest(cx); if (!script) return NULL; JSD_LOCK_SCRIPTS(jsdc); jsdscript = jsd_FindJSDScript(jsdc, script); JSD_UNLOCK_SCRIPTS(jsdc);
--- a/js/narcissus/jsexec.js +++ b/js/narcissus/jsexec.js @@ -158,44 +158,44 @@ Narcissus.interpreter = (function() { var globalHandler = definitions.makePassthruHandler(narcissusGlobal); globalHandler.has = function(name) { if (name in narcissusGlobal) { return true; } // Hide Narcissus implementation code. else if (name === "Narcissus") { return false; } else { return (name in hostGlobal); } }; globalHandler.get = function(receiver, name) { - if (narcissusGlobal.hasOwnProperty(name)) { + if (narcissusGlobal.hasOwnProperty(name)) return narcissusGlobal[name]; - } + var globalFun = hostGlobal[name]; if (definitions.isNativeCode(globalFun)) { // Enables native browser functions like 'alert' to work correctly. return Proxy.createFunction( - definitions.makePassthruHandler(globalFun), - function() { return globalFun.apply(hostGlobal, arguments); }, - function() { - var a = arguments; - switch (a.length) { - case 0: - return new globalFun(); - case 1: - return new globalFun(a[0]); - case 2: - return new globalFun(a[0], a[1]); - case 3: - return new globalFun(a[0], a[1], a[2]); - default: - var argStr = ""; - for (var i=0; i<a.length; i++) { - argStr += 'a[' + i + '],'; - } - return eval('new ' + name + '(' + argStr.slice(0,-1) + ');'); + definitions.makePassthruHandler(globalFun), + function() { return globalFun.apply(hostGlobal, arguments); }, + function() { + var a = arguments; + switch (a.length) { + case 0: + return new globalFun(); + case 1: + return new globalFun(a[0]); + case 2: + return new globalFun(a[0], a[1]); + case 3: + return new globalFun(a[0], a[1], a[2]); + default: + var argStr = ""; + for (var i=0; i<a.length; i++) { + argStr += 'a[' + i + '],'; } - }); + return eval('new ' + name + '(' + argStr.slice(0,-1) + ');'); + } + }); } else { return globalFun; }; }; var global = Proxy.create(globalHandler); // Helper to avoid Object.prototype.hasOwnProperty polluting scope objects. 
function hasDirectProperty(o, p) { @@ -302,29 +302,29 @@ Narcissus.interpreter = (function() { if (v !== null) return v; } var message = r + " (type " + (typeof v) + ") has no properties"; throw rn ? new TypeError(message, rn.filename, rn.lineno) : new TypeError(message); } - function valuatePhis(n, v) { + function evaluatePhis(n, v) { var ps = n.phiUses; if (!ps) return; for (var i = 0, j = ps.length; i < j; i++) { // If the thing we're valuating is already equal to the thing we want // to valuate it to, we have fully saturated (and have a cycle), and // thus we should break. if (ps[i].v === v) break; ps[i].v = v; - valuatePhis(ps[i], v); + evaluatePhis(ps[i], v); } } function execute(n, x) { var a, f, i, j, r, s, t, u, v; switch (n.type) { case FUNCTION: @@ -838,23 +838,23 @@ Narcissus.interpreter = (function() { // lvalues, so we safely get the cached value directly. var resolved = n.resolve(); if (n.forward && !resolved.intervened && !(resolved.type == FUNCTION && resolved.functionForm == parser.DECLARED_FORM)) { v = resolved.v; break; - } else { - for (s = x.scope; s; s = s.parent) { - if (n.value in s.object) - break; - } - v = new Reference(s && s.object, n.value, n); } + + for (s = x.scope; s; s = s.parent) { + if (n.value in s.object) + break; + } + v = new Reference(s && s.object, n.value, n); break; case NUMBER: case STRING: case REGEXP: v = n.value; break; @@ -864,17 +864,17 @@ Narcissus.interpreter = (function() { default: throw "PANIC: unknown operation " + n.type + ": " + uneval(n); } if (n.backwards) { n.v = v; } - valuatePhis(n, v); + evaluatePhis(n, v); return v; } function Activation(f, a) { for (var i = 0, j = f.params.length; i < j; i++) definitions.defineProperty(this, f.params[i], a[i], true); definitions.defineProperty(this, "arguments", a, true);
--- a/js/narcissus/jsparse.js +++ b/js/narcissus/jsparse.js @@ -81,20 +81,18 @@ Narcissus.parser = (function() { function bindSubBuilders(builder, proto) { for (var ns in proto) { var unbound = proto[ns]; // We do not want to bind functions like setHoists. if (typeof unbound !== "object") continue; - /* - * We store the bound sub-builder as builder's own property - * so that we can have multiple builders at the same time. - */ + // We store the bound sub-builder as builder's own property + // so that we can have multiple builders at the same time. var bound = builder[ns] = {}; for (var m in unbound) { bound[m] = bindMethod(unbound[m], builder); } } } /* @@ -1209,21 +1207,17 @@ Narcissus.parser = (function() { // See http://bugzilla.mozilla.org/show_bug.cgi?id=238945. t.match(SEMICOLON); return n; } break; case BREAK: case CONTINUE: - if (tt === BREAK) { - b = builder.BREAK; - } else { - b = builder.CONTINUE; - } + b = (tt === BREAK) ? builder.BREAK : builder.CONTINUE; n = b.build(t); if (t.peekOnSameLine() === IDENTIFIER) { t.get(); b.setLabel(n, t.token.value); } ss = x.stmtStack;
--- a/js/narcissus/jsssa.js +++ b/js/narcissus/jsssa.js @@ -1063,31 +1063,28 @@ */ function extendBuilder(child, super) { var childProto = child.prototype, superProto = super.prototype; for (var ns in super.prototype) { var childNS = childProto[ns]; var superNS = superProto[ns]; - var childNSType = typeof childNS; - if (childNSType === "undefined") { + if (childNS === undefined) { childProto[ns] = superNS; - } else if (childNSType === "object") { + } else { for (var m in superNS) { let childMethod = childNS[m]; let superMethod = superNS[m]; - if (typeof childMethod === "undefined") { + if (childMethod === undefined) { childNS[m] = superMethod; } else { childNS[m] = function() { - if (this.binds) - return childMethod.apply(this, arguments); - else - return superMethod.apply(this, arguments); + return (this.binds ? childMethod : superMethod) + .apply(this, arguments); }; } } } } } function SSABuilder() {
--- a/js/src/Makefile.in +++ b/js/src/Makefile.in @@ -127,16 +127,17 @@ CPPSRCS = \ jsdhash.cpp \ jsdtoa.cpp \ jsemit.cpp \ jsexn.cpp \ jsfun.cpp \ jsgc.cpp \ jsgcchunk.cpp \ jsgcstats.cpp \ + jscompartment.cpp \ jshash.cpp \ jsinterp.cpp \ jsinvoke.cpp \ jsiter.cpp \ jslock.cpp \ jslog2.cpp \ jsmath.cpp \ jsnativestack.cpp \ @@ -180,18 +181,20 @@ INSTALLED_HEADERS = \ jscompat.h \ jsdate.h \ jsdbgapi.h \ jsdhash.h \ jsdtoa.h \ jsemit.h \ jsfun.h \ jsgc.h \ + jscell.h \ jsgcchunk.h \ jsgcstats.h \ + jscompartment.h \ jshash.h \ jsinterp.h \ jsinttypes.h \ jsiter.h \ jslock.h \ jslong.h \ jsmath.h \ jsobj.h \ @@ -681,25 +684,27 @@ endif ifdef MOZ_MEMORY ifeq ($(OS_ARCH),Darwin) LDFLAGS += -ljemalloc endif endif ifdef SOLARIS_SUNPRO_CXX +ifeq ($(TARGET_CPU),sparc) # Sun Studio SPARC doesn't work well with gcc inline asm, use lock_SunOS_sparc*.il jslock.o: jslock.cpp Makefile.in lock_sparcv8plus.il lock_sparcv9.il $(REPORT_BUILD) @$(MAKE_DEPS_AUTO_CXX) ifeq (sparcv9,$(findstring sparcv9,$(OS_TEST))) $(CXX) -o $@ -c $(COMPILE_CFLAGS) $(srcdir)/lock_sparcv9.il $< else $(CXX) -o $@ -c $(COMPILE_CFLAGS) $(srcdir)/lock_sparcv8plus.il $< endif # sparcv9 +endif # sparc endif # SOLARIS_SUNPRO_CXX ifeq ($(OS_ARCH),IRIX) ifndef GNU_CC _COMPILE_CFLAGS = $(patsubst -O%,-O1,$(COMPILE_CFLAGS)) jsapi.o jsxdrapi.o jsarena.o jsarray.o jsatom.o jsemit.o jsfun.o jsinterp.o jsreflect.o jsregexp.o jsparse.o jsopcode.o jsscript.o: %.o: %.cpp Makefile.in $(REPORT_BUILD) @$(MAKE_DEPS_AUTO_CXX)
--- a/js/src/jsapi-tests/testContexts.cpp +++ b/js/src/jsapi-tests/testContexts.cpp @@ -63,18 +63,20 @@ BEGIN_TEST(testContexts_bug561444) ThreadData *d = (ThreadData *) arg; JSContext *cx = JS_NewContext(d->rt, 8192); if (!cx) return; JS_BeginRequest(cx); { jsvalRoot v(cx); - JSAutoCrossCompartmentCall crossCall; - crossCall.enter(cx, d->obj); + + JSAutoEnterCompartment ac; + ac.enterAndIgnoreErrors(cx, d->obj); + if (!JS_EvaluateScript(cx, d->obj, d->code, strlen(d->code), __FILE__, __LINE__, v.addr())) return; } JS_DestroyContext(cx); d->ok = true; } END_TEST(testContexts_bug561444) #endif @@ -82,18 +84,18 @@ END_TEST(testContexts_bug561444) BEGIN_TEST(testContexts_bug563735) { JSContext *cx2 = JS_NewContext(rt, 8192); CHECK(cx2); JSBool ok; { JSAutoRequest req(cx2); - JSAutoCrossCompartmentCall crossCall; - CHECK(crossCall.enter(cx2, global)); + JSAutoEnterCompartment ac; + CHECK(ac.enter(cx2, global)); jsval v = JSVAL_NULL; ok = JS_SetProperty(cx2, global, "x", &v); } CHECK(ok); EXEC("(function () { for (var i = 0; i < 9; i++) ; })();"); JS_DestroyContext(cx2);
--- a/js/src/jsapi-tests/testIntString.cpp +++ b/js/src/jsapi-tests/testIntString.cpp @@ -1,17 +1,40 @@ /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- * vim: set ts=8 sw=4 et tw=99: */ #include "tests.h" +#include "jsstr.h" BEGIN_TEST(testIntString_bug515273) { jsvalRoot v(cx); - EVAL("'42';", v.addr()); + EVAL("'1';", v.addr()); JSString *str = JSVAL_TO_STRING(v.value()); - const char *bytes = JS_GetStringBytes(str); - CHECK(strcmp(bytes, "42") == 0); + CHECK(JSString::isStatic(str)); + CHECK(strcmp(JS_GetStringBytes(str), "1") == 0); + + EVAL("'42';", v.addr()); + str = JSVAL_TO_STRING(v.value()); + CHECK(JSString::isStatic(str)); + CHECK(strcmp(JS_GetStringBytes(str), "42") == 0); + + EVAL("'111';", v.addr()); + str = JSVAL_TO_STRING(v.value()); + CHECK(JSString::isStatic(str)); + CHECK(strcmp(JS_GetStringBytes(str), "111") == 0); + + /* Test other types of static strings. */ + EVAL("'a';", v.addr()); + str = JSVAL_TO_STRING(v.value()); + CHECK(JSString::isStatic(str)); + CHECK(strcmp(JS_GetStringBytes(str), "a") == 0); + + EVAL("'bc';", v.addr()); + str = JSVAL_TO_STRING(v.value()); + CHECK(JSString::isStatic(str)); + CHECK(strcmp(JS_GetStringBytes(str), "bc") == 0); + return true; } END_TEST(testIntString_bug515273)
--- a/js/src/jsapi-tests/tests.h +++ b/js/src/jsapi-tests/tests.h @@ -281,17 +281,20 @@ protected: } virtual JSObject * createGlobal() { /* Create the global object. */ JSObject *global = JS_NewCompartmentAndGlobalObject(cx, getGlobalClass(), NULL); if (!global) return NULL; - JSAutoEnterCompartment enter(cx, global); + JSAutoEnterCompartment ac; + if (!ac.enter(cx, global)) + return NULL; + /* Populate the global object with the standard globals, like Object and Array. */ if (!JS_InitStandardClasses(cx, global)) return NULL; return global; } };
--- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -101,16 +101,17 @@ #include "methodjit/Logging.h" #endif #if JS_HAS_XML_SUPPORT #include "jsxml.h" #endif using namespace js; +using namespace js::gc; class AutoVersionAPI { JSContext * const cx; JSVersion oldVersion; bool oldVersionWasOverride; uint32 oldOptions; @@ -702,20 +703,16 @@ JSRuntime::~JSRuntime() if (stateChange) JS_DESTROY_CONDVAR(stateChange); if (titleSharingDone) JS_DESTROY_CONDVAR(titleSharingDone); if (debuggerLock) JS_DESTROY_LOCK(debuggerLock); #endif propertyTree.finish(); - /* Delete all remaining Compartments. Ideally only the defaultCompartment should be left. */ - for (JSCompartment **c = compartments.begin(); c != compartments.end(); ++c) - delete *c; - compartments.clear(); } JS_PUBLIC_API(JSRuntime *) JS_NewRuntime(uint32 maxbytes) { #ifdef DEBUG if (!js_NewRuntimeWasCalled) { /* @@ -756,22 +753,16 @@ JS_NewRuntime(uint32 maxbytes) JS_DestroyRuntime(rt); return NULL; } return rt; } JS_PUBLIC_API(void) -JS_CommenceRuntimeShutDown(JSRuntime *rt) -{ - rt->gcFlushCodeCaches = true; -} - -JS_PUBLIC_API(void) JS_DestroyRuntime(JSRuntime *rt) { rt->~JSRuntime(); js_free(rt); } #ifdef JS_REPRMETER @@ -1185,39 +1176,29 @@ JS_LeaveCrossCompartmentCall(JSCrossComp { AutoCompartment *realcall = reinterpret_cast<AutoCompartment *>(call); CHECK_REQUEST(realcall->context); realcall->leave(); delete realcall; } bool -JSAutoCrossCompartmentCall::enter(JSContext *cx, JSObject *target) +JSAutoEnterCompartment::enter(JSContext *cx, JSObject *target) { JS_ASSERT(!call); if (cx->compartment == target->getCompartment(cx)) return true; call = JS_EnterCrossCompartmentCall(cx, target); return call != NULL; } -JS_FRIEND_API(JSCompartment *) -js_SwitchToCompartment(JSContext *cx, JSCompartment *compartment) -{ - JSCompartment *c = cx->compartment; - cx->compartment = compartment; - return c; -} - -JS_FRIEND_API(JSCompartment *) -js_SwitchToObjectCompartment(JSContext *cx, JSObject *obj) -{ - JSCompartment *c = 
cx->compartment; - cx->compartment = obj->getCompartment(cx); - return c; +void +JSAutoEnterCompartment::enterAndIgnoreErrors(JSContext *cx, JSObject *target) +{ + (void) enter(cx, target); } JS_PUBLIC_API(void *) JS_SetCompartmentPrivate(JSContext *cx, JSCompartment *compartment, void *data) { CHECK_REQUEST(cx); void *old = compartment->data; compartment->data = data; @@ -2078,17 +2059,17 @@ JS_TraceRuntime(JSTracer *trc) { TraceRuntime(trc); } JS_PUBLIC_API(void) JS_CallTracer(JSTracer *trc, void *thing, uint32 kind) { JS_ASSERT(thing); - Mark(trc, thing, kind); + MarkKind(trc, thing, kind); } #ifdef DEBUG #ifdef HAVE_XPCONNECT #include "dump_xpc.h" #endif @@ -2574,17 +2555,17 @@ JS_SetGCCallbackRT(JSRuntime *rt, JSGCCa return oldcb; } JS_PUBLIC_API(JSBool) JS_IsAboutToBeFinalized(JSContext *cx, void *thing) { JS_ASSERT(thing); JS_ASSERT(!cx->runtime->gcMarkingTracer); - return js_IsAboutToBeFinalized(thing); + return IsAboutToBeFinalized(thing); } JS_PUBLIC_API(void) JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value) { switch (key) { case JSGC_MAX_BYTES: rt->gcMaxBytes = value;
--- a/js/src/jsapi.h +++ b/js/src/jsapi.h @@ -712,18 +712,18 @@ JS_SameValue(JSContext *cx, jsval v1, js #define JS_NewRuntime JS_Init #define JS_DestroyRuntime JS_Finish #define JS_LockRuntime JS_Lock #define JS_UnlockRuntime JS_Unlock extern JS_PUBLIC_API(JSRuntime *) JS_NewRuntime(uint32 maxbytes); -extern JS_PUBLIC_API(void) -JS_CommenceRuntimeShutDown(JSRuntime *rt); +/* Deprecated. */ +#define JS_CommenceRuntimeShutDown(rt) ((void) 0) extern JS_PUBLIC_API(void) JS_DestroyRuntime(JSRuntime *rt); extern JS_PUBLIC_API(void) JS_ShutDown(void); JS_PUBLIC_API(void *) @@ -952,63 +952,44 @@ extern JS_PUBLIC_API(void *) JS_GetCompartmentPrivate(JSContext *cx, JSCompartment *compartment); extern JS_PUBLIC_API(JSBool) JS_WrapObject(JSContext *cx, JSObject **objp); extern JS_PUBLIC_API(JSBool) JS_WrapValue(JSContext *cx, jsval *vp); -extern JS_FRIEND_API(JSCompartment *) -js_SwitchToCompartment(JSContext *cx, JSCompartment *compartment); - -extern JS_FRIEND_API(JSCompartment *) -js_SwitchToObjectCompartment(JSContext *cx, JSObject *obj); - #ifdef __cplusplus JS_END_EXTERN_C -class JS_PUBLIC_API(JSAutoCrossCompartmentCall) +class JS_PUBLIC_API(JSAutoEnterCompartment) { JSCrossCompartmentCall *call; + public: - JSAutoCrossCompartmentCall() : call(NULL) {} + JSAutoEnterCompartment() : call(NULL) {} bool enter(JSContext *cx, JSObject *target); + void enterAndIgnoreErrors(JSContext *cx, JSObject *target); + bool entered() const { return call != NULL; } - ~JSAutoCrossCompartmentCall() { + ~JSAutoEnterCompartment() { if (call) JS_LeaveCrossCompartmentCall(call); } - void swap(JSAutoCrossCompartmentCall &other) { + void swap(JSAutoEnterCompartment &other) { JSCrossCompartmentCall *tmp = call; call = other.call; other.call = tmp; } }; -class JSAutoEnterCompartment -{ - JSContext *cx; - JSCompartment *compartment; - public: - JSAutoEnterCompartment(JSContext *cx, JSCompartment *newCompartment) : cx(cx) { - compartment = js_SwitchToCompartment(cx, newCompartment); - } - 
JSAutoEnterCompartment(JSContext *cx, JSObject *target) : cx(cx) { - compartment = js_SwitchToObjectCompartment(cx, target); - } - ~JSAutoEnterCompartment() { - js_SwitchToCompartment(cx, compartment); - } -}; - JS_BEGIN_EXTERN_C #endif extern JS_PUBLIC_API(JSObject *) JS_GetGlobalObject(JSContext *cx); extern JS_PUBLIC_API(void) JS_SetGlobalObject(JSContext *cx, JSObject *obj);
--- a/js/src/jsarray.cpp +++ b/js/src/jsarray.cpp @@ -102,16 +102,17 @@ #include "jsstaticcheck.h" #include "jsvector.h" #include "jsatominlines.h" #include "jsobjinlines.h" #include "jscntxtinlines.h" using namespace js; +using namespace js::gc; /* 2^32 - 1 as a number and a string */ #define MAXINDEX 4294967295u #define MAXSTR "4294967295" /* Small arrays are dense, no matter what. */ #define MIN_SPARSE_INDEX 256
--- a/js/src/jsatom.cpp +++ b/js/src/jsatom.cpp @@ -60,18 +60,17 @@ #include "jsversion.h" #include "jsxml.h" #include "jsstrinlines.h" #include "jsatominlines.h" #include "jsobjinlines.h" using namespace js; - -using namespace js; +using namespace js::gc; /* * ATOM_HASH assumes that JSHashNumber is 32-bit even on 64-bit systems. */ JS_STATIC_ASSERT(sizeof(JSHashNumber) == 4); JS_STATIC_ASSERT(sizeof(JSAtom *) == JS_BYTES_PER_WORD); /* @@ -439,18 +438,18 @@ void js_SweepAtomState(JSContext *cx) { JSAtomState *state = &cx->runtime->atomState; for (AtomSet::Enum e(state->atoms); !e.empty(); e.popFront()) { AtomEntryType entry = e.front(); if (AtomEntryFlags(entry) & (ATOM_PINNED | ATOM_INTERNED)) { /* Pinned or interned key cannot be finalized. */ - JS_ASSERT(!js_IsAboutToBeFinalized(AtomEntryToKey(entry))); - } else if (js_IsAboutToBeFinalized(AtomEntryToKey(entry))) { + JS_ASSERT(!IsAboutToBeFinalized(AtomEntryToKey(entry))); + } else if (IsAboutToBeFinalized(AtomEntryToKey(entry))) { e.removeFront(); } } } JSAtom * js_AtomizeString(JSContext *cx, JSString *str, uintN flags) { @@ -495,17 +494,17 @@ js_AtomizeString(JSContext *cx, JSString if (jsuint(i) < INT_STRING_LIMIT) return STRING_TO_ATOM(JSString::intString(i)); } } JSAtomState *state = &cx->runtime->atomState; AtomSet &atoms = state->atoms; - JS_LOCK(cx, &state->lock); + AutoLockDefaultCompartment lock(cx); AtomSet::AddPtr p = atoms.lookupForAdd(str); /* Hashing the string should have flattened it if it was a rope. */ JS_ASSERT(str->isFlat() || str->isDependent()); JSString *key; if (p) { key = AtomEntryToKey(*p); @@ -516,19 +515,19 @@ js_AtomizeString(JSContext *cx, JSString * operation. For example, it can trigger GC which may rehash the table * and make the entry invalid. 
*/ if (!(flags & ATOM_TMPSTR) && str->isFlat()) { str->flatClearMutable(); key = str; atoms.add(p, StringToInitialAtomEntry(key)); } else { - JS_UNLOCK(cx, &state->lock); + if (flags & ATOM_TMPSTR) { + SwitchToCompartment sc(cx, cx->runtime->defaultCompartment); - if (flags & ATOM_TMPSTR) { if (flags & ATOM_NOCOPY) { key = js_NewString(cx, str->flatChars(), str->flatLength()); if (!key) return NULL; /* Finish handing off chars to the GC'ed key string. */ str->mChars = NULL; } else { @@ -538,31 +537,28 @@ js_AtomizeString(JSContext *cx, JSString } } else { JS_ASSERT(str->isDependent()); if (!str->undepend(cx)) return NULL; key = str; } - JS_LOCK(cx, &state->lock); if (!atoms.relookupOrAdd(p, key, StringToInitialAtomEntry(key))) { - JS_UNLOCK(cx, &state->lock); JS_ReportOutOfMemory(cx); /* SystemAllocPolicy does not report */ return NULL; } } key->flatSetAtomized(); } AddAtomEntryFlags(*p, flags & (ATOM_PINNED | ATOM_INTERNED)); JS_ASSERT(key->isAtomized()); JSAtom *atom = STRING_TO_ATOM(key); - JS_UNLOCK(cx, &state->lock); return atom; } JSAtom * js_Atomize(JSContext *cx, const char *bytes, size_t length, uintN flags) { jschar *chars; JSString str;
new file mode 100644 --- /dev/null +++ b/js/src/jscell.h @@ -0,0 +1,99 @@ +/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- + * + * ***** BEGIN LICENSE BLOCK ***** + * Version: MPL 1.1/GPL 2.0/LGPL 2.1 + * + * The contents of this file are subject to the Mozilla Public License Version + * 1.1 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * http://www.mozilla.org/MPL/ + * + * Software distributed under the License is distributed on an "AS IS" basis, + * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License + * for the specific language governing rights and limitations under the + * License. + * + * The Original Code is SpiderMonkey code. + * + * The Initial Developer of the Original Code is + * Mozilla Corporation. + * Portions created by the Initial Developer are Copyright (C) 2010 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * Gregor Wagner <anygregor@gmail.com> + * + * Alternatively, the contents of this file may be used under the terms of + * either of the GNU General Public License Version 2 or later (the "GPL"), + * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your + * decision by deleting the provisions above and replace them with the notice + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. 
+ * + * ***** END LICENSE BLOCK ***** */ + +#ifndef jscell_h___ +#define jscell_h___ + +struct JSCompartment; + +namespace js { +namespace gc { + +template <typename T> struct Arena; +struct ArenaBitmap; +struct MarkingDelay; +struct Chunk; +struct FreeCell; + +/* + * A GC cell is the base class for GC Things like JSObject, JSShortString, + * JSFunction, JSXML and for an empty cell called FreeCell. It helps avoiding + * casts from an Object to a Cell whenever we call GC related mark functions. + * Cell is not the base Class for JSString because static initialization + * (used for unitStringTables) does not work with inheritance. + */ + +struct Cell { + static const size_t CellShift = 3; + static const size_t CellSize = size_t(1) << CellShift; + static const size_t CellMask = CellSize - 1; + + inline Arena<Cell> *arena() const; + inline Chunk *chunk() const; + inline ArenaBitmap *bitmap() const; + JS_ALWAYS_INLINE size_t cellIndex() const; + + JS_ALWAYS_INLINE void mark(uint32 color) const; + JS_ALWAYS_INLINE bool isMarked(uint32 color) const; + JS_ALWAYS_INLINE bool markIfUnmarked(uint32 color) const; + + inline JSCompartment *compartment() const; + + /* Needed for compatibility reasons because Cell can't be a base class of JSString */ + JS_ALWAYS_INLINE js::gc::Cell *asCell() { return this; } + + JS_ALWAYS_INLINE js::gc::FreeCell *asFreeCell() { + return reinterpret_cast<FreeCell *>(this); + } +}; + +/* FreeCell has always size 8 */ +struct FreeCell : Cell { + union { + FreeCell *link; + double data; + }; +}; + +JS_STATIC_ASSERT(sizeof(FreeCell) == 8); + +} /* namespace gc */ +} /* namespace js */ + +#endif /* jscell_h___ */
--- a/js/src/jscntxt.cpp +++ b/js/src/jscntxt.cpp @@ -75,16 +75,17 @@ #include "jsstr.h" #include "jstracer.h" #ifdef JS_METHODJIT # include "assembler/assembler/MacroAssembler.h" #endif #include "jscntxtinlines.h" +#include "jscompartment.h" #include "jsinterpinlines.h" #include "jsobjinlines.h" #ifdef XP_WIN # include "jswin.h" #elif defined(XP_OS2) # define INCL_DOSMEMMGR # include <os2.h> @@ -96,16 +97,17 @@ # define MAP_ANONYMOUS MAP_ANON # else # define MAP_ANONYMOUS 0 # endif # endif #endif using namespace js; +using namespace js::gc; static const size_t ARENA_HEADER_SIZE_HACK = 40; static const size_t TEMP_POOL_CHUNK_SIZE = 4096 - ARENA_HEADER_SIZE_HACK; static void FreeContext(JSContext *cx); #ifdef DEBUG @@ -500,18 +502,16 @@ JSThreadData::init() nativeStackBase = GetNativeStackBase(); return true; } void JSThreadData::finish() { #ifdef DEBUG - /* All GC-related things must be already removed at this point. */ - JS_ASSERT(gcFreeLists.isEmpty()); for (size_t i = 0; i != JS_ARRAY_LENGTH(scriptsToGC); ++i) JS_ASSERT(!scriptsToGC[i]); #endif if (dtoaState) js_DestroyDtoaState(dtoaState); js_FinishGSNCache(&gsnCache); @@ -524,26 +524,21 @@ JSThreadData::finish() #endif stackSpace.finish(); } void JSThreadData::mark(JSTracer *trc) { stackSpace.mark(trc); -#ifdef JS_TRACER - traceMonitor.mark(trc); -#endif } void JSThreadData::purge(JSContext *cx) { - gcFreeLists.purge(); - js_PurgeGSNCache(&gsnCache); /* FIXME: bug 506341. */ propertyCache.purge(cx); #ifdef JS_TRACER /* * If we are about to regenerate shapes, we have to flush the JIT cache, @@ -709,21 +704,16 @@ js_PurgeThreads(JSContext *cx) !e.empty(); e.popFront()) { JSThread *thread = e.front().value; if (JS_CLIST_IS_EMPTY(&thread->contextList)) { JS_ASSERT(cx->thread != thread); js_DestroyScriptsToGC(cx, &thread->data); - /* - * The following is potentially suboptimal as it also zeros the - * caches in data, but the code simplicity wins here. 
- */ - thread->data.gcFreeLists.purge(); DestroyThread(thread); e.removeFront(); } else { thread->data.purge(cx); } } #else cx->runtime->threadData.purge(cx); @@ -2205,18 +2195,16 @@ FreeOldArenas(JSRuntime *rt, JSArenaPool JS_FreeArenaPool(pool); } } void JSContext::purge() { FreeOldArenas(runtime, ®ExpPool); - /* FIXME: bug 586161 */ - compartment->purge(this); } void JSContext::updateJITEnabled() { #ifdef JS_TRACER traceJitEnabled = ((options & JSOPTION_JIT) && (debugHooks == &js_NullDebugHooks ||
--- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -1026,36 +1026,40 @@ struct TraceMonitor { /* * profAlloc has a lifetime which spans exactly from js_InitJIT to * js_FinishJIT. */ VMAllocator* profAlloc; FragStatsMap* profTab; #endif + bool ontrace() const { + return !!tracecx; + } + /* Flush the JIT cache. */ void flush(); - /* Mark all objects baked into native code in the code cache. */ - void mark(JSTracer *trc); + /* Sweep any cache entry pointing to dead GC things. */ + void sweep(); bool outOfMemory() const; }; } /* namespace js */ /* * N.B. JS_ON_TRACE(cx) is true if JIT code is on the stack in the current * thread, regardless of whether cx is the context in which that trace is * executing. cx must be a context on the current thread. */ #ifdef JS_TRACER -# define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).tracecx != NULL) +# define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).ontrace()) #else -# define JS_ON_TRACE(cx) JS_FALSE +# define JS_ON_TRACE(cx) false #endif /* Number of potentially reusable scriptsToGC to search for the eval cache. */ #ifndef JS_EVAL_CACHE_SHIFT # define JS_EVAL_CACHE_SHIFT 6 #endif #define JS_EVAL_CACHE_SIZE JS_BIT(JS_EVAL_CACHE_SHIFT) @@ -1109,18 +1113,16 @@ struct JSThreadData { /* * If non-zero, we were been asked to call the operation callback as soon * as possible. If the thread has an active request, this contributes * towards rt->interruptCounter. */ volatile int32 interruptFlags; - JSGCFreeLists gcFreeLists; - /* Keeper of the contiguous stack used by all contexts in this thread. */ js::StackSpace stackSpace; /* * Flag indicating that we are waiving any soft limits on the GC heap * because we want allocations to be infallible (except when we hit * a hard quota). 
*/ @@ -1273,110 +1275,25 @@ typedef enum JSRuntimeState { JSRTS_LANDING } JSRuntimeState; typedef struct JSPropertyTreeEntry { JSDHashEntryHdr hdr; js::Shape *child; } JSPropertyTreeEntry; - -namespace js { - -struct GCPtrHasher -{ - typedef void *Lookup; - - static HashNumber hash(void *key) { - return HashNumber(uintptr_t(key) >> JS_GCTHING_ZEROBITS); - } - - static bool match(void *l, void *k) { - return l == k; - } -}; - -typedef HashMap<void *, uint32, GCPtrHasher, SystemAllocPolicy> GCLocks; - -struct RootInfo { - RootInfo() {} - RootInfo(const char *name, JSGCRootType type) : name(name), type(type) {} - const char *name; - JSGCRootType type; -}; - -typedef js::HashMap<void *, - RootInfo, - js::DefaultHasher<void *>, - js::SystemAllocPolicy> RootedValueMap; - -/* If HashNumber grows, need to change WrapperHasher. */ -JS_STATIC_ASSERT(sizeof(HashNumber) == 4); - -struct WrapperHasher -{ - typedef Value Lookup; - - static HashNumber hash(Value key) { - uint64 bits = JSVAL_BITS(Jsvalify(key)); - return (uint32)bits ^ (uint32)(bits >> 32); - } - - static bool match(const Value &l, const Value &k) { - return l == k; - } -}; - -typedef HashMap<Value, Value, WrapperHasher, SystemAllocPolicy> WrapperMap; - -class AutoValueVector; -class AutoIdVector; - -} /* namespace js */ - -struct JSCompartment { - JSRuntime *rt; - JSPrincipals *principals; - void *data; - bool marked; - js::WrapperMap crossCompartmentWrappers; - bool debugMode; - - /* List all scripts in this compartment. 
*/ - JSCList scripts; - - JSCompartment(JSRuntime *cx); - ~JSCompartment(); - - bool init(); - - bool wrap(JSContext *cx, js::Value *vp); - bool wrap(JSContext *cx, JSString **strp); - bool wrap(JSContext *cx, JSObject **objp); - bool wrapId(JSContext *cx, jsid *idp); - bool wrap(JSContext *cx, js::PropertyOp *op); - bool wrap(JSContext *cx, js::PropertyDescriptor *desc); - bool wrap(JSContext *cx, js::AutoIdVector &props); - bool wrapException(JSContext *cx); - - void sweep(JSContext *cx); - -#ifdef JS_METHODJIT - bool addScript(JSContext *cx, JSScript *script); - void removeScript(JSScript *script); -#endif - void purge(JSContext *cx); -}; - typedef void (* JSActivityCallback)(void *arg, JSBool active); struct JSRuntime { /* Default compartment. */ JSCompartment *defaultCompartment; +#ifdef JS_THREADSAFE + bool defaultCompartmentIsLocked; +#endif /* List of compartments (protected by the GC lock). */ js::Vector<JSCompartment *, 0, js::SystemAllocPolicy> compartments; /* Runtime state, synchronized by the stateChange/gcLock condvar/lock. */ JSRuntimeState state; /* Context create/destroy callback. */ @@ -1411,37 +1328,30 @@ struct JSRuntime { * This comes early in JSRuntime to minimize the immediate format used by * trace-JITted code that reads it. */ uint32 protoHazardShape; /* Garbage collector state, used by jsgc.c. */ js::GCChunkSet gcChunkSet; - /* GC chunks with at least one free arena. 
*/ - js::GCChunkInfoVector gcFreeArenaChunks; -#ifdef DEBUG - JSGCArena *gcEmptyArenaList; -#endif - JSGCArenaList gcArenaList[FINALIZE_LIMIT]; js::RootedValueMap gcRootsHash; js::GCLocks gcLocksHash; jsrefcount gcKeepAtoms; size_t gcBytes; size_t gcLastBytes; size_t gcMaxBytes; size_t gcMaxMallocBytes; size_t gcNewArenaTriggerBytes; uint32 gcEmptyArenaPoolLifespan; uint32 gcNumber; js::GCMarker *gcMarkingTracer; uint32 gcTriggerFactor; size_t gcTriggerBytes; volatile JSBool gcIsNeeded; - volatile JSBool gcFlushCodeCaches; /* * NB: do not pack another flag here by claiming gcPadding unless the new * flag is written only by the GC thread. Atomic updates to packed bytes * are not guaranteed, so stores issued by one thread may be lost due to * unsynchronized read-modify-write cycles on other threads. */ bool gcPoke; @@ -1732,18 +1642,18 @@ struct JSRuntime { * Stats on compile-time host environment and lexical scope chain lengths * (maximum depths). */ JSBasicStats hostenvScopeDepthStats; JSBasicStats lexicalScopeDepthStats; #endif #ifdef JS_GCMETER - JSGCStats gcStats; - JSGCArenaStats gcArenaStats[FINALIZE_LIMIT]; + js::gc::JSGCStats gcStats; + js::gc::JSGCArenaStats globalArenaStats[js::gc::FINALIZE_LIMIT]; #endif #ifdef DEBUG /* * If functionMeterFilename, set from an envariable in JSRuntime's ctor, is * null, the remaining members in this ifdef'ed group are not initialized. 
*/ const char *functionMeterFilename; @@ -2862,19 +2772,17 @@ class AutoEnumStateRooter : private Auto } friend void AutoGCRooter::trace(JSTracer *trc); const Value &state() const { return stateValue; } Value *addr() { return &stateValue; } protected: - void trace(JSTracer *trc) { - JS_CALL_OBJECT_TRACER(trc, obj, "js::AutoEnumStateRooter.obj"); - } + void trace(JSTracer *trc); JSObject * const obj; private: Value stateValue; JS_DECL_USE_GUARD_OBJECT_NOTIFIER }; @@ -2906,16 +2814,52 @@ public: class AutoUnlockGC { private: JSRuntime *rt; public: explicit AutoUnlockGC(JSRuntime *rt) : rt(rt) { JS_UNLOCK_GC(rt); } ~AutoUnlockGC() { JS_LOCK_GC(rt); } }; +class AutoLockDefaultCompartment { + private: + JSContext *cx; + public: + AutoLockDefaultCompartment(JSContext *cx) : cx(cx) { + JS_LOCK(cx, &cx->runtime->atomState.lock); +#ifdef JS_THREADSAFE + cx->runtime->defaultCompartmentIsLocked = true; +#endif + } + ~AutoLockDefaultCompartment() { + JS_UNLOCK(cx, &cx->runtime->atomState.lock); +#ifdef JS_THREADSAFE + cx->runtime->defaultCompartmentIsLocked = false; +#endif + } +}; + +class AutoUnlockDefaultCompartment { + private: + JSContext *cx; + public: + AutoUnlockDefaultCompartment(JSContext *cx) : cx(cx) { + JS_UNLOCK(cx, &cx->runtime->atomState.lock); +#ifdef JS_THREADSAFE + cx->runtime->defaultCompartmentIsLocked = false; +#endif + } + ~AutoUnlockDefaultCompartment() { + JS_LOCK(cx, &cx->runtime->atomState.lock); +#ifdef JS_THREADSAFE + cx->runtime->defaultCompartmentIsLocked = true; +#endif + } +}; + class AutoKeepAtoms { JSRuntime *rt; public: explicit AutoKeepAtoms(JSRuntime *rt) : rt(rt) { JS_KEEP_ATOMS(rt); } ~AutoKeepAtoms() { JS_UNKEEP_ATOMS(rt); } }; class AutoArenaAllocator { @@ -3152,17 +3096,17 @@ js_ReportOverRecursed(JSContext *cx); extern JS_FRIEND_API(void) js_ReportAllocationOverflow(JSContext *cx); #define JS_CHECK_RECURSION(cx, onerror) \ JS_BEGIN_MACRO \ int stackDummy_; \ \ - if (!JS_CHECK_STACK_SIZE(cx, stackDummy_)) { \ + if 
(!JS_CHECK_STACK_SIZE(cx->stackLimit, &stackDummy_)) { \ js_ReportOverRecursed(cx); \ onerror; \ } \ JS_END_MACRO /* * Report an exception using a previously composed JSErrorReport. * XXXbe remove from "friend" API @@ -3202,26 +3146,16 @@ js_ReportValueErrorFlags(JSContext *cx, spindex, v, fallback, arg1, NULL)) #define js_ReportValueError3(cx,errorNumber,spindex,v,fallback,arg1,arg2) \ ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber, \ spindex, v, fallback, arg1, arg2)) extern JSErrorFormatString js_ErrorFormatString[JSErr_Limit]; -/* - * See JS_SetThreadStackLimit in jsapi.c, where we check that the stack - * grows in the expected direction. - */ -#if JS_STACK_GROWTH_DIRECTION > 0 -# define JS_CHECK_STACK_SIZE(cx, lval) ((jsuword)&(lval) < (cx)->stackLimit) -#else -# define JS_CHECK_STACK_SIZE(cx, lval) ((jsuword)&(lval) > (cx)->stackLimit) -#endif - #ifdef JS_THREADSAFE # define JS_ASSERT_REQUEST_DEPTH(cx) (JS_ASSERT((cx)->thread), \ JS_ASSERT((cx)->thread->data.requestDepth >= 1)) #else # define JS_ASSERT_REQUEST_DEPTH(cx) ((void) 0) #endif /*
--- a/js/src/jscntxtinlines.h +++ b/js/src/jscntxtinlines.h @@ -469,22 +469,16 @@ FrameRegsIter::operator++() incSlow(fp, prev); return *this; } cursp = fp->formalArgsEnd(); return *this; } -void -AutoIdArray::trace(JSTracer *trc) { - JS_ASSERT(tag == IDARRAY); - MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray"); -} - class AutoNamespaceArray : protected AutoGCRooter { public: AutoNamespaceArray(JSContext *cx) : AutoGCRooter(cx, NAMESPACES) { array.init(); } ~AutoNamespaceArray() { array.finish(context); @@ -672,23 +666,27 @@ CallJSNativeConstructor(JSContext *cx, j JS_ASSERT(vp[1].isMagic()); if (!CallJSNative(cx, native, argc, vp)) return false; /* * Native constructors must return non-primitive values on success. * Although it is legal, if a constructor returns the callee, there is a * 99.9999% chance it is a bug. If any valid code actually wants the - * constructor to return the callee, this can be removed. + * constructor to return the callee, the assertion can be removed or + * (another) conjunct can be added to the antecedent. * * Proxies are exceptions to both rules: they can return primitives and * they allow content to return the callee. + * + * (new Object(Object)) returns the callee. */ extern JSBool proxy_Construct(JSContext *, uintN, Value *); - JS_ASSERT_IF(native != proxy_Construct, + JS_ASSERT_IF(native != proxy_Construct && + (!callee->isFunction() || callee->getFunctionPrivate()->u.n.clasp != &js_ObjectClass), !vp->isPrimitive() && callee != &vp[0].toObject()); return true; } JS_ALWAYS_INLINE bool CallJSPropertyOp(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp) {
new file mode 100644 --- /dev/null +++ b/js/src/jscompartment.cpp @@ -0,0 +1,337 @@ +/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- + * vim: set ts=4 sw=4 et tw=99: + * + * ***** BEGIN LICENSE BLOCK ***** + * Version: MPL 1.1/GPL 2.0/LGPL 2.1 + * + * The contents of this file are subject to the Mozilla Public License Version + * 1.1 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * http://www.mozilla.org/MPL/ + * + * Software distributed under the License is distributed on an "AS IS" basis, + * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License + * for the specific language governing rights and limitations under the + * License. + * + * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released + * May 28, 2008. + * + * The Initial Developer of the Original Code is + * Mozilla Foundation + * Portions created by the Initial Developer are Copyright (C) 2010 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * + * Alternatively, the contents of this file may be used under the terms of + * either of the GNU General Public License Version 2 or later (the "GPL"), + * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your + * decision by deleting the provisions above and replace them with the notice + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. 
+ * + * ***** END LICENSE BLOCK ***** */ + +#include "jscompartment.h" +#include "jsgc.h" +#include "jscntxt.h" +#include "jsproxy.h" +#include "jsscope.h" +#include "methodjit/PolyIC.h" +#include "methodjit/MonoIC.h" + +#include "jsgcinlines.h" + +using namespace js; +using namespace js::gc; + +JSCompartment::JSCompartment(JSRuntime *rt) + : rt(rt), principals(NULL), data(NULL), marked(false), debugMode(false) +{ + JS_INIT_CLIST(&scripts); +} + +JSCompartment::~JSCompartment() +{ +} + +bool +JSCompartment::init() +{ + chunk = NULL; + shortStringArena.init(); + stringArena.init(); + funArena.init(); +#if JS_HAS_XML_SUPPORT + xmlArena.init(); +#endif + objArena.init(); + for (unsigned i = 0; i < JS_EXTERNAL_STRING_LIMIT; i++) + externalStringArenas[i].init(); + for (unsigned i = 0; i < FINALIZE_LIMIT; i++) + freeLists.finalizables[i] = NULL; +#ifdef JS_GCMETER + memset(&compartmentStats, 0, sizeof(JSGCArenaStats) * FINALIZE_LIMIT); +#endif + return crossCompartmentWrappers.init(); +} + +bool +JSCompartment::arenaListsAreEmpty() +{ + bool empty = objArena.isEmpty() && + funArena.isEmpty() && +#if JS_HAS_XML_SUPPORT + xmlArena.isEmpty() && +#endif + shortStringArena.isEmpty() && + stringArena.isEmpty(); + if (!empty) + return false; + + for (unsigned i = 0; i < JS_EXTERNAL_STRING_LIMIT; i++) { + if (!externalStringArenas[i].isEmpty()) + return false; + } + + return true; +} + +bool +JSCompartment::wrap(JSContext *cx, Value *vp) +{ + JS_ASSERT(cx->compartment == this); + + uintN flags = 0; + + JS_CHECK_RECURSION(cx, return false); + + /* Only GC things have to be wrapped or copied. */ + if (!vp->isMarkable()) + return true; + + /* Static strings do not have to be wrapped. */ + if (vp->isString() && JSString::isStatic(vp->toString())) + return true; + + /* Unwrap incoming objects. */ + if (vp->isObject()) { + JSObject *obj = &vp->toObject(); + + /* If the object is already in this compartment, we are done. 
*/ + if (obj->getCompartment(cx) == this) + return true; + + /* Don't unwrap an outer window proxy. */ + if (!obj->getClass()->ext.innerObject) { + obj = vp->toObject().unwrap(&flags); + OBJ_TO_OUTER_OBJECT(cx, obj); + if (!obj) + return false; + + vp->setObject(*obj); + } + + /* If the wrapped object is already in this compartment, we are done. */ + if (obj->getCompartment(cx) == this) + return true; + } + + /* If we already have a wrapper for this value, use it. */ + if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(*vp)) { + *vp = p->value; + return true; + } + + if (vp->isString()) { + Value orig = *vp; + JSString *str = vp->toString(); + JSString *wrapped = js_NewStringCopyN(cx, str->chars(), str->length()); + if (!wrapped) + return false; + vp->setString(wrapped); + return crossCompartmentWrappers.put(orig, *vp); + } + + JSObject *obj = &vp->toObject(); + + /* + * Recurse to wrap the prototype. Long prototype chains will run out of + * stack, causing an error in CHECK_RECURSE. + * + * Wrapping the proto before creating the new wrapper and adding it to the + * cache helps avoid leaving a bad entry in the cache on OOM. But note that + * if we wrapped both proto and parent, we would get infinite recursion + * here (since Object.prototype->parent->proto leads to Object.prototype + * itself). + */ + JSObject *proto = obj->getProto(); + if (!wrap(cx, &proto)) + return false; + + /* + * We hand in the original wrapped object into the wrap hook to allow + * the wrap hook to reason over what wrappers are currently applied + * to the object. 
+ */ + JSObject *wrapper = cx->runtime->wrapObjectCallback(cx, obj, proto, flags); + if (!wrapper) + return false; + wrapper->setProto(proto); + vp->setObject(*wrapper); + if (!crossCompartmentWrappers.put(wrapper->getProxyPrivate(), *vp)) + return false; + + /* + * Wrappers should really be parented to the wrapped parent of the wrapped + * object, but in that case a wrapped global object would have a NULL + * parent without being a proper global object (JSCLASS_IS_GLOBAL). Instead, + * we parent all wrappers to the global object in their home compartment. + * This loses us some transparency, and is generally very cheesy. + */ + JSObject *global; + if (cx->hasfp()) { + global = cx->fp()->scopeChain().getGlobal(); + } else { + global = cx->globalObject; + OBJ_TO_INNER_OBJECT(cx, global); + if (!global) + return false; + } + + wrapper->setParent(global); + return true; +} + +bool +JSCompartment::wrap(JSContext *cx, JSString **strp) +{ + AutoValueRooter tvr(cx, StringValue(*strp)); + if (!wrap(cx, tvr.addr())) + return false; + *strp = tvr.value().toString(); + return true; +} + +bool +JSCompartment::wrap(JSContext *cx, JSObject **objp) +{ + if (!*objp) + return true; + AutoValueRooter tvr(cx, ObjectValue(**objp)); + if (!wrap(cx, tvr.addr())) + return false; + *objp = &tvr.value().toObject(); + return true; +} + +bool +JSCompartment::wrapId(JSContext *cx, jsid *idp) +{ + if (JSID_IS_INT(*idp)) + return true; + AutoValueRooter tvr(cx, IdToValue(*idp)); + if (!wrap(cx, tvr.addr())) + return false; + return ValueToId(cx, tvr.value(), idp); +} + +bool +JSCompartment::wrap(JSContext *cx, PropertyOp *propp) +{ + Value v = CastAsObjectJsval(*propp); + if (!wrap(cx, &v)) + return false; + *propp = CastAsPropertyOp(v.toObjectOrNull()); + return true; +} + +bool +JSCompartment::wrap(JSContext *cx, PropertyDescriptor *desc) +{ + return wrap(cx, &desc->obj) && + (!(desc->attrs & JSPROP_GETTER) || wrap(cx, &desc->getter)) && + (!(desc->attrs & JSPROP_SETTER) || wrap(cx, 
&desc->setter)) && + wrap(cx, &desc->value); +} + +bool +JSCompartment::wrap(JSContext *cx, AutoIdVector &props) +{ + jsid *vector = props.begin(); + jsint length = props.length(); + for (size_t n = 0; n < size_t(length); ++n) { + if (!wrapId(cx, &vector[n])) + return false; + } + return true; +} + +bool +JSCompartment::wrapException(JSContext *cx) +{ + JS_ASSERT(cx->compartment == this); + + if (cx->throwing) { + AutoValueRooter tvr(cx, cx->exception); + cx->throwing = false; + cx->exception.setNull(); + if (wrap(cx, tvr.addr())) { + cx->throwing = true; + cx->exception = tvr.value(); + } + return false; + } + return true; +} + +void +JSCompartment::sweep(JSContext *cx) +{ + chunk = NULL; + /* Remove dead wrappers from the table. */ + for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) { + if (IsAboutToBeFinalized(e.front().value.toGCThing())) + e.removeFront(); + } + +#if defined JS_METHODJIT && defined JS_MONOIC + for (JSCList *cursor = scripts.next; cursor != &scripts; cursor = cursor->next) { + JSScript *script = reinterpret_cast<JSScript *>(cursor); + if (script->jit) + mjit::ic::SweepCallICs(script); + } +#endif +} + +void +JSCompartment::purge(JSContext *cx) +{ + freeLists.purge(); + +#ifdef JS_METHODJIT + for (JSScript *script = (JSScript *)scripts.next; + &script->links != &scripts; + script = (JSScript *)script->links.next) { + if (script->jit) { +# if defined JS_POLYIC + mjit::ic::PurgePICs(cx, script); +# endif +# if defined JS_MONOIC + /* + * MICs do not refer to data which can be GC'ed, but are sensitive + * to shape regeneration. + */ + if (cx->runtime->gcRegenShapes) + mjit::ic::PurgeMICs(cx, script); +# endif + } + } +#endif +}
new file mode 100644 --- /dev/null +++ b/js/src/jscompartment.h @@ -0,0 +1,135 @@ +/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- + * + * ***** BEGIN LICENSE BLOCK ***** + * Version: MPL 1.1/GPL 2.0/LGPL 2.1 + * + * The contents of this file are subject to the Mozilla Public License Version + * 1.1 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * http://www.mozilla.org/MPL/ + * + * Software distributed under the License is distributed on an "AS IS" basis, + * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License + * for the specific language governing rights and limitations under the + * License. + * + * The Original Code is SpiderMonkey code. + * + * The Initial Developer of the Original Code is + * Mozilla Corporation. + * Portions created by the Initial Developer are Copyright (C) 2010 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * + * + * Alternatively, the contents of this file may be used under the terms of + * either of the GNU General Public License Version 2 or later (the "GPL"), + * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your + * decision by deleting the provisions above and replace them with the notice + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. 
+ * + * ***** END LICENSE BLOCK ***** */ + +#ifndef jscompartment_h___ +#define jscompartment_h___ + +#include "jscntxt.h" +#include "jsgc.h" +#include "jsobj.h" +#include "jsfun.h" +#include "jsgcstats.h" +#include "jsclist.h" +#include "jsxml.h" + +struct JSCompartment { + JSRuntime *rt; + JSPrincipals *principals; + js::gc::Chunk *chunk; + + js::gc::ArenaList<JSObject> objArena; + js::gc::ArenaList<JSFunction> funArena; + js::gc::ArenaList<JSShortString> shortStringArena; + js::gc::ArenaList<JSString> stringArena; + js::gc::ArenaList<JSString> externalStringArenas[js::gc::JS_EXTERNAL_STRING_LIMIT]; +#if JS_HAS_XML_SUPPORT + js::gc::ArenaList<JSXML> xmlArena; +#endif + + js::gc::FreeLists freeLists; + +#ifdef JS_GCMETER + js::gc::JSGCArenaStats compartmentStats[js::gc::FINALIZE_LIMIT]; +#endif + + void *data; + bool marked; + js::WrapperMap crossCompartmentWrappers; + bool debugMode; + + /* List all scripts in this compartment. */ + JSCList scripts; + + JSCompartment(JSRuntime *cx); + ~JSCompartment(); + + bool init(); + + bool wrap(JSContext *cx, js::Value *vp); + bool wrap(JSContext *cx, JSString **strp); + bool wrap(JSContext *cx, JSObject **objp); + bool wrapId(JSContext *cx, jsid *idp); + bool wrap(JSContext *cx, js::PropertyOp *op); + bool wrap(JSContext *cx, js::PropertyDescriptor *desc); + bool wrap(JSContext *cx, js::AutoIdVector &props); + bool wrapException(JSContext *cx); + + void sweep(JSContext *cx); +#ifdef JS_METHODJIT + bool addScript(JSContext *cx, JSScript *script); + void removeScript(JSScript *script); +#endif + void purge(JSContext *cx); + void finishArenaLists(); + bool arenaListsAreEmpty(); +}; + +namespace js { + +class PreserveCompartment { + protected: + JSContext *cx; + private: + JSCompartment *oldCompartment; + JS_DECL_USE_GUARD_OBJECT_NOTIFIER + public: + PreserveCompartment(JSContext *cx JS_GUARD_OBJECT_NOTIFIER_PARAM) : cx(cx) { + JS_GUARD_OBJECT_NOTIFIER_INIT; + oldCompartment = cx->compartment; + } + + ~PreserveCompartment() { + 
cx->compartment = oldCompartment; + } +}; + +class SwitchToCompartment : public PreserveCompartment { + public: + SwitchToCompartment(JSContext *cx, JSCompartment *newCompartment) : PreserveCompartment(cx) { + cx->compartment = newCompartment; + } + + SwitchToCompartment(JSContext *cx, JSObject *target) : PreserveCompartment(cx) { + cx->compartment = target->getCompartment(cx); + } +}; + +} + +#endif /* jscompartment_h___ */
--- a/js/src/jsdbgapi.cpp +++ b/js/src/jsdbgapi.cpp @@ -69,16 +69,17 @@ #include "jsscopeinlines.h" #include "jsautooplen.h" #include "methodjit/MethodJIT.h" #include "methodjit/Retcon.h" using namespace js; +using namespace js::gc; typedef struct JSTrap { JSCList links; JSScript *script; jsbytecode *pc; JSOp op; JSTrapHandler handler; jsval closure; @@ -596,18 +597,18 @@ js_TraceWatchPoints(JSTracer *trc, JSObj rt = trc->context->runtime; for (wp = (JSWatchPoint *)rt->watchPointList.next; &wp->links != &rt->watchPointList; wp = (JSWatchPoint *)wp->links.next) { if (wp->object == obj) { wp->shape->trace(trc); if (wp->shape->hasSetterValue() && wp->setter) - JS_CALL_OBJECT_TRACER(trc, CastAsObject(wp->setter), "wp->setter"); - JS_CALL_OBJECT_TRACER(trc, wp->closure, "wp->closure"); + MarkObject(trc, *CastAsObject(wp->setter), "wp->setter"); + MarkObject(trc, *wp->closure, "wp->closure"); } } } void js_SweepWatchPoints(JSContext *cx) { JSRuntime *rt; @@ -615,17 +616,17 @@ js_SweepWatchPoints(JSContext *cx) uint32 sample; rt = cx->runtime; DBG_LOCK(rt); for (wp = (JSWatchPoint *)rt->watchPointList.next; &wp->links != &rt->watchPointList; wp = next) { next = (JSWatchPoint *)wp->links.next; - if (js_IsAboutToBeFinalized(wp->object)) { + if (IsAboutToBeFinalized(wp->object)) { sample = rt->debuggerMutations; /* Ignore failures. */ DropWatchPointAndUnlock(cx, wp, JSWP_LIVE); DBG_LOCK(rt); if (rt->debuggerMutations != sample + 1) next = (JSWatchPoint *)rt->watchPointList.next; }
--- a/js/src/jsemit.cpp +++ b/js/src/jsemit.cpp @@ -78,16 +78,17 @@ #define TRYNOTE_CHUNK 64 /* trynote allocation increment */ /* Macros to compute byte sizes from typed element counts. */ #define BYTECODE_SIZE(n) ((n) * sizeof(jsbytecode)) #define SRCNOTE_SIZE(n) ((n) * sizeof(jssrcnote)) #define TRYNOTE_SIZE(n) ((n) * sizeof(JSTryNote)) using namespace js; +using namespace js::gc; static JSBool NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind, uintN stackDepth, size_t start, size_t end); JSCodeGenerator::JSCodeGenerator(Parser *parser, JSArenaPool *cpool, JSArenaPool *npool, uintN lineno) @@ -2118,16 +2119,17 @@ BindNameToSlot(JSContext *cx, JSCodeGene /* * Optimize accesses to undeclared globals, but only if we are in * compile-and-go mode, the global is the same as the scope chain, * and we are not in strict mode. */ if (cg->compileAndGo() && cg->compiler()->globalScope->globalObj && + !pn->isDeoptimized() && !(cg->flags & TCF_STRICT_MODE_CODE)) { switch (op) { case JSOP_NAME: op = JSOP_GETGNAME; break; case JSOP_SETNAME: op = JSOP_SETGNAME; break; case JSOP_INCNAME: op = JSOP_INCGNAME; break; case JSOP_NAMEINC: op = JSOP_GNAMEINC; break; case JSOP_DECNAME: op = JSOP_DECGNAME; break; case JSOP_NAMEDEC: op = JSOP_GNAMEDEC; break;
--- a/js/src/jsexn.cpp +++ b/js/src/jsexn.cpp @@ -62,16 +62,17 @@ #include "jsscript.h" #include "jsstaticcheck.h" #include "jscntxtinlines.h" #include "jsinterpinlines.h" #include "jsobjinlines.h" using namespace js; +using namespace js::gc; /* Forward declarations for js_ErrorClass's initializer. */ static JSBool Exception(JSContext *cx, uintN argc, Value *vp); static void exn_trace(JSTracer *trc, JSObject *obj); @@ -402,26 +403,24 @@ exn_trace(JSTracer *trc, JSObject *obj) JSExnPrivate *priv; JSStackTraceElem *elem; size_t vcount, i; jsval *vp, v; priv = GetExnPrivate(trc->context, obj); if (priv) { if (priv->message) - JS_CALL_STRING_TRACER(trc, priv->message, "exception message"); + MarkString(trc, priv->message, "exception message"); if (priv->filename) - JS_CALL_STRING_TRACER(trc, priv->filename, "exception filename"); + MarkString(trc, priv->filename, "exception filename"); elem = priv->stackElems; for (vcount = i = 0; i != priv->stackDepth; ++i, ++elem) { - if (elem->funName) { - JS_CALL_STRING_TRACER(trc, elem->funName, - "stack trace function name"); - } + if (elem->funName) + MarkString(trc, elem->funName, "stack trace function name"); if (IS_GC_MARKING_TRACER(trc) && elem->filename) js_MarkScriptFilename(elem->filename); vcount += elem->argc; } vp = GetStackTraceValueBuffer(priv); for (i = 0; i != vcount; ++i, ++vp) { v = *vp; JS_CALL_VALUE_TRACER(trc, v, "stack trace argument");
--- a/js/src/jsfun.cpp +++ b/js/src/jsfun.cpp @@ -87,16 +87,17 @@ #include "jsatominlines.h" #include "jscntxtinlines.h" #include "jsfuninlines.h" #include "jsinterpinlines.h" #include "jsobjinlines.h" using namespace js; +using namespace js::gc; inline JSObject * JSObject::getThrowTypeError() const { return &getGlobal()->getReservedSlot(JSRESERVED_GLOBAL_THROWTYPEERROR).toObject(); } JSBool
--- a/js/src/jsfun.h +++ b/js/src/jsfun.h @@ -298,18 +298,16 @@ struct JSFunction : public JSObject return u.i.script; } /* Number of extra fixed function object slots besides JSSLOT_PRIVATE. */ static const uint32 CLASS_RESERVED_SLOTS = JSObject::FUN_CLASS_RESERVED_SLOTS; static const uint32 FIRST_FREE_SLOT = JSSLOT_PRIVATE + CLASS_RESERVED_SLOTS + 1; }; -JS_STATIC_ASSERT(sizeof(JSFunction) % JS_GCTHING_ALIGN == 0); - /* * Trace-annotated native. This expands to a JSFunctionSpec initializer (like * JS_FN in jsapi.h). fastcall is a FastNative; trcinfo is a * JSNativeTraceInfo*. */ #ifdef JS_TRACER /* MSVC demands the intermediate (void *) cast here. */ # define JS_TN(name,fastcall,nargs,flags,trcinfo) \
--- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -86,24 +86,28 @@ #endif #include "jsprobes.h" #include "jscntxtinlines.h" #include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jshashtable.h" +#include "jsstrinlines.h" +#include "jscompartment.h" + #ifdef MOZ_VALGRIND # define JS_VALGRIND #endif #ifdef JS_VALGRIND # include <valgrind/memcheck.h> #endif using namespace js; +using namespace js::gc; /* * Check that JSTRACE_XML follows JSTRACE_OBJECT and JSTRACE_STRING. */ JS_STATIC_ASSERT(JSTRACE_OBJECT == 0); JS_STATIC_ASSERT(JSTRACE_STRING == 1); JS_STATIC_ASSERT(JSTRACE_XML == 2); @@ -115,498 +119,365 @@ JS_STATIC_ASSERT(JSTRACE_STRING + 1 == J /* * Check consistency of external string constants from JSFinalizeGCThingKind. */ JS_STATIC_ASSERT(FINALIZE_EXTERNAL_STRING_LAST - FINALIZE_EXTERNAL_STRING0 == JS_EXTERNAL_STRING_LIMIT - 1); /* - * GC memory is allocated in chunks. The size of each chunk is GC_CHUNK_SIZE. - * The chunk contains an array of GC arenas holding GC things, an array of - * the mark bitmaps for each arena, an array of JSGCArenaInfo arena - * descriptors, an array of JSGCMarkingDelay descriptors, the GCChunkInfo - * chunk descriptor and a bitmap indicating free arenas in the chunk. The - * following picture demonstrates the layout: - * - * +--------+--------------+-------+--------+------------+-----------------+ - * | arenas | mark bitmaps | infos | delays | chunk info | free arena bits | - * +--------+--------------+-------+--------+------------+-----------------+ - * - * To ensure fast O(1) lookup of mark bits and arena descriptors each chunk is - * allocated on GC_CHUNK_SIZE boundary. This way a simple mask and shift - * operation gives an arena index into the mark and JSGCArenaInfo arrays. - * - * All chunks that have at least one free arena are put on the doubly-linked - * list with the head stored in JSRuntime.gcChunkList. 
GCChunkInfo contains - * the head of the chunk's free arena list together with the link fields for - * gcChunkList. - * - * A GC arena contains GC_ARENA_SIZE bytes aligned on GC_ARENA_SIZE boundary - * and holds things of the same size and kind. The size of each thing in the - * arena must be divisible by GC_CELL_SIZE, the minimal allocation unit, and - * the size of the mark bitmap is fixed and is independent of the thing's - * size with one bit per each GC_CELL_SIZE bytes. For thing sizes that exceed - * GC_CELL_SIZE this implies that we waste space in the mark bitmap. The - * advantage is that we can find the mark bit for the thing using just - * integer shifts avoiding an expensive integer division. We trade some space - * for speed here. - * - * The number of arenas in the chunk is given by GC_ARENAS_PER_CHUNK. We find - * that number as follows. Suppose chunk contains n arenas. Together with the - * word-aligned free arena bitmap and GCChunkInfo they should fit into the - * chunk. Hence GC_ARENAS_PER_CHUNK or n_max is the maximum value of n for - * which the following holds: - * - * n*s + ceil(n/B) <= M (1) - * - * where "/" denotes normal real division, - * ceil(r) gives the least integer not smaller than the number r, - * s is the number of words in the GC arena, arena's mark bitmap, - * JSGCArenaInfo and JSGCMarkingDelay or GC_ARENA_ALL_WORDS. - * B is number of bits per word or B == JS_BITS_PER_WORD - * M is the number of words in the chunk without GCChunkInfo or - * M == (GC_CHUNK_SIZE - sizeof(JSGCArenaInfo)) / sizeof(jsuword). - * - * We rewrite the inequality as - * - * n*B*s/B + ceil(n/B) <= M, - * ceil(n*B*s/B + n/B) <= M, - * ceil(n*(B*s + 1)/B) <= M (2) - * - * We define a helper function e(n, s, B), - * - * e(n, s, B) := ceil(n*(B*s + 1)/B) - n*(B*s + 1)/B, 0 <= e(n, s, B) < 1. 
- * - * It gives: - * - * n*(B*s + 1)/B + e(n, s, B) <= M, - * n + e*B/(B*s + 1) <= M*B/(B*s + 1) - * - * We apply the floor function to both sides of the last equation, where - * floor(r) gives the biggest integer not greater than r. As a consequence we - * have: - * - * floor(n + e*B/(B*s + 1)) <= floor(M*B/(B*s + 1)), - * n + floor(e*B/(B*s + 1)) <= floor(M*B/(B*s + 1)), - * n <= floor(M*B/(B*s + 1)), (3) - * - * where floor(e*B/(B*s + 1)) is zero as e*B/(B*s + 1) < B/(B*s + 1) < 1. - * Thus any n that satisfies the original constraint (1) or its equivalent (2), - * must also satisfy (3). That is, we got an upper estimate for the maximum - * value of n. Lets show that this upper estimate, - * - * floor(M*B/(B*s + 1)), (4) - * - * also satisfies (1) and, as such, gives the required maximum value. - * Substituting it into (2) gives: - * - * ceil(floor(M*B/(B*s + 1))*(B*s + 1)/B) == ceil(floor(M/X)*X) - * - * where X == (B*s + 1)/B > 1. But then floor(M/X)*X <= M/X*X == M and - * - * ceil(floor(M/X)*X) <= ceil(M) == M. - * - * Thus the value of (4) gives the maximum n satisfying (1). - * - * For the final result we observe that in (4) - * - * M*B == (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) / sizeof(jsuword) * - * JS_BITS_PER_WORD - * == (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) * JS_BITS_PER_BYTE - * - * since GC_CHUNK_SIZE and sizeof(GCChunkInfo) are at least word-aligned. + * Everything we store in the heap must be a multiple of the cell size. 
*/ - -const jsuword GC_ARENA_SHIFT = 12; -const jsuword GC_ARENA_MASK = JS_BITMASK(GC_ARENA_SHIFT); -const jsuword GC_ARENA_SIZE = JS_BIT(GC_ARENA_SHIFT); - -const jsuword GC_MAX_CHUNK_AGE = 3; - -const size_t GC_CELL_SHIFT = 3; -const size_t GC_CELL_SIZE = size_t(1) << GC_CELL_SHIFT; -const size_t GC_CELL_MASK = GC_CELL_SIZE - 1; - -const size_t BITS_PER_GC_CELL = GC_CELL_SIZE * JS_BITS_PER_BYTE; +JS_STATIC_ASSERT(sizeof(JSString) % sizeof(FreeCell) == 0); +JS_STATIC_ASSERT(sizeof(JSShortString) % sizeof(FreeCell) == 0); +JS_STATIC_ASSERT(sizeof(JSObject) % sizeof(FreeCell) == 0); +JS_STATIC_ASSERT(sizeof(JSFunction) % sizeof(FreeCell) == 0); +#ifdef JSXML +JS_STATIC_ASSERT(sizeof(JSXML) % sizeof(FreeCell) == 0); +#endif -const size_t GC_CELLS_PER_ARENA = size_t(1) << (GC_ARENA_SHIFT - GC_CELL_SHIFT); -const size_t GC_MARK_BITMAP_SIZE = GC_CELLS_PER_ARENA / JS_BITS_PER_BYTE; -const size_t GC_MARK_BITMAP_WORDS = GC_CELLS_PER_ARENA / JS_BITS_PER_WORD; - -JS_STATIC_ASSERT(sizeof(jsbitmap) == sizeof(jsuword)); - -JS_STATIC_ASSERT(sizeof(JSString) % GC_CELL_SIZE == 0); -JS_STATIC_ASSERT(sizeof(JSObject) % GC_CELL_SIZE == 0); -JS_STATIC_ASSERT(sizeof(JSFunction) % GC_CELL_SIZE == 0); -#ifdef JSXML -JS_STATIC_ASSERT(sizeof(JSXML) % GC_CELL_SIZE == 0); -#endif +/* + * All arenas must be exactly 4k. + */ +JS_STATIC_ASSERT(sizeof(Arena<JSString>) == 4096); +JS_STATIC_ASSERT(sizeof(Arena<JSShortString>) == 4096); +JS_STATIC_ASSERT(sizeof(Arena<JSObject>) == 4096); +JS_STATIC_ASSERT(sizeof(Arena<JSFunction>) == 4096); +JS_STATIC_ASSERT(sizeof(Arena<JSXML>) == 4096); #ifdef JS_GCMETER # define METER(x) ((void) (x)) # define METER_IF(condition, x) ((void) ((condition) && (x))) #else # define METER(x) ((void) 0) # define METER_IF(condition, x) ((void) 0) #endif -struct JSGCArenaInfo { - /* - * Allocation list for the arena. - */ - JSGCArenaList *list; - - /* - * Pointer to the previous arena in a linked list. 
The arena can either - * belong to one of JSContext.gcArenaList lists or, when it does not have - * any allocated GC things, to the list of free arenas in the chunk with - * head stored in GCChunkInfo.lastFreeArena. - */ - JSGCArena *prev; - - JSGCThing *freeList; - - static inline JSGCArenaInfo *fromGCThing(void* thing); -}; - -/* See comments before ThingsPerUnmarkedBit below. */ -struct JSGCMarkingDelay { - JSGCArena *link; - jsuword unmarkedChildren; -}; - -struct JSGCArena { - uint8 data[GC_ARENA_SIZE]; - - void checkAddress() const { - JS_ASSERT(!(reinterpret_cast<jsuword>(this) & GC_ARENA_MASK)); - } - - jsuword toPageStart() const { - checkAddress(); - return reinterpret_cast<jsuword>(this); - } - - static inline JSGCArena *fromGCThing(void* thing); - - static inline JSGCArena *fromChunkAndIndex(jsuword chunk, size_t index); +# define METER_UPDATE_MAX(maxLval, rval) \ + METER_IF((maxLval) < (rval), (maxLval) = (rval)) - jsuword getChunk() { - return toPageStart() & ~GC_CHUNK_MASK; - } - - jsuword getIndex() { - return (toPageStart() & GC_CHUNK_MASK) >> GC_ARENA_SHIFT; - } - - inline JSGCArenaInfo *getInfo(); - - inline JSGCMarkingDelay *getMarkingDelay(); - - inline jsbitmap *getMarkBitmap(); -}; - -namespace js { - -struct GCChunkInfo { - JSRuntime *runtime; - size_t numFreeArenas; - size_t gcChunkAge; - - inline void init(JSRuntime *rt); - - inline jsbitmap *getFreeArenaBitmap(); - - inline jsuword getChunk(); - - inline void clearMarkBitmap(); - - static inline GCChunkInfo *fromChunk(jsuword chunk); -}; - -} /* namespace js */ - -/* Check that all chunk arrays at least word-aligned. 
*/ -JS_STATIC_ASSERT(sizeof(JSGCArena) == GC_ARENA_SIZE); -JS_STATIC_ASSERT(GC_MARK_BITMAP_WORDS % sizeof(jsuword) == 0); -JS_STATIC_ASSERT(sizeof(JSGCArenaInfo) % sizeof(jsuword) == 0); -JS_STATIC_ASSERT(sizeof(JSGCMarkingDelay) % sizeof(jsuword) == 0); - -const size_t GC_ARENA_ALL_WORDS = (GC_ARENA_SIZE + GC_MARK_BITMAP_SIZE + - sizeof(JSGCArenaInfo) + - sizeof(JSGCMarkingDelay)) / sizeof(jsuword); +namespace js{ +namespace gc{ -/* The value according (4) above. */ -const size_t GC_ARENAS_PER_CHUNK = - (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) * JS_BITS_PER_BYTE / - (JS_BITS_PER_WORD * GC_ARENA_ALL_WORDS + 1); - -const size_t GC_FREE_ARENA_BITMAP_WORDS = (GC_ARENAS_PER_CHUNK + - JS_BITS_PER_WORD - 1) / - JS_BITS_PER_WORD; - -const size_t GC_FREE_ARENA_BITMAP_SIZE = GC_FREE_ARENA_BITMAP_WORDS * - sizeof(jsuword); - -/* Check that GC_ARENAS_PER_CHUNK indeed maximises (1). */ -JS_STATIC_ASSERT(GC_ARENAS_PER_CHUNK * GC_ARENA_ALL_WORDS + - GC_FREE_ARENA_BITMAP_WORDS <= - (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) / sizeof(jsuword)); - -JS_STATIC_ASSERT((GC_ARENAS_PER_CHUNK + 1) * GC_ARENA_ALL_WORDS + - (GC_ARENAS_PER_CHUNK + 1 + JS_BITS_PER_WORD - 1) / - JS_BITS_PER_WORD > - (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) / sizeof(jsuword)); - - -const size_t GC_MARK_BITMAP_ARRAY_OFFSET = GC_ARENAS_PER_CHUNK - << GC_ARENA_SHIFT; - -const size_t GC_ARENA_INFO_ARRAY_OFFSET = - GC_MARK_BITMAP_ARRAY_OFFSET + GC_MARK_BITMAP_SIZE * GC_ARENAS_PER_CHUNK; - -const size_t GC_MARKING_DELAY_ARRAY_OFFSET = - GC_ARENA_INFO_ARRAY_OFFSET + sizeof(JSGCArenaInfo) * GC_ARENAS_PER_CHUNK; - -const size_t GC_CHUNK_INFO_OFFSET = GC_CHUNK_SIZE - GC_FREE_ARENA_BITMAP_SIZE - - sizeof(GCChunkInfo); - -inline jsuword -GCChunkInfo::getChunk() { - jsuword addr = reinterpret_cast<jsuword>(this); - JS_ASSERT((addr & GC_CHUNK_MASK) == GC_CHUNK_INFO_OFFSET); - jsuword chunk = addr & ~GC_CHUNK_MASK; - return chunk; -} - -inline void -GCChunkInfo::clearMarkBitmap() +/* Initialize the arena and setup the free list. 
*/ +template <typename T> +void +Arena<T>::init(JSCompartment *compartment, unsigned thingKind) { - PodZero(reinterpret_cast<jsbitmap *>(getChunk() + GC_MARK_BITMAP_ARRAY_OFFSET), - GC_MARK_BITMAP_WORDS * GC_ARENAS_PER_CHUNK); -} - -/* static */ -inline GCChunkInfo * -GCChunkInfo::fromChunk(jsuword chunk) { - JS_ASSERT(!(chunk & GC_CHUNK_MASK)); - jsuword addr = chunk | GC_CHUNK_INFO_OFFSET; - return reinterpret_cast<GCChunkInfo *>(addr); -} - -inline jsbitmap * -GCChunkInfo::getFreeArenaBitmap() -{ - jsuword addr = reinterpret_cast<jsuword>(this); - return reinterpret_cast<jsbitmap *>(addr + sizeof(GCChunkInfo)); -} - -inline void -GCChunkInfo::init(JSRuntime *rt) -{ - runtime = rt; - numFreeArenas = GC_ARENAS_PER_CHUNK; - gcChunkAge = 0; - - /* - * For simplicity we set all bits to 1 including the high bits in the - * last word that corresponds to nonexistent arenas. This is fine since - * the arena scans the bitmap words from lowest to highest bits and the - * allocation checks numFreeArenas before doing the search. 
- */ - memset(getFreeArenaBitmap(), 0xFF, GC_FREE_ARENA_BITMAP_SIZE); -} - -inline void -CheckValidGCThingPtr(void *thing) -{ + aheader.compartment = compartment; + aheader.thingKind = thingKind; + aheader.freeList = &t.things[0].cell; + aheader.thingSize = sizeof(T); + aheader.isUsed = true; + JS_ASSERT(sizeof(T) == sizeof(ThingOrCell<T>)); + ThingOrCell<T> *thing = &t.things[0]; + ThingOrCell<T> *last = &t.things[JS_ARRAY_LENGTH(t.things) - 1]; + while (thing < last) { + thing->cell.link = &(thing + 1)->cell; + ++thing; + } + last->cell.link = NULL; #ifdef DEBUG - JS_ASSERT(!JSString::isStatic(thing)); - jsuword addr = reinterpret_cast<jsuword>(thing); - JS_ASSERT(!(addr & GC_CELL_MASK)); - JS_ASSERT((addr & GC_CHUNK_MASK) < GC_MARK_BITMAP_ARRAY_OFFSET); + aheader.hasFreeThings = true; #endif } -/* static */ -inline JSGCArenaInfo * -JSGCArenaInfo::fromGCThing(void* thing) +template <typename T> +bool +Arena<T>::inFreeList(void *thing) const { - CheckValidGCThingPtr(thing); - jsuword addr = reinterpret_cast<jsuword>(thing); - jsuword chunk = addr & ~GC_CHUNK_MASK; - JSGCArenaInfo *array = - reinterpret_cast<JSGCArenaInfo *>(chunk | GC_ARENA_INFO_ARRAY_OFFSET); - size_t arenaIndex = (addr & GC_CHUNK_MASK) >> GC_ARENA_SHIFT; - return array + arenaIndex; + FreeCell *cursor = aheader.freeList; + while (cursor) { + JS_ASSERT(aheader.thingSize == sizeof(T)); + JS_ASSERT(!cursor->isMarked()); + + /* If the cursor moves past the thing, it's not in the freelist. */ + if (thing < cursor) + break; + + /* If we find it on the freelist, it's dead. 
*/ + if (thing == cursor) + return true; + JS_ASSERT_IF(cursor->link, cursor < cursor->link); + cursor = cursor->link; + } + return false; } -/* static */ -inline JSGCArena * -JSGCArena::fromGCThing(void* thing) +template<typename T> +inline T * +Arena<T>::getAlignedThing(T *thing) { - CheckValidGCThingPtr(thing); - jsuword addr = reinterpret_cast<jsuword>(thing); - return reinterpret_cast<JSGCArena *>(addr & ~GC_ARENA_MASK); + jsuword start = reinterpret_cast<jsuword>(&t.things[0]); + jsuword offset = reinterpret_cast<jsuword>(thing) - start; + offset -= offset % aheader.thingSize; + return reinterpret_cast<T *>(start + offset); } -/* static */ -inline JSGCArena * -JSGCArena::fromChunkAndIndex(jsuword chunk, size_t index) { - JS_ASSERT(chunk); - JS_ASSERT(!(chunk & GC_CHUNK_MASK)); - JS_ASSERT(index < GC_ARENAS_PER_CHUNK); - return reinterpret_cast<JSGCArena *>(chunk | (index << GC_ARENA_SHIFT)); -} - -inline JSGCArenaInfo * -JSGCArena::getInfo() +#ifdef DEBUG +template <typename T> +bool +Arena<T>::assureThingIsAligned(T *thing) { - jsuword chunk = getChunk(); - jsuword index = getIndex(); - jsuword offset = GC_ARENA_INFO_ARRAY_OFFSET + index * sizeof(JSGCArenaInfo); - return reinterpret_cast<JSGCArenaInfo *>(chunk | offset); + return (getAlignedThing(thing) == thing); } -inline JSGCMarkingDelay * -JSGCArena::getMarkingDelay() +template +bool +Arena<JSObject>::assureThingIsAligned(JSObject *thing); + +template +bool +Arena<JSFunction>::assureThingIsAligned(JSFunction *thing); + +template +bool +Arena<JSString>::assureThingIsAligned(JSString *thing); + +template +bool +Arena<JSShortString>::assureThingIsAligned(JSShortString *thing); + +#if JS_HAS_XML_SUPPORT +template +bool +Arena<JSXML>::assureThingIsAligned(JSXML *thing); +#endif + +#endif + +template<typename T> +inline ConservativeGCTest +Arena<T>::mark(T *thing, JSTracer *trc) { - jsuword chunk = getChunk(); - jsuword index = getIndex(); - jsuword offset = GC_MARKING_DELAY_ARRAY_OFFSET + - index * 
sizeof(JSGCMarkingDelay); - return reinterpret_cast<JSGCMarkingDelay *>(chunk | offset); -} + thing = getAlignedThing(thing); + + if (thing > &t.things[ThingsPerArena-1].t || thing < &t.things[0].t) + return CGCT_NOTARENA; -inline jsbitmap * -JSGCArena::getMarkBitmap() -{ - jsuword chunk = getChunk(); - jsuword index = getIndex(); - jsuword offset = GC_MARK_BITMAP_ARRAY_OFFSET + index * GC_MARK_BITMAP_SIZE; - return reinterpret_cast<jsbitmap *>(chunk | offset); + if (!aheader.isUsed || inFreeList(thing)) + return CGCT_NOTLIVE; + + JS_ASSERT(assureThingIsAligned(thing)); + + JS_SET_TRACING_NAME(trc, "machine stack"); + Mark(trc, thing); + + return CGCT_VALID; } -/* - * Helpers for GC-thing operations. - */ +#ifdef DEBUG +bool +checkArenaListsForThing(JSCompartment *comp, void *thing) { + if (comp->objArena.arenasContainThing(thing) || + comp->funArena.arenasContainThing(thing) || +#if JS_HAS_XML_SUPPORT + comp->xmlArena.arenasContainThing(thing) || +#endif + comp->shortStringArena.arenasContainThing(thing) || + comp->stringArena.arenasContainThing(thing)) { + return true; + } + for (unsigned i = 0; i < JS_EXTERNAL_STRING_LIMIT; i++) { + if (comp->externalStringArenas[i].arenasContainThing(thing)) + return true; + } + return false; +} +#endif -inline jsbitmap * -GetGCThingMarkBit(void *thing, size_t &bitIndex) -{ - CheckValidGCThingPtr(thing); - jsuword addr = reinterpret_cast<jsuword>(thing); - jsuword chunk = addr & ~GC_CHUNK_MASK; - bitIndex = (addr & GC_CHUNK_MASK) >> GC_CELL_SHIFT; - return reinterpret_cast<jsbitmap *>(chunk | GC_MARK_BITMAP_ARRAY_OFFSET); +template <typename T> +void +EmptyArenaLists::insert(Arena<T> *arena) { + Arena<FreeCell> *a = reinterpret_cast<Arena<FreeCell> *>(arena); + a->header()->next = cellFreeList; + cellFreeList = a; } -/* - * Live objects are marked black. How many other additional colors are available - * depends on the size of the GCThing. 
- */ -static const uint32 BLACK = 0; +template<> +void +EmptyArenaLists::insert(Arena<JSObject> *arena) { + arena->header()->next = objectFreeList; + objectFreeList = arena; +} + +template<> +void +EmptyArenaLists::insert(Arena<JSFunction> *arena) { + arena->header()->next = functionFreeList; + functionFreeList = arena; +} -static void -AssertValidColor(void *thing, uint32 color) -{ - JS_ASSERT_IF(color, color < JSGCArenaInfo::fromGCThing(thing)->list->thingSize / GC_CELL_SIZE); +template<> +void +EmptyArenaLists::insert(Arena<JSString> *arena) { + arena->header()->next = stringFreeList; + stringFreeList = arena; +} + +template<> +void +EmptyArenaLists::insert(Arena<JSShortString> *arena) { + arena->header()->next = shortStringFreeList; + shortStringFreeList = arena; +} + +template<typename T> +Arena<T> *EmptyArenaLists::getTypedFreeList() { + return NULL; } -inline bool -IsMarkedGCThing(void *thing, uint32 color = BLACK) -{ - AssertValidColor(thing, color); +template<> +Arena<JSObject> *EmptyArenaLists::getTypedFreeList<JSObject>() { + Arena<JSObject> *arena = objectFreeList; + if (arena) { + objectFreeList = arena->header()->next; + return arena; + } + return NULL; +} - size_t index; - jsbitmap *markBitmap = GetGCThingMarkBit(thing, index); - return !!JS_TEST_BIT(markBitmap, index + color); +template<> +Arena<JSString> *EmptyArenaLists::getTypedFreeList<JSString>() { + Arena<JSString> *arena = stringFreeList; + if (arena) { + stringFreeList = arena->header()->next; + return arena; + } + return NULL; +} + +template<> +Arena<JSShortString> *EmptyArenaLists::getTypedFreeList<JSShortString>() { + Arena<JSShortString> *arena = shortStringFreeList; + if (arena) { + shortStringFreeList = arena->header()->next; + return arena; + } + return NULL; } -/* - * The GC always marks live objects BLACK. If color is not BLACK, we also mark - * the object with that additional color. 
- */ -inline bool -MarkIfUnmarkedGCThing(void *thing, uint32 color = BLACK) -{ - AssertValidColor(thing, color); - - size_t index; - jsbitmap *markBitmap = GetGCThingMarkBit(thing, index); - if (JS_TEST_BIT(markBitmap, index)) - return false; - JS_SET_BIT(markBitmap, index); - if (color != BLACK) - JS_SET_BIT(markBitmap, index + color); - return true; +template<> +Arena<JSFunction> *EmptyArenaLists::getTypedFreeList<JSFunction>() { + Arena<JSFunction> *arena = functionFreeList; + if (arena) { + functionFreeList = arena->header()->next; + return arena; + } + return NULL; } -size_t -ThingsPerArena(size_t thingSize) +} /* namespace gc */ +} /* namespace js */ + +void +JSCompartment::finishArenaLists() { - JS_ASSERT(!(thingSize & GC_CELL_MASK)); - JS_ASSERT(thingSize <= GC_ARENA_SIZE); - return GC_ARENA_SIZE / thingSize; + objArena.releaseAll(); + funArena.releaseAll(); + shortStringArena.releaseAll(); + stringArena.releaseAll(); +#if JS_HAS_XML_SUPPORT + xmlArena.releaseAll(); +#endif + for (unsigned i = 0; i < 8; i++) + externalStringArenas[i].releaseAll(); +} + +void +Chunk::clearMarkBitmap() +{ + PodZero(&bitmaps[0], ArenasPerChunk); } -/* Can only be called if thing belongs to an arena where a->list is not null. 
*/ -inline size_t -GCThingToArenaIndex(void *thing) +void +Chunk::init(JSRuntime *rt) { - CheckValidGCThingPtr(thing); - jsuword addr = reinterpret_cast<jsuword>(thing); - jsuword offsetInArena = addr & GC_ARENA_MASK; - JSGCArenaInfo *a = JSGCArenaInfo::fromGCThing(thing); - JS_ASSERT(a->list); - JS_ASSERT(offsetInArena % a->list->thingSize == 0); - return offsetInArena / a->list->thingSize; + info.runtime = rt; + info.age = 0; + info.emptyArenaLists.init(); + info.emptyArenaLists.cellFreeList = &arenas[0]; + Arena<FreeCell> *arena = &arenas[0]; + Arena<FreeCell> *last = &arenas[JS_ARRAY_LENGTH(arenas) - 1]; + while (arena < last) { + arena->header()->next = arena + 1; + arena->header()->isUsed = false; + ++arena; + } + last->header()->next = NULL; + last->header()->isUsed = false; + info.numFree = ArenasPerChunk; +} + +bool +Chunk::unused() +{ + return info.numFree == ArenasPerChunk; +} + +bool +Chunk::hasAvailableArenas() +{ + return info.numFree > 0; +} + +bool +Chunk::withinArenasRange(Cell *cell) +{ + uintptr_t addr = uintptr_t(cell); + if (addr >= uintptr_t(&arenas[0]) && addr < uintptr_t(&arenas[ArenasPerChunk])) + return true; + return false; } -/* Can only be applicable to arena where a->list is not null. */ -inline uint8 * -GCArenaIndexToThing(JSGCArena *a, JSGCArenaInfo *ainfo, size_t index) +template <typename T> +Arena<T> * +Chunk::allocateArena(JSCompartment *comp, unsigned thingKind) { - JS_ASSERT(a->getInfo() == ainfo); - - /* - * We use "<=" and not "<" in the assert so index can mean the limit. - * For the same reason we use "+", not "|" when finding the thing address - * as the limit address can start at the next arena. 
- */ - JS_ASSERT(index <= ThingsPerArena(ainfo->list->thingSize)); - jsuword offsetInArena = index * ainfo->list->thingSize; - return reinterpret_cast<uint8 *>(a->toPageStart() + offsetInArena); + JSRuntime *rt = info.runtime; + JS_ASSERT(hasAvailableArenas()); + Arena<T> *arena = info.emptyArenaLists.getNext<T>(comp, thingKind); + JS_ASSERT(arena); + JS_ASSERT(arena->header()->isUsed); + --info.numFree; + rt->gcBytes += sizeof(Arena<T>); + METER(rt->gcStats.nallarenas++); + return arena; } -/* - * The private JSGCThing struct, which describes a JSRuntime.gcFreeList element. - */ -struct JSGCThing { - JSGCThing *link; -}; - -static inline JSGCThing * -MakeNewArenaFreeList(JSGCArena *a, size_t thingSize) +template <typename T> +void +Chunk::releaseArena(Arena<T> *arena) { - jsuword thingsStart = a->toPageStart(); - jsuword lastThingMinAddr = thingsStart + GC_ARENA_SIZE - thingSize * 2 + 1; - jsuword thingPtr = thingsStart; - do { - jsuword nextPtr = thingPtr + thingSize; - JS_ASSERT((nextPtr & GC_ARENA_MASK) + thingSize <= GC_ARENA_SIZE); - JSGCThing *thing = reinterpret_cast<JSGCThing *>(thingPtr); - thing->link = reinterpret_cast<JSGCThing *>(nextPtr); - thingPtr = nextPtr; - } while (thingPtr < lastThingMinAddr); + JSRuntime *rt = info.runtime; + METER(rt->gcStats.afree++); + JS_ASSERT(rt->gcStats.nallarenas != 0); + METER(rt->gcStats.nallarenas--); + JS_ASSERT(rt->gcBytes >= sizeof(Arena<T>)); - JSGCThing *lastThing = reinterpret_cast<JSGCThing *>(thingPtr); - lastThing->link = NULL; - return reinterpret_cast<JSGCThing *>(thingsStart); + rt->gcBytes -= sizeof(Arena<T>); + info.emptyArenaLists.insert(arena); + arena->header()->isUsed = false; + ++info.numFree; + if (unused()) + info.age = 0; +} + +bool +Chunk::expire() +{ + if (!unused()) + return false; + return info.age++ > MaxAge; +} + +JSRuntime * +Chunk::getRuntime() +{ + return info.runtime; } inline jsuword GetGCChunk(JSRuntime *rt) { void *p = rt->gcChunkAllocator->alloc(); #ifdef MOZ_GCTIMER if (p) @@ 
-625,303 +496,143 @@ ReleaseGCChunk(JSRuntime *rt, jsuword ch #ifdef MOZ_GCTIMER JS_ATOMIC_INCREMENT(&destroyChunkCount); #endif JS_ASSERT(rt->gcStats.nchunks != 0); METER(rt->gcStats.nchunks--); rt->gcChunkAllocator->free(p); } -static JSGCArena * -NewGCArena(JSContext *cx) +inline Chunk * +AllocateGCChunk(JSRuntime *rt) +{ + Chunk *p = (Chunk *)rt->gcChunkAllocator->alloc(); +#ifdef MOZ_GCTIMER + if (p) + JS_ATOMIC_INCREMENT(&newChunkCount); +#endif + METER_IF(p, rt->gcStats.nchunks++); + return p; +} + +inline void +ReleaseGCChunk(JSRuntime *rt, Chunk *p) +{ + JS_ASSERT(p); +#ifdef MOZ_GCTIMER + JS_ATOMIC_INCREMENT(&destroyChunkCount); +#endif + JS_ASSERT(rt->gcStats.nchunks != 0); + METER(rt->gcStats.nchunks--); + rt->gcChunkAllocator->free(p); +} + +static Chunk * +PickChunk(JSContext *cx) { JSRuntime *rt = cx->runtime; - if (!JS_THREAD_DATA(cx)->waiveGCQuota && + Chunk *chunk; + if (!JS_THREAD_DATA(cx)->waiveGCQuota && (rt->gcBytes >= rt->gcMaxBytes || rt->gcBytes > GC_HEAP_GROWTH_FACTOR * rt->gcNewArenaTriggerBytes)) { /* * FIXME bug 524051 We cannot run a last-ditch GC on trace for now, so * just pretend we are out of memory which will throw us off trace and * we will re-try this code path from the interpreter. */ if (!JS_ON_TRACE(cx)) return NULL; - TriggerGC(rt); + TriggerGC(cx->runtime); } - if (rt->gcFreeArenaChunks.empty()) { -#ifdef DEBUG - for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) - JS_ASSERT(GCChunkInfo::fromChunk(r.front())->numFreeArenas == 0); -#endif - /* - * Make sure that after the GC we can append all allocated chunks to - * gcFreeArenaChunks. - * - * FIXME bug 583729 - use the same for the rt->gcChunkSet. 
- */ - if (!rt->gcFreeArenaChunks.reserve(rt->gcChunkSet.count() + 1)) - return NULL; - jsuword chunk = GetGCChunk(rt); - if (!chunk) - return NULL; - GCChunkInfo *ci = GCChunkInfo::fromChunk(chunk); - ci->init(rt); - - /* - * FIXME bug 583732 - chunk is newly allocated and cannot present in - * the table so using ordinary lookupForAdd is suboptimal here. - */ - GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk); - JS_ASSERT(!p); - if (!rt->gcChunkSet.add(p, chunk)) { - ReleaseGCChunk(rt, chunk); - return NULL; - } - JS_ALWAYS_TRUE(rt->gcFreeArenaChunks.append(ci)); + for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) { + if (r.front()->hasAvailableArenas()) + return r.front(); } - GCChunkInfo *ci = rt->gcFreeArenaChunks.back(); - JS_ASSERT(ci->numFreeArenas); + chunk = AllocateGCChunk(rt); + if (!chunk) + return NULL; - /* Scan the bitmap for the first non-zero bit. */ - jsbitmap *freeArenas = ci->getFreeArenaBitmap(); - size_t arenaIndex = 0; - while (!*freeArenas) { - arenaIndex += JS_BITS_PER_WORD; - freeArenas++; - } - size_t bit = CountTrailingZeros(*freeArenas); - arenaIndex += bit; - JS_ASSERT(arenaIndex < GC_ARENAS_PER_CHUNK); - JS_ASSERT(*freeArenas & (jsuword(1) << bit)); - *freeArenas &= ~(jsuword(1) << bit); - --ci->numFreeArenas; - if (ci->numFreeArenas == 0) { - JS_ASSERT(ci == rt->gcFreeArenaChunks.back()); - rt->gcFreeArenaChunks.popBack(); + /* + * FIXME bug 583732 - chunk is newly allocated and cannot be present in + * the table so using ordinary lookupForAdd is suboptimal here. 
+ */ + GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk); + JS_ASSERT(!p); + if (!rt->gcChunkSet.add(p, chunk)) { + ReleaseGCChunk(rt, chunk); + return NULL; } - rt->gcBytes += GC_ARENA_SIZE; - METER(rt->gcStats.nallarenas++); - METER_UPDATE_MAX(rt->gcStats.maxnallarenas, rt->gcStats.nallarenas); - - return JSGCArena::fromChunkAndIndex(ci->getChunk(), arenaIndex); -} + chunk->init(rt); -/* - * This function does not touch the arena or release its memory so code can - * still refer into it. - */ -static void -ReleaseGCArena(JSRuntime *rt, JSGCArena *a) -{ - METER(rt->gcStats.afree++); - JS_ASSERT(rt->gcBytes >= GC_ARENA_SIZE); - rt->gcBytes -= GC_ARENA_SIZE; - JS_ASSERT(rt->gcStats.nallarenas != 0); - METER(rt->gcStats.nallarenas--); - - jsuword chunk = a->getChunk(); - GCChunkInfo *ci = GCChunkInfo::fromChunk(chunk); - JS_ASSERT(ci->numFreeArenas <= GC_ARENAS_PER_CHUNK - 1); - jsbitmap *freeArenas = ci->getFreeArenaBitmap(); - JS_ASSERT(!JS_TEST_BIT(freeArenas, a->getIndex())); - JS_SET_BIT(freeArenas, a->getIndex()); - ci->numFreeArenas++; - if (ci->numFreeArenas == GC_ARENAS_PER_CHUNK) - ci->gcChunkAge = 0; - -#ifdef DEBUG - a->getInfo()->prev = rt->gcEmptyArenaList; - rt->gcEmptyArenaList = a; -#endif + return chunk; } static void -FreeGCChunks(JSRuntime *rt) +ExpireGCChunks(JSRuntime *rt) { -#ifdef DEBUG - while (rt->gcEmptyArenaList) { - JSGCArena *next = rt->gcEmptyArenaList->getInfo()->prev; - memset(rt->gcEmptyArenaList, JS_FREE_PATTERN, GC_ARENA_SIZE); - rt->gcEmptyArenaList = next; - } -#endif + /* Remove unused chunks. */ + AutoLockGC lock(rt); - /* Remove unused chunks and rebuild gcFreeArenaChunks. 
*/ - rt->gcFreeArenaChunks.clear(); - JS_ASSERT(rt->gcFreeArenaChunks.capacity() >= rt->gcChunkSet.count()); for (GCChunkSet::Enum e(rt->gcChunkSet); !e.empty(); e.popFront()) { - GCChunkInfo *ci = GCChunkInfo::fromChunk(e.front()); - JS_ASSERT(ci->runtime == rt); - if (ci->numFreeArenas == GC_ARENAS_PER_CHUNK) { - if (ci->gcChunkAge > GC_MAX_CHUNK_AGE) { - e.removeFront(); - ReleaseGCChunk(rt, ci->getChunk()); - continue; - } - ci->gcChunkAge++; + Chunk *chunk = e.front(); + JS_ASSERT(chunk->info.runtime == rt); + if (chunk->expire()) { + e.removeFront(); + ReleaseGCChunk(rt, chunk); + continue; } - - if (ci->numFreeArenas) - JS_ALWAYS_TRUE(rt->gcFreeArenaChunks.append(ci)); } } -static inline size_t -GetFinalizableThingSize(unsigned thingKind) -{ - JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8); - - static const uint8 map[FINALIZE_LIMIT] = { - sizeof(JSObject), /* FINALIZE_OBJECT */ - sizeof(JSFunction), /* FINALIZE_FUNCTION */ -#if JS_HAS_XML_SUPPORT - sizeof(JSXML), /* FINALIZE_XML */ -#endif - sizeof(JSShortString), /* FINALIZE_SHORT_STRING */ - sizeof(JSString), /* FINALIZE_STRING */ - sizeof(JSString), /* FINALIZE_EXTERNAL_STRING0 */ - sizeof(JSString), /* FINALIZE_EXTERNAL_STRING1 */ - sizeof(JSString), /* FINALIZE_EXTERNAL_STRING2 */ - sizeof(JSString), /* FINALIZE_EXTERNAL_STRING3 */ - sizeof(JSString), /* FINALIZE_EXTERNAL_STRING4 */ - sizeof(JSString), /* FINALIZE_EXTERNAL_STRING5 */ - sizeof(JSString), /* FINALIZE_EXTERNAL_STRING6 */ - sizeof(JSString), /* FINALIZE_EXTERNAL_STRING7 */ - }; - - JS_ASSERT(thingKind < FINALIZE_LIMIT); - return map[thingKind]; -} - -static inline size_t -GetFinalizableTraceKind(size_t thingKind) -{ - JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8); - - static const uint8 map[FINALIZE_LIMIT] = { - JSTRACE_OBJECT, /* FINALIZE_OBJECT */ - JSTRACE_OBJECT, /* FINALIZE_FUNCTION */ -#if JS_HAS_XML_SUPPORT /* FINALIZE_XML */ - JSTRACE_XML, -#endif - JSTRACE_STRING, /* FINALIZE_SHORT_STRING */ - JSTRACE_STRING, /* FINALIZE_STRING 
*/ - JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING0 */ - JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING1 */ - JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING2 */ - JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING3 */ - JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING4 */ - JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING5 */ - JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING6 */ - JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING7 */ - }; - - JS_ASSERT(thingKind < FINALIZE_LIMIT); - return map[thingKind]; -} - -static inline size_t -GetFinalizableArenaTraceKind(JSGCArenaInfo *ainfo) -{ - JS_ASSERT(ainfo->list); - return GetFinalizableTraceKind(ainfo->list->thingKind); -} - -static inline size_t -GetArenaTraceKind(JSGCArenaInfo *ainfo) +template <typename T> +static Arena<T> * +AllocateArena(JSContext *cx, unsigned thingKind) { - return GetFinalizableArenaTraceKind(ainfo); -} - -static inline size_t -GetFinalizableThingTraceKind(void *thing) -{ - JSGCArenaInfo *ainfo = JSGCArenaInfo::fromGCThing(thing); - return GetFinalizableArenaTraceKind(ainfo); -} - -static void -InitGCArenaLists(JSRuntime *rt) -{ - for (unsigned i = 0; i != FINALIZE_LIMIT; ++i) { - JSGCArenaList *arenaList = &rt->gcArenaList[i]; - arenaList->head = NULL; - arenaList->cursor = NULL; - arenaList->thingKind = i; - arenaList->thingSize = GetFinalizableThingSize(i); - } -} - -static void -FinishGCArenaLists(JSRuntime *rt) -{ - for (unsigned i = 0; i < FINALIZE_LIMIT; i++) { - rt->gcArenaList[i].head = NULL; - rt->gcArenaList[i].cursor = NULL; + JSRuntime *rt = cx->runtime; + Chunk *chunk; + Arena<T> *arena; + { + AutoLockGC lock(rt); + if (cx->compartment->chunk && cx->compartment->chunk->hasAvailableArenas()) { + chunk = cx->compartment->chunk; + } else { + if (!(chunk = PickChunk(cx))) { + return NULL; + } else { + cx->compartment->chunk = chunk; + } + } + arena = chunk->allocateArena<T>(cx->compartment, thingKind); } - - rt->gcBytes = 0; - - for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) - 
ReleaseGCChunk(rt, r.front()); - rt->gcChunkSet.clear(); - rt->gcFreeArenaChunks.clear(); -} - -intN -js_GetExternalStringGCType(JSString *str) -{ - JS_STATIC_ASSERT(FINALIZE_STRING + 1 == FINALIZE_EXTERNAL_STRING0); - JS_ASSERT(!JSString::isStatic(str)); - - unsigned thingKind = JSGCArenaInfo::fromGCThing(str)->list->thingKind; - JS_ASSERT(IsFinalizableStringKind(thingKind)); - return intN(thingKind) - intN(FINALIZE_EXTERNAL_STRING0); -} - -JS_FRIEND_API(uint32) -js_GetGCThingTraceKind(void *thing) -{ - if (JSString::isStatic(thing)) - return JSTRACE_STRING; - - JSGCArenaInfo *ainfo = JSGCArenaInfo::fromGCThing(thing); - return GetArenaTraceKind(ainfo); -} - -JSRuntime * -js_GetGCThingRuntime(void *thing) -{ - jsuword chunk = JSGCArena::fromGCThing(thing)->getChunk(); - return GCChunkInfo::fromChunk(chunk)->runtime; + return arena; } JS_FRIEND_API(bool) -js_IsAboutToBeFinalized(void *thing) +IsAboutToBeFinalized(void *thing) { if (JSString::isStatic(thing)) return false; - return !IsMarkedGCThing(thing); + return !reinterpret_cast<Cell *>(thing)->isMarked(); } JS_FRIEND_API(bool) -js_GCThingIsMarked(void *thing, uint32 color) +js_GCThingIsMarked(void *thing, uint32 color = BLACK) { - return IsMarkedGCThing(thing, color); + JS_ASSERT(thing); + AssertValidColor(thing, color); + return reinterpret_cast<Cell *>(thing)->isMarked(color); } JSBool js_InitGC(JSRuntime *rt, uint32 maxbytes) { - InitGCArenaLists(rt); - /* * Make room for at least 16 chunks so the table would not grow before * the browser starts up. */ if (!rt->gcChunkSet.init(16)) return false; if (!rt->gcRootsHash.init(256)) @@ -968,22 +679,23 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes METER(PodZero(&rt->gcStats)); return true; } namespace js { /* - * Returns CGCT_VALID if the w can be a live GC thing and sets thing and traceKind + * Returns CGCT_VALID and mark it if the w can be a live GC thing and sets traceKind * accordingly. Otherwise returns the reason for rejection. 
*/ inline ConservativeGCTest -IsGCThingWord(JSRuntime *rt, jsuword w, void *&thing, uint32 &traceKind) +MarkIfGCThingWord(JSTracer *trc, jsuword w, uint32 &traceKind) { + JSRuntime *rt = trc->context->runtime; /* * The conservative scanner may access words that valgrind considers as * undefined. To avoid false positives and not to alter valgrind view of * the memory we make as memcheck-defined the argument, a copy of the * original word. See bug 572678. */ #ifdef JS_VALGRIND VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w)); @@ -1006,116 +718,101 @@ IsGCThingWord(JSRuntime *rt, jsuword w, */ const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK); #if JS_BITS_PER_WORD == 32 jsuword payload = w & JSID_PAYLOAD_MASK; #elif JS_BITS_PER_WORD == 64 jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK; #endif - jsuword chunk = payload & ~GC_CHUNK_MASK; + Cell *cell = reinterpret_cast<Cell *>(payload); + Chunk *chunk = cell->chunk(); + if (!rt->gcChunkSet.has(chunk)) return CGCT_NOTCHUNK; - GCChunkInfo *ci = GCChunkInfo::fromChunk(chunk); - - if ((payload & GC_CHUNK_MASK) >= GC_MARK_BITMAP_ARRAY_OFFSET) + if (!chunk->withinArenasRange(cell)) return CGCT_NOTARENA; - size_t arenaIndex = (payload & GC_CHUNK_MASK) >> GC_ARENA_SHIFT; - if (JS_TEST_BIT(ci->getFreeArenaBitmap(), arenaIndex)) + ArenaHeader<Cell> *aheader = cell->arena()->header(); + + if (!aheader->isUsed) return CGCT_FREEARENA; - JSGCArena *a = JSGCArena::fromChunkAndIndex(chunk, arenaIndex); - JSGCArenaInfo *ainfo = a->getInfo(); - - traceKind = GetFinalizableArenaTraceKind(ainfo); - - /* - * On 64-bit we might consider using the tag bits in w to disqualify - * additional false roots, however, the condition would have to look - * something like: - * - * if ((traceKind == JSTRACE_STRING && tag > 0 && tag != JSVAL_TAG_SHIFT) || - * (traceKind == JSTRACE_OBJECT && tag > 0 && tag != JSVAL_TAG_OBJECT)) - * return CGCT_WRONGTAG; - * - * However, it seems like we should measure how often this actually avoids - * 
false roots. - */ - - jsuword start = a->toPageStart(); - jsuword offset = payload - start; - size_t thingSize = ainfo->list->thingSize; - offset -= offset % thingSize; + ConservativeGCTest test; + traceKind = aheader->thingKind; - /* - * If GC_ARENA_SIZE % thingSize != 0 or when thingSize is not a power - * of two, thingSize-aligned pointer may point at the end of the last - * thing yet be inside the arena. - */ - if (offset + thingSize > GC_ARENA_SIZE) { - JS_ASSERT(thingSize & (thingSize - 1)); - return CGCT_NOTARENA; - } - thing = (JSGCThing *) (start + offset); - - /* Make sure the thing is not on the freelist of the arena. */ - JSGCThing *cursor = ainfo->freeList; - while (cursor) { - JS_ASSERT((((jsuword) cursor) & GC_ARENA_MASK) % thingSize == 0); - JS_ASSERT(!IsMarkedGCThing(cursor)); - - /* If the cursor moves past the thing, it's not in the freelist. */ - if (thing < cursor) + switch (traceKind) { + case FINALIZE_OBJECT: + test = GetArena<JSObject>(cell)->mark((JSObject *)cell, trc); + break; + case FINALIZE_STRING: + case FINALIZE_EXTERNAL_STRING0: + case FINALIZE_EXTERNAL_STRING1: + case FINALIZE_EXTERNAL_STRING2: + case FINALIZE_EXTERNAL_STRING3: + case FINALIZE_EXTERNAL_STRING4: + case FINALIZE_EXTERNAL_STRING5: + case FINALIZE_EXTERNAL_STRING6: + case FINALIZE_EXTERNAL_STRING7: + test = GetArena<JSString>(cell)->mark((JSString *)cell, trc); break; - - /* If we find it on the freelist, it's dead. 
*/ - if (thing == cursor) - return CGCT_NOTLIVE; - JS_ASSERT_IF(cursor->link, cursor < cursor->link); - cursor = cursor->link; + case FINALIZE_SHORT_STRING: + test = GetArena<JSShortString>(cell)->mark((JSShortString *)cell, trc); + break; + case FINALIZE_FUNCTION: + test = GetArena<JSFunction>(cell)->mark((JSFunction *)cell, trc); + break; +#if JS_HAS_XML_SUPPORT + case FINALIZE_XML: + test = GetArena<JSXML>(cell)->mark((JSXML *)cell, trc); + break; +#endif + default: + test = CGCT_WRONGTAG; + JS_NOT_REACHED("wrong tag"); } - return CGCT_VALID; + return test; } inline ConservativeGCTest -IsGCThingWord(JSRuntime *rt, jsuword w) +MarkIfGCThingWord(JSTracer *trc, jsuword w) { - void *thing; uint32 traceKind; - return IsGCThingWord(rt, w, thing, traceKind); + return MarkIfGCThingWord(trc, w, traceKind); } static void MarkWordConservatively(JSTracer *trc, jsuword w) { /* * The conservative scanner may access words that valgrind considers as * undefined. To avoid false positives and not to alter valgrind view of * the memory we make as memcheck-defined the argument, a copy of the * original word. See bug 572678. 
*/ #ifdef JS_VALGRIND VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w)); #endif - void *thing; uint32 traceKind; - ConservativeGCTest test = IsGCThingWord(trc->context->runtime, w, thing, traceKind); +#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER + ConservativeGCTest test = +#endif + MarkIfGCThingWord(trc, w, traceKind); + +#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS if (test == CGCT_VALID) { - Mark(trc, thing, traceKind, "machine stack"); -#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS if (IS_GC_MARKING_TRACER(trc) && static_cast<GCMarker *>(trc)->conservativeDumpFileName) { - GCMarker::ConservativeRoot root = {thing, traceKind}; + GCMarker::ConservativeRoot root = {(void *)w, traceKind}; static_cast<GCMarker *>(trc)->conservativeRoots.append(root); } + } #endif - } #if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER if (IS_GC_MARKING_TRACER(trc)) static_cast<GCMarker *>(trc)->conservativeStats.counter[test]++; #endif } static void @@ -1230,20 +927,32 @@ js_FinishGC(JSRuntime *rt) #ifdef JS_ARENAMETER JS_DumpArenaStats(stdout); #endif #ifdef JS_GCMETER if (JS_WANT_GC_METER_PRINT) js_DumpGCStats(rt, stdout); #endif + /* Delete all remaining Compartments. Ideally only the defaultCompartment should be left. 
*/ + for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) { + JSCompartment *comp = *c; + comp->finishArenaLists(); + delete comp; + } + rt->compartments.clear(); + rt->defaultCompartment = NULL; + + for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) + ReleaseGCChunk(rt, r.front()); + rt->gcChunkSet.clear(); + #ifdef JS_THREADSAFE rt->gcHelperThread.finish(rt); #endif - FinishGCArenaLists(rt); #ifdef DEBUG if (!rt->gcRootsHash.empty()) CheckLeakedRoots(rt); #endif rt->gcRootsHash.clear(); rt->gcLocksHash.clear(); } @@ -1400,39 +1109,24 @@ JSRuntime::setGCLastBytes(size_t lastByt gcLastBytes = lastBytes; uint64 triggerBytes = uint64(lastBytes) * uint64(gcTriggerFactor / 100); if (triggerBytes != size_t(triggerBytes)) triggerBytes = size_t(-1); gcTriggerBytes = size_t(triggerBytes); } void -JSGCFreeLists::purge() +FreeLists::purge() { /* * Return the free list back to the arena so the GC finalization will not * run the finalizers over unitialized bytes from free things. */ - for (JSGCThing **p = finalizables; p != JS_ARRAY_END(finalizables); ++p) { - JSGCThing *freeListHead = *p; - if (freeListHead) { - JSGCArenaInfo *ainfo = JSGCArenaInfo::fromGCThing(freeListHead); - JS_ASSERT(!ainfo->freeList); - ainfo->freeList = freeListHead; - *p = NULL; - } - } -} - -void -JSGCFreeLists::moveTo(JSGCFreeLists *another) -{ - *another = *this; - PodArrayZero(finalizables); - JS_ASSERT(isEmpty()); + for (FreeCell ***p = finalizables; p != JS_ARRAY_END(finalizables); ++p) + *p = NULL; } static inline bool IsGCThresholdReached(JSRuntime *rt) { #ifdef JS_GC_ZEAL if (rt->gcZeal >= 1) return true; @@ -1441,156 +1135,165 @@ IsGCThresholdReached(JSRuntime *rt) /* * Since the initial value of the gcLastBytes parameter is not equal to * zero (see the js_InitGC function) the return value is false when * the gcBytes value is close to zero at the JS engine start. 
*/ return rt->isGCMallocLimitReached() || rt->gcBytes >= rt->gcTriggerBytes; } -static void -LastDitchGC(JSContext *cx) -{ - JS_ASSERT(!JS_ON_TRACE(cx)); +struct JSShortString; - /* The last ditch GC preserves weak roots and all atoms. */ - AutoKeepAtoms keep(cx->runtime); +template <typename T> +ArenaList<T> * +GetFinalizableArenaList(JSCompartment *c, unsigned thingKind); - /* - * Keep rt->gcLock across the call into the GC so we don't starve and - * lose to racing threads who deplete the heap just after the GC has - * replenished it (or has synchronized with a racing GC that collected a - * bunch of garbage). This unfair scheduling can happen on certain - * operating systems. For the gory details, see bug 162779. - */ - js_GC(cx, GC_LOCK_HELD); +template <> +ArenaList<JSObject> * +GetFinalizableArenaList<JSObject>(JSCompartment *c, unsigned thingKind) { + JS_ASSERT(thingKind == FINALIZE_OBJECT); + return &c->objArena; +} + +template <> +ArenaList<JSString> * +GetFinalizableArenaList<JSString>(JSCompartment *c, unsigned thingKind) { + JS_ASSERT(thingKind >= FINALIZE_STRING && thingKind <= FINALIZE_EXTERNAL_STRING_LAST); + + if (JS_LIKELY(thingKind == FINALIZE_STRING)) + return &c->stringArena; + return &c->externalStringArenas[thingKind - FINALIZE_EXTERNAL_STRING0]; } -static JSGCThing * -RefillFinalizableFreeList(JSContext *cx, unsigned thingKind) -{ - JS_ASSERT(!JS_THREAD_DATA(cx)->gcFreeLists.finalizables[thingKind]); - JSRuntime *rt = cx->runtime; - JSGCArenaList *arenaList; - JSGCArena *a; - - { - AutoLockGC lock(rt); - JS_ASSERT(!rt->gcRunning); - if (rt->gcRunning) - return NULL; - - bool canGC = !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota; - bool doGC = canGC && IsGCThresholdReached(rt); - arenaList = &rt->gcArenaList[thingKind]; - for (;;) { - if (doGC) { - LastDitchGC(cx); - METER(cx->runtime->gcArenaStats[thingKind].retry++); - canGC = false; - - /* - * The JSGC_END callback can legitimately allocate new GC - * things and populate the free 
list. If that happens, just - * return that list head. - */ - JSGCThing *freeList = JS_THREAD_DATA(cx)->gcFreeLists.finalizables[thingKind]; - if (freeList) - return freeList; - } +template <> +ArenaList<JSShortString> * +GetFinalizableArenaList<JSShortString>(JSCompartment *c, unsigned thingKind) { + JS_ASSERT(thingKind == FINALIZE_SHORT_STRING); + return &c->shortStringArena; +} - while ((a = arenaList->cursor) != NULL) { - JSGCArenaInfo *ainfo = a->getInfo(); - arenaList->cursor = ainfo->prev; - JSGCThing *freeList = ainfo->freeList; - if (freeList) { - ainfo->freeList = NULL; - return freeList; - } - } - - a = NewGCArena(cx); - if (a) - break; - if (!canGC) { - METER(cx->runtime->gcArenaStats[thingKind].fail++); - return NULL; - } - doGC = true; - } - - /* - * Do only minimal initialization of the arena inside the GC lock. We - * can do the rest outside the lock because no other threads will see - * the arena until the GC is run. - */ - JSGCArenaInfo *ainfo = a->getInfo(); - ainfo->list = arenaList; - ainfo->prev = arenaList->head; - ainfo->freeList = NULL; - arenaList->head = a; - } - - JSGCMarkingDelay *markingDelay = a->getMarkingDelay(); - markingDelay->link = NULL; - markingDelay->unmarkedChildren = 0; - - return MakeNewArenaFreeList(a, arenaList->thingSize); +template <> +ArenaList<JSFunction> * +GetFinalizableArenaList<JSFunction>(JSCompartment *c, unsigned thingKind) { + JS_ASSERT(thingKind == FINALIZE_FUNCTION); + return &c->funArena; } -static inline void -CheckGCFreeListLink(JSGCThing *thing) +#if JS_HAS_XML_SUPPORT +template <> +ArenaList<JSXML> * +GetFinalizableArenaList<JSXML>(JSCompartment *c, unsigned thingKind) { + JS_ASSERT(thingKind == FINALIZE_XML); + return &c->xmlArena; +} +#endif + +#ifdef DEBUG +bool +CheckAllocation(JSContext *cx) { - /* - * The GC things on the free lists come from one arena and the things on - * the free list are linked in ascending address order. 
- */ - JS_ASSERT_IF(thing->link, - JSGCArena::fromGCThing(thing) == - JSGCArena::fromGCThing(thing->link)); - JS_ASSERT_IF(thing->link, thing < thing->link); -} - -void * -js_NewFinalizableGCThing(JSContext *cx, unsigned thingKind) -{ - JS_ASSERT(thingKind < FINALIZE_LIMIT); #ifdef JS_THREADSAFE JS_ASSERT(cx->thread); #endif - - /* Updates of metering counters here may not be thread-safe. */ - METER(cx->runtime->gcArenaStats[thingKind].alloc++); + JS_ASSERT(!cx->runtime->gcRunning); + return true; +} +#endif - JSGCThing **freeListp = - JS_THREAD_DATA(cx)->gcFreeLists.finalizables + thingKind; - JSGCThing *thing = *freeListp; - if (thing) { - *freeListp = thing->link; - CheckGCFreeListLink(thing); - METER(cx->runtime->gcArenaStats[thingKind].localalloc++); - return thing; - } +template <typename T> +bool +RefillFinalizableFreeList(JSContext *cx, unsigned thingKind) +{ + JSCompartment *compartment = cx->compartment; + JS_ASSERT_IF(compartment->freeLists.finalizables[thingKind], + !*compartment->freeLists.finalizables[thingKind]); + JSRuntime *rt = cx->runtime; + + ArenaList<T> *arenaList; + Arena<T> *a; + + JS_ASSERT(!rt->gcRunning); + if (rt->gcRunning) + return false; + + bool canGC = !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota; + bool doGC = canGC && IsGCThresholdReached(rt); - thing = RefillFinalizableFreeList(cx, thingKind); - if (!thing) { - js_ReportOutOfMemory(cx); - return NULL; - } + arenaList = GetFinalizableArenaList<T>(cx->compartment, thingKind); + do { + if (doGC) { + JS_ASSERT(!JS_ON_TRACE(cx)); +#ifdef JS_THREADSAFE + Conditionally<AutoUnlockDefaultCompartment> unlockDefaultCompartmentIf(cx->compartment == cx->runtime->defaultCompartment && + cx->runtime->defaultCompartmentIsLocked, cx); +#endif + /* The last ditch GC preserves all atoms. 
*/ + AutoKeepAtoms keep(cx->runtime); + js_GC(cx, GC_NORMAL); + METER(cx->runtime->gcStats.retry++); + canGC = false; + /* + * The JSGC_END callback can legitimately allocate new GC + * things and populate the free list. If that happens, just + * return that list head. + */ + if (compartment->freeLists.finalizables[thingKind]) + return true; + } + if ((a = arenaList->getNextWithFreeList())) { + JS_ASSERT(a->header()->freeList); + compartment->freeLists.populate(a, thingKind); + return true; + } + a = AllocateArena<T>(cx, thingKind); + if (a) { + compartment->freeLists.populate(a, thingKind); + arenaList->insert(a); + a->getMarkingDelay()->init(); + return true; + } + if (!canGC) { + METER(cx->runtime->gcStats.fail++); + js_ReportOutOfMemory(cx); + return false; + } + doGC = true; + } while (true); +} - /* - * See comments in RefillFinalizableFreeList about a possibility - * of *freeListp == thing. - */ - JS_ASSERT(!*freeListp || *freeListp == thing); - *freeListp = thing->link; +template +bool +RefillFinalizableFreeList<JSObject>(JSContext *cx, unsigned thingKind); + +template +bool +RefillFinalizableFreeList<JSFunction>(JSContext *cx, unsigned thingKind); + +template +bool +RefillFinalizableFreeList<JSString>(JSContext *cx, unsigned thingKind); - CheckGCFreeListLink(thing); +template +bool +RefillFinalizableFreeList<JSShortString>(JSContext *cx, unsigned thingKind); - return thing; +#if JS_HAS_XML_SUPPORT +template +bool +RefillFinalizableFreeList<JSXML>(JSContext *cx, unsigned thingKind); +#endif + +intN +js_GetExternalStringGCType(JSString *str) { + return GetExternalStringGCType(str); +} + +uint32 +js_GetGCThingTraceKind(void *thing) { + return GetGCThingTraceKind(thing); } JSBool js_LockGCThingRT(JSRuntime *rt, void *thing) { GCLocks *locks; if (!thing) @@ -1629,100 +1332,51 @@ js_UnlockGCThingRT(JSRuntime *rt, void * } } JS_PUBLIC_API(void) JS_TraceChildren(JSTracer *trc, void *thing, uint32 kind) { switch (kind) { case JSTRACE_OBJECT: { - /* If obj has no 
map, it must be a newborn. */ - JSObject *obj = (JSObject *) thing; - if (!obj->map) - break; - - /* Trace universal (ops-independent) members. */ - if (JSObject *proto = obj->getProto()) - JS_CALL_OBJECT_TRACER(trc, proto, "proto"); - if (JSObject *parent = obj->getParent()) - JS_CALL_OBJECT_TRACER(trc, parent, "parent"); - if (obj->emptyShape) - obj->emptyShape->trace(trc); - - /* Delegate to ops or the native marking op. */ - JSTraceOp op = obj->getOps()->trace; - (op ? op : js_TraceObject)(trc, obj); + MarkChildren(trc, (JSObject *)thing); break; } case JSTRACE_STRING: { - JSString *str = (JSString *) thing; - if (str->isDependent()) - JS_CALL_STRING_TRACER(trc, str->dependentBase(), "base"); - else if (str->isRope()) { - if (str->isInteriorNode()) - JS_CALL_STRING_TRACER(trc, str->interiorNodeParent(), "parent"); - JS_CALL_STRING_TRACER(trc, str->ropeLeft(), "left child"); - JS_CALL_STRING_TRACER(trc, str->ropeRight(), "right child"); - } + MarkChildren(trc, (JSString *)thing); break; } #if JS_HAS_XML_SUPPORT case JSTRACE_XML: - js_TraceXML(trc, (JSXML *)thing); + MarkChildren(trc, (JSXML *)thing); break; #endif } } namespace js { /* * When the native stack is low, the GC does not call JS_TraceChildren to mark * the reachable "children" of the thing. Rather the thing is put aside and * JS_TraceChildren is called later with more space on the C stack. * * To implement such delayed marking of the children with minimal overhead for - * the normal case of sufficient native stack, the code uses two fields per - * arena stored in JSGCMarkingDelay. The first field, JSGCMarkingDelay::link, - * links all arenas with delayed things into a stack list with the pointer to - * stack top in JSRuntime::gcUnmarkedArenaStackTop. delayMarkingChildren adds + * the normal case of sufficient native stack, the code adds a field per + * arena. 
The field markingDelay->link links all arenas with delayed things
+ * into a stack list with the pointer to stack top in
+ * GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
 * arenas to the stack as necessary while markDelayedChildren pops the arenas
 * from the stack until it empties.
- *
- * The second field, JSGCMarkingDelay::unmarkedChildren, is a bitmap that
- * tells for which things the GC should call JS_TraceChildren later. The
- * bitmap is a single word. As such it does not pinpoint the delayed things
- * in the arena but rather tells the intervals containing
- * ThingsPerUnmarkedBit(thingSize) things. Later the code in
- * markDelayedChildren discovers such intervals and calls JS_TraceChildren on
- * any marked thing in the interval. This implies that JS_TraceChildren can be
- * called many times for a single thing if the thing shares the same interval
- * with some delayed things. This should be fine as any GC graph
- * marking/traversing hooks must allow repeated calls during the same GC cycle.
- * In particular, xpcom cycle collector relies on this.
- *
- * Note that such repeated scanning may slow down the GC. In particular, it is
- * possible to construct an object graph where the GC calls JS_TraceChildren
- * ThingsPerUnmarkedBit(thingSize) for almost all things in the graph. We
- * tolerate this as the max value for ThingsPerUnmarkedBit(thingSize) is 4.
- * This is archived for JSObject on 32 bit system as it is exactly JSObject
- * that has the smallest size among the GC things that can be delayed. On 32
- * bit CPU we have less than 128 objects per 4K GC arena so each bit in
- * unmarkedChildren covers 4 objects. 
*/ -inline unsigned -ThingsPerUnmarkedBit(unsigned thingSize) -{ - return JS_HOWMANY(ThingsPerArena(thingSize), JS_BITS_PER_WORD); -} GCMarker::GCMarker(JSContext *cx) - : color(0), unmarkedArenaStackTop(NULL) + : color(0), stackLimit(0), unmarkedArenaStackTop(NULL) { JS_TRACER_INIT(this, cx, NULL); #ifdef DEBUG markLaterCount = 0; #endif #ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS"); memset(&conservativeStats, 0, sizeof(conservativeStats)); @@ -1738,279 +1392,147 @@ GCMarker::~GCMarker() /* Update total stats. */ context->runtime->gcStats.conservative.add(conservativeStats); #endif } void GCMarker::delayMarkingChildren(void *thing) { - JS_ASSERT(this == context->runtime->gcMarkingTracer); - JS_ASSERT(IsMarkedGCThing(thing)); - METER(context->runtime->gcStats.unmarked++); - - JSGCArena *a = JSGCArena::fromGCThing(thing); - JSGCArenaInfo *ainfo = a->getInfo(); - JSGCMarkingDelay *markingDelay = a->getMarkingDelay(); - - size_t thingArenaIndex = GCThingToArenaIndex(thing); - size_t unmarkedBitIndex = thingArenaIndex / - ThingsPerUnmarkedBit(ainfo->list->thingSize); - JS_ASSERT(unmarkedBitIndex < JS_BITS_PER_WORD); + Cell *cell = reinterpret_cast<Cell *>(thing); + Arena<Cell> *a = cell->arena(); + JS_ASSERT(cell->isMarked()); + METER(cell->compartment()->rt->gcStats.unmarked++); + MarkingDelay *markingDelay = a->getMarkingDelay(); - jsuword bit = jsuword(1) << unmarkedBitIndex; - if (markingDelay->unmarkedChildren != 0) { - JS_ASSERT(unmarkedArenaStackTop); - if (markingDelay->unmarkedChildren & bit) { - /* bit already covers things with children to mark later. */ - return; - } - markingDelay->unmarkedChildren |= bit; - } else { - /* - * The thing is the first thing with not yet marked children in the - * whole arena, so push the arena on the stack of arenas with things - * to be marked later unless the arena has already been pushed. 
We - * detect that through checking prevUnmarked as the field is 0 - * only for not yet pushed arenas. To ensure that - * prevUnmarked != 0 - * even when the stack contains one element, we make prevUnmarked - * for the arena at the bottom to point to itself. - * - * See comments in markDelayedChildren. - */ - markingDelay->unmarkedChildren = bit; - if (!markingDelay->link) { - if (!unmarkedArenaStackTop) { - /* Stack was empty, mark the arena as the bottom element. */ - markingDelay->link = a; - } else { - JS_ASSERT(unmarkedArenaStackTop->getMarkingDelay()->link); - markingDelay->link = unmarkedArenaStackTop; - } - unmarkedArenaStackTop = a; - } - JS_ASSERT(unmarkedArenaStackTop); + if (markingDelay->link) { + if (markingDelay->start > (jsuword)cell) + markingDelay->start = (jsuword)cell; + /* Arena already scheduled to be marked again */ + return; } + markingDelay->start = (jsuword)cell; + Arena<Cell> *tos = unmarkedArenaStackTop; + markingDelay->link = tos ? tos : a; + unmarkedArenaStackTop = a; #ifdef DEBUG - markLaterCount += ThingsPerUnmarkedBit(ainfo->list->thingSize); - METER_UPDATE_MAX(context->runtime->gcStats.maxunmarked, markLaterCount); + JSCompartment *comp = cell->compartment(); + markLaterCount += Arena<FreeCell>::ThingsPerArena; + METER_UPDATE_MAX(comp->rt->gcStats.maxunmarked, markLaterCount); #endif } -JS_FRIEND_API(void) +template<typename T> +void +Arena<T>::markDelayedChildren(JSTracer *trc) +{ + T* thing = (T *)getMarkingDelay()->start; + T *thingsEnd = &t.things[ThingsPerArena-1].t; + JS_ASSERT(thing == getAlignedThing(thing)); + while (thing <= thingsEnd) { + if (thing->asCell()->isMarked()) + MarkChildren(trc, thing); + + thing++; + } +} + +void GCMarker::markDelayedChildren() { - JS_ASSERT(this == context->runtime->gcMarkingTracer); - - JSGCArena *a = unmarkedArenaStackTop; - if (!a) { - JS_ASSERT(markLaterCount == 0); - return; - } - - for (;;) { + while (Arena<Cell> *a = unmarkedArenaStackTop) { /* * The following assert verifies that 
the current arena belongs to the - * unmarked stack, since delayMarkingChildren ensures that even for + * unmarked stack, since DelayMarkingChildren ensures that even for * the stack's bottom, prevUnmarked != 0 but rather points to * itself. */ - JSGCArenaInfo *ainfo = a->getInfo(); - JSGCMarkingDelay *markingDelay = a->getMarkingDelay(); - JS_ASSERT(markingDelay->link); - JS_ASSERT(unmarkedArenaStackTop->getMarkingDelay()->link); - unsigned thingSize = ainfo->list->thingSize; - unsigned traceKind = GetFinalizableArenaTraceKind(ainfo); - unsigned indexLimit = ThingsPerArena(thingSize); - unsigned thingsPerUnmarkedBit = ThingsPerUnmarkedBit(thingSize); - - /* - * We cannot use do-while loop here as a->unmarkedChildren can be zero - * before the loop as a leftover from the previous iterations. See - * comments after the loop. - */ - while (markingDelay->unmarkedChildren != 0) { - unsigned unmarkedBitIndex = JS_FLOOR_LOG2W(markingDelay->unmarkedChildren); - markingDelay->unmarkedChildren &= ~(jsuword(1) << unmarkedBitIndex); -#ifdef DEBUG - JS_ASSERT(markLaterCount >= thingsPerUnmarkedBit); - markLaterCount -= thingsPerUnmarkedBit; + MarkingDelay *markingDelay = a->getMarkingDelay(); + switch (a->header()->thingKind) { + case FINALIZE_OBJECT: + reinterpret_cast<Arena<JSObject> *>(a)->markDelayedChildren(this); + break; + case FINALIZE_STRING: + case FINALIZE_EXTERNAL_STRING0: + case FINALIZE_EXTERNAL_STRING1: + case FINALIZE_EXTERNAL_STRING2: + case FINALIZE_EXTERNAL_STRING3: + case FINALIZE_EXTERNAL_STRING4: + case FINALIZE_EXTERNAL_STRING5: + case FINALIZE_EXTERNAL_STRING6: + case FINALIZE_EXTERNAL_STRING7: + reinterpret_cast<Arena<JSString> *>(a)->markDelayedChildren(this); + break; + case FINALIZE_SHORT_STRING: + JS_ASSERT(false); + break; + case FINALIZE_FUNCTION: + reinterpret_cast<Arena<JSFunction> *>(a)->markDelayedChildren(this); + break; +#if JS_HAS_XML_SUPPORT + case FINALIZE_XML: + reinterpret_cast<Arena<JSXML> *>(a)->markDelayedChildren(this); + break; 
#endif - unsigned thingIndex = unmarkedBitIndex * thingsPerUnmarkedBit; - unsigned endIndex = thingIndex + thingsPerUnmarkedBit; - - /* - * endIndex can go beyond the last allocated thing as the real - * limit can be "inside" the bit. - */ - if (endIndex > indexLimit) - endIndex = indexLimit; - uint8 *thing = GCArenaIndexToThing(a, ainfo, thingIndex); - uint8 *end = GCArenaIndexToThing(a, ainfo, endIndex); - do { - JS_ASSERT(thing < end); - if (IsMarkedGCThing(thing)) - JS_TraceChildren(this, thing, traceKind); - thing += thingSize; - } while (thing != end); + default: + JS_ASSERT(false); } /* - * We finished tracing of all things in the the arena but we can only - * pop it from the stack if the arena is the stack's top. - * - * When JS_TraceChildren from the above calls JS_CallTracer that in - * turn on low C stack calls delayMarkingChildren and the latter - * pushes new arenas to the unmarked stack, we have to skip popping - * of this arena until it becomes the top of the stack again. + * Pop the arena off the stack. If we try to mark a thing on the same + * arena and that marking gets delayed, the arena will be put back + * into the worklist. */ - if (a == unmarkedArenaStackTop) { - JSGCArena *aprev = markingDelay->link; + if (unmarkedArenaStackTop == a) { + unmarkedArenaStackTop = (markingDelay->link != a) + ? markingDelay->link + : NULL; markingDelay->link = NULL; - if (a == aprev) { - /* - * prevUnmarked points to itself and we reached the bottom of - * the stack. 
- */ - break; - } - unmarkedArenaStackTop = a = aprev; - } else { - a = unmarkedArenaStackTop; +#ifdef DEBUG + markLaterCount -= Arena<FreeCell>::ThingsPerArena; +#endif } } - JS_ASSERT(unmarkedArenaStackTop); - JS_ASSERT(!unmarkedArenaStackTop->getMarkingDelay()->link); - unmarkedArenaStackTop = NULL; JS_ASSERT(markLaterCount == 0); + JS_ASSERT(!unmarkedArenaStackTop); } void GCMarker::slowifyArrays() { while (!arraysToSlowify.empty()) { JSObject *obj = arraysToSlowify.back(); arraysToSlowify.popBack(); - if (IsMarkedGCThing(obj)) + if (obj->isMarked()) obj->makeDenseArraySlow(context); } } - -void -Mark(JSTracer *trc, void *thing, uint32 kind) -{ - JS_ASSERT(thing); - JS_ASSERT(JS_IS_VALID_TRACE_KIND(kind)); - JS_ASSERT(trc->debugPrinter || trc->debugPrintArg); - JS_ASSERT_IF(!JSString::isStatic(thing), kind == GetFinalizableThingTraceKind(thing)); -#ifdef DEBUG - if (IS_GC_MARKING_TRACER(trc)) { - JSRuntime *rt = trc->context->runtime; - JS_ASSERT(rt->gcMarkingTracer == trc); - JS_ASSERT(rt->gcRunning); - } -#endif - - if (!IS_GC_MARKING_TRACER(trc)) { - trc->callback(trc, thing, kind); - } else { - GCMarker *gcmarker = static_cast<GCMarker *>(trc); - - if (kind == JSTRACE_STRING) { - /* - * Optimize for string as their marking is not recursive. - * - * Iterate through all nodes and leaves in the rope if this is - * part of a rope; otherwise, we only iterate once: on the string - * itself. 
- */ - JSRopeNodeIterator iter((JSString *) thing); - JSString *str = iter.init(); - do { - for (;;) { - if (JSString::isStatic(str)) - break; - JS_ASSERT(kind == GetFinalizableThingTraceKind(str)); - if (!MarkIfUnmarkedGCThing(str)) - break; - if (!str->isDependent()) - break; - str = str->dependentBase(); - } - str = iter.next(); - } while (str); - - } else if (MarkIfUnmarkedGCThing(thing, gcmarker->getMarkColor())) { - /* - * With JS_GC_ASSUME_LOW_C_STACK defined the mark phase of GC - * always uses the non-recursive code that otherwise would be - * called only on a low C stack condition. - */ -#ifdef JS_GC_ASSUME_LOW_C_STACK -# define RECURSION_TOO_DEEP() true -#else - int stackDummy; -# define RECURSION_TOO_DEEP() (!JS_CHECK_STACK_SIZE(trc->context, stackDummy)) -#endif - if (RECURSION_TOO_DEEP()) - gcmarker->delayMarkingChildren(thing); - else - JS_TraceChildren(trc, thing, kind); - } - } - -#ifdef DEBUG - trc->debugPrinter = NULL; - trc->debugPrintArg = NULL; -#endif -} - -void -MarkGCThing(JSTracer *trc, void *thing) -{ - JS_ASSERT(size_t(thing) % JS_GCTHING_ALIGN == 0); - - if (!thing) - return; - - uint32 kind = js_GetGCThingTraceKind(thing); - Mark(trc, thing, kind); -} - } /* namespace js */ static void gc_root_traversal(JSTracer *trc, const RootEntry &entry) { #ifdef DEBUG void *ptr; if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) { ptr = *reinterpret_cast<void **>(entry.key); } else { Value *vp = reinterpret_cast<Value *>(entry.key); - ptr = vp->isGCThing() ? vp->asGCThing() : NULL; + ptr = vp->isGCThing() ? 
vp->toGCThing() : NULL; } if (ptr) { if (!JSString::isStatic(ptr)) { bool root_points_to_gcArenaList = false; - jsuword thing = (jsuword) ptr; - JSRuntime *rt = trc->context->runtime; - for (unsigned i = 0; i != FINALIZE_LIMIT; i++) { - JSGCArenaList *arenaList = &rt->gcArenaList[i]; - size_t thingSize = arenaList->thingSize; - size_t limit = ThingsPerArena(thingSize) * thingSize; - for (JSGCArena *a = arenaList->head; - a; - a = a->getInfo()->prev) { - if (thing - a->toPageStart() < limit) { - root_points_to_gcArenaList = true; - break; - } + JSCompartment **c = trc->context->runtime->compartments.begin(); + for (; c != trc->context->runtime->compartments.end(); ++c) { + JSCompartment *comp = *c; + if (checkArenaListsForThing(comp, ptr)) { + root_points_to_gcArenaList = true; + break; } } if (!root_points_to_gcArenaList && entry.value.name) { fprintf(stderr, "JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n" "invalid gcthing. This is usually caused by a missing call to JS_RemoveRoot.\n" "The root's name is \"%s\".\n", entry.value.name); @@ -2024,21 +1546,18 @@ gc_root_traversal(JSTracer *trc, const R MarkGCThing(trc, *reinterpret_cast<void **>(entry.key)); else MarkValueRaw(trc, *reinterpret_cast<Value *>(entry.key)); } static void gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc) { - uint32 traceKind; - JS_ASSERT(entry.value >= 1); - traceKind = js_GetGCThingTraceKind(entry.key); - JS_CALL_TRACER(trc, entry.key, traceKind, "locked object"); + MarkGCThing(trc, entry.key, "locked object"); } void js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp) { MarkObject(trc, fp->scopeChain(), "scope chain"); if (fp->isDummyFrame()) return; @@ -2049,16 +1568,29 @@ js_TraceStackFrame(JSTracer *trc, JSStac MarkObject(trc, fp->argsObj(), "arguments"); if (fp->isScriptFrame()) js_TraceScript(trc, fp->script()); MarkValue(trc, fp->thisValue(), "this"); MarkValue(trc, fp->returnValue(), "rval"); } +void +AutoIdArray::trace(JSTracer *trc) +{ 
+ JS_ASSERT(tag == IDARRAY); + gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray"); +} + +void +AutoEnumStateRooter::trace(JSTracer *trc) +{ + js::gc::MarkObject(trc, *obj, "js::AutoEnumStateRooter.obj"); +} + inline void AutoGCRooter::trace(JSTracer *trc) { switch (tag) { case JSVAL: MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val"); return; @@ -2158,17 +1690,17 @@ namespace js { void MarkContext(JSTracer *trc, JSContext *acx) { /* Stack frames and slots are traced by StackSpace::mark. */ /* Mark other roots-by-definition in acx. */ if (acx->globalObject && !JS_HAS_OPTION(acx, JSOPTION_UNROOTED_GLOBAL)) - JS_CALL_OBJECT_TRACER(trc, acx->globalObject, "global object"); + MarkObject(trc, *acx->globalObject, "global object"); if (acx->throwing) { MarkValue(trc, acx->exception, "exception"); } else { /* Avoid keeping GC-ed junk stored in JSContext.exception. */ acx->exception.setNull(); } for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down) @@ -2216,17 +1748,17 @@ MarkRuntime(JSTracer *trc) void *thing; switch (gcr->tag) { default: continue; case AutoGCRooter::JSVAL: { const Value &v = static_cast<AutoValueRooter *>(gcr)->val; if (!v.isMarkable()) continue; - thing = v.asGCThing(); + thing = v.toGCThing(); break; } case AutoGCRooter::XML: thing = static_cast<AutoXMLRooter *>(gcr)->xml; break; case AutoGCRooter::OBJECT: thing = static_cast<AutoObjectRooter *>(gcr)->obj; if (!thing) @@ -2239,18 +1771,18 @@ MarkRuntime(JSTracer *trc) thing = JSID_TO_GCTHING(id); break; } } if (JSString::isStatic(thing)) continue; - if (!IsMarkedGCThing(thing)) { - ConservativeGCTest test = IsGCThingWord(rt, reinterpret_cast<jsuword>(thing)); + if (!reinterpret_cast<Cell *>(thing)->isMarked()) { + ConservativeGCTest test = MarkIfGCThingWord(trc, reinterpret_cast<jsuword>(thing)); fprintf(stderr, "Conservative GC scanner has missed the root 0x%p with tag %ld" " on the stack due to %d. 
The root location 0x%p, distance from" " the stack base %ld, conservative gc span %ld." " Consevtaive GC status for the thread %d." " Aborting.\n", thing, (long) gcr->tag, int(test), (void *) gcr, (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase - (jsword) gcr), @@ -2341,135 +1873,47 @@ js_DestroyScriptsToGC(JSContext *cx, JST while ((script = *listp) != NULL) { *listp = script->u.nextToGC; script->u.nextToGC = NULL; js_DestroyScript(cx, script); } } } -inline void -FinalizeObject(JSContext *cx, JSObject *obj, unsigned thingKind) -{ - JS_ASSERT(thingKind == FINALIZE_OBJECT || - thingKind == FINALIZE_FUNCTION); - - /* Cope with stillborn objects that have no map. */ - if (!obj->map) - return; - - /* Finalize obj first, in case it needs map and slots. */ - Class *clasp = obj->getClass(); - if (clasp->finalize) - clasp->finalize(cx, obj); - - Probes::finalizeObject(obj); - - obj->finish(cx); -} - -inline void -FinalizeFunction(JSContext *cx, JSFunction *fun, unsigned thingKind) -{ - FinalizeObject(cx, FUN_OBJECT(fun), thingKind); -} - -#if JS_HAS_XML_SUPPORT -inline void -FinalizeXML(JSContext *cx, JSXML *xml, unsigned thingKind) -{ - js_FinalizeXML(cx, xml); -} -#endif - -JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8); -static JSStringFinalizeOp str_finalizers[JS_EXTERNAL_STRING_LIMIT] = { - NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL -}; - intN js_ChangeExternalStringFinalizer(JSStringFinalizeOp oldop, JSStringFinalizeOp newop) { for (uintN i = 0; i != JS_ARRAY_LENGTH(str_finalizers); i++) { if (str_finalizers[i] == oldop) { str_finalizers[i] = newop; return intN(i); } } return -1; } -inline void -FinalizeShortString(JSContext *cx, JSShortString *str, unsigned thingKind) -{ - JS_ASSERT(FINALIZE_SHORT_STRING == thingKind); - JS_ASSERT(!JSString::isStatic(str->header())); - JS_ASSERT(str->header()->isFlat()); - JS_RUNTIME_UNMETER(cx->runtime, liveStrings); -} - -inline void -FinalizeString(JSContext *cx, JSString *str, unsigned thingKind) -{ - 
JS_ASSERT(FINALIZE_STRING == thingKind); - JS_ASSERT(!JSString::isStatic(str)); - JS_RUNTIME_UNMETER(cx->runtime, liveStrings); - if (str->isDependent()) { - JS_ASSERT(str->dependentBase()); - JS_RUNTIME_UNMETER(cx->runtime, liveDependentStrings); - } else if (str->isFlat()) { - /* - * flatChars for stillborn string is null, but cx->free checks - * for a null pointer on its own. - */ - cx->free(str->flatChars()); - } else if (str->isTopNode()) { - cx->free(str->topNodeBuffer()); - } - /* Nothing to be done for rope interior nodes. */ -} - -inline void -FinalizeExternalString(JSContext *cx, JSString *str, unsigned thingKind) -{ - unsigned type = thingKind - FINALIZE_EXTERNAL_STRING0; - JS_ASSERT(type < JS_ARRAY_LENGTH(str_finalizers)); - JS_ASSERT(!JSString::isStatic(str)); - JS_ASSERT(str->isFlat()); - - JS_RUNTIME_UNMETER(cx->runtime, liveStrings); - - /* A stillborn string has null chars. */ - jschar *chars = str->flatChars(); - if (!chars) - return; - JSStringFinalizeOp finalizer = str_finalizers[type]; - if (finalizer) - finalizer(cx, str); -} - /* * This function is called from js_FinishAtomState to force the finalization * of the permanently interned strings when cx is not available. */ void js_FinalizeStringRT(JSRuntime *rt, JSString *str) { JS_RUNTIME_UNMETER(rt, liveStrings); JS_ASSERT(!JSString::isStatic(str)); JS_ASSERT(!str->isRope()); if (str->isDependent()) { /* A dependent string can not be external and must be valid. */ - JS_ASSERT(JSGCArenaInfo::fromGCThing(str)->list->thingKind == FINALIZE_STRING); + JS_ASSERT(str->asCell()->arena()->header()->thingKind == FINALIZE_STRING); JS_ASSERT(str->dependentBase()); JS_RUNTIME_UNMETER(rt, liveDependentStrings); } else { - unsigned thingKind = JSGCArenaInfo::fromGCThing(str)->list->thingKind; + unsigned thingKind = str->asCell()->arena()->header()->thingKind; JS_ASSERT(IsFinalizableStringKind(thingKind)); /* A stillborn string has null chars, so is not valid. 
*/ jschar *chars = str->flatChars(); if (!chars) return; if (thingKind == FINALIZE_STRING) { rt->free(chars); @@ -2483,122 +1927,127 @@ js_FinalizeStringRT(JSRuntime *rt, JSStr * string knows how to deal with null context. */ finalizer(NULL, str); } } } } -template<typename T, - void finalizer(JSContext *cx, T *thing, unsigned thingKind)> +template<typename T> static void -FinalizeArenaList(JSContext *cx, unsigned thingKind) +FinalizeArenaList(JSCompartment *comp, JSContext *cx, unsigned thingKind) { - JS_STATIC_ASSERT(!(sizeof(T) & GC_CELL_MASK)); - JSGCArenaList *arenaList = &cx->runtime->gcArenaList[thingKind]; - JS_ASSERT(sizeof(T) == arenaList->thingSize); - - JSGCArena **ap = &arenaList->head; - JSGCArena *a = *ap; + JS_STATIC_ASSERT(!(sizeof(T) & Cell::CellMask)); + ArenaList<T> *arenaList = GetFinalizableArenaList<T>(comp, thingKind); + Arena<T> **ap = &arenaList->head; + Arena<T> *a = *ap; if (!a) return; + JS_ASSERT(sizeof(T) == arenaList->head->header()->thingSize); #ifdef JS_GCMETER uint32 nlivearenas = 0, nkilledarenas = 0, nthings = 0; #endif for (;;) { - JSGCArenaInfo *ainfo = a->getInfo(); - JS_ASSERT(ainfo->list == arenaList); + ArenaHeader<T> *header = a->header(); + JS_ASSERT_IF(header->hasFreeThings, header->freeList); + JS_ASSERT(header->thingKind == thingKind); JS_ASSERT(!a->getMarkingDelay()->link); JS_ASSERT(a->getMarkingDelay()->unmarkedChildren == 0); + JS_ASSERT(a->header()->isUsed); - JSGCThing *freeList = NULL; - JSGCThing **tailp = &freeList; + FreeCell *nextFree = header->freeList; + FreeCell *freeList = NULL; + FreeCell **tailp = &freeList; bool allClear = true; - jsuword thing = a->toPageStart(); - jsuword thingsEnd = thing + GC_ARENA_SIZE / sizeof(T) * sizeof(T); + T *thingsEnd = &a->t.things[a->ThingsPerArena-1].t; + T *thing = &a->t.things[0].t; + thingsEnd++; - jsuword nextFree = reinterpret_cast<jsuword>(ainfo->freeList); if (!nextFree) { - nextFree = thingsEnd; + nextFree = thingsEnd->asFreeCell(); } else { - JS_ASSERT(thing 
<= nextFree); - JS_ASSERT(nextFree < thingsEnd); + JS_ASSERT(thing->asCell() <= nextFree); + JS_ASSERT(nextFree < thingsEnd->asCell()); } - jsuword gcCellIndex = 0; - jsbitmap *bitmap = a->getMarkBitmap(); - for (;; thing += sizeof(T), gcCellIndex += sizeof(T) >> GC_CELL_SHIFT) { - if (thing == nextFree) { + for (;; thing++) { + if (thing->asCell() == nextFree) { if (thing == thingsEnd) break; - nextFree = reinterpret_cast<jsuword>( - reinterpret_cast<JSGCThing *>(nextFree)->link); + nextFree = nextFree->link; if (!nextFree) { - nextFree = thingsEnd; + nextFree = thingsEnd->asFreeCell(); } else { - JS_ASSERT(thing < nextFree); - JS_ASSERT(nextFree < thingsEnd); + JS_ASSERT(thing->asCell() < nextFree); + JS_ASSERT(nextFree < thingsEnd->asFreeCell()); } - } else if (JS_TEST_BIT(bitmap, gcCellIndex)) { + } else if (thing->asCell()->isMarked()) { allClear = false; METER(nthings++); continue; } else { - T *t = reinterpret_cast<T *>(thing); - finalizer(cx, t, thingKind); + thing->finalize(cx, thingKind); #ifdef DEBUG - memset(t, JS_FREE_PATTERN, sizeof(T)); + memset(thing, JS_FREE_PATTERN, sizeof(T)); #endif } - JSGCThing *t = reinterpret_cast<JSGCThing *>(thing); + FreeCell *t = thing->asFreeCell(); *tailp = t; tailp = &t->link; } #ifdef DEBUG /* Check that the free list is consistent. */ unsigned nfree = 0; if (freeList) { JS_ASSERT(tailp != &freeList); - JSGCThing *t = freeList; + FreeCell *t = freeList; for (;;) { ++nfree; if (&t->link == tailp) break; JS_ASSERT(t < t->link); t = t->link; } } #endif if (allClear) { /* * Forget just assembled free list head for the arena and * add the arena itself to the destroy list. 
*/ - JS_ASSERT(nfree == ThingsPerArena(sizeof(T))); - *ap = ainfo->prev; - ReleaseGCArena(cx->runtime, a); + JS_ASSERT(nfree == a->ThingsPerArena); + JS_ASSERT((T *)tailp == &a->t.things[a->ThingsPerArena-1].t); + *tailp = NULL; + header->freeList = freeList; +#ifdef DEBUG + header->hasFreeThings = true; +#endif + *ap = (header->next); + JS_ASSERT((T *)header->freeList == &a->t.things[0].t); + a->chunk()->releaseArena((Arena<T> *)a); METER(nkilledarenas++); } else { - JS_ASSERT(nfree < ThingsPerArena(sizeof(T))); + JS_ASSERT(nfree < a->ThingsPerArena); *tailp = NULL; - ainfo->freeList = freeList; - ap = &ainfo->prev; + header->freeList = freeList; +#ifdef DEBUG + header->hasFreeThings = (nfree == 0) ? false : true; +#endif + ap = &header->next; METER(nlivearenas++); } if (!(a = *ap)) break; } arenaList->cursor = arenaList->head; - - METER(UpdateArenaStats(&cx->runtime->gcArenaStats[thingKind], - nlivearenas, nkilledarenas, nthings)); + METER(UpdateCompartmentStats(comp, thingKind, nlivearenas, nkilledarenas, nthings)); } #ifdef JS_THREADSAFE namespace js { bool GCHelperThread::init(JSRuntime *rt) @@ -2718,17 +2167,17 @@ GCHelperThread::doSweep() freeVector.resize(0); } } #endif /* JS_THREADSAFE */ static void -SweepCompartments(JSContext *cx) +SweepCompartments(JSContext *cx, JSGCInvocationKind gckind) { JSRuntime *rt = cx->runtime; JSCompartmentCallback callback = rt->compartmentCallback; JSCompartment **read = rt->compartments.begin(); JSCompartment **end = rt->compartments.end(); JSCompartment **write = read; /* Delete defaultCompartment only during runtime shutdown */ @@ -2737,21 +2186,28 @@ SweepCompartments(JSContext *cx) while (read < end) { JSCompartment *compartment = (*read++); if (compartment->marked) { compartment->marked = false; *write++ = compartment; /* Remove dead wrappers from the compartment map. 
*/ compartment->sweep(cx); } else { - if (callback) - (void) callback(cx, compartment, JSCOMPARTMENT_DESTROY); - if (compartment->principals) - JSPRINCIPALS_DROP(cx, compartment->principals); - delete compartment; + JS_ASSERT(compartment->freeLists.isEmpty()); + if (compartment->arenaListsAreEmpty() || gckind == GC_LAST_CONTEXT) { + if (callback) + (void) callback(cx, compartment, JSCOMPARTMENT_DESTROY); + if (compartment->principals) + JSPRINCIPALS_DROP(cx, compartment->principals); + delete compartment; + } else { + compartment->marked = false; + *write++ = compartment; + compartment->sweep(cx); + } } } rt->compartments.resize(write - rt->compartments.begin()); } /* * Common cache invalidation and so forth that must be done before GC. Even if * GCUntilDone calls GC several times, this work needs to be done only once. @@ -2783,16 +2239,18 @@ PreGCCleanup(JSContext *cx, JSGCInvocati #ifdef JS_GC_ZEAL || rt->gcZeal >= 1 #endif ) { rt->gcRegenShapes = true; rt->shapeGen = Shape::LAST_RESERVED_SHAPE; rt->protoHazardShape = 0; } + for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) + (*c)->purge(cx); js_PurgeThreads(cx); { JSContext *iter = NULL; while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) acx->purge(); } } @@ -2800,31 +2258,32 @@ PreGCCleanup(JSContext *cx, JSGCInvocati /* * Perform mark-and-sweep GC. * * In a JS_THREADSAFE build, the calling thread must be rt->gcThread and each * other thread must be either outside all requests or blocked waiting for GC * to finish. Note that the caller does not hold rt->gcLock. */ static void -MarkAndSweep(JSContext *cx GCTIMER_PARAM) +MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM) { JSRuntime *rt = cx->runtime; rt->gcNumber++; /* * Mark phase. 
*/ GCMarker gcmarker(cx); JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker)); JS_ASSERT(gcmarker.getMarkColor() == BLACK); rt->gcMarkingTracer = &gcmarker; + gcmarker.stackLimit = cx->stackLimit; for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) - GCChunkInfo::fromChunk(r.front())->clearMarkBitmap(); + r.front()->clearMarkBitmap(); MarkRuntime(&gcmarker); js_MarkScriptFilenames(rt); /* * Mark children of things that caused too deep recursion during the above * tracing. */ @@ -2867,56 +2326,57 @@ MarkAndSweep(JSContext *cx GCTIMER_PARA /* Finalize watch points associated with unreachable objects. */ js_SweepWatchPoints(cx); #ifdef DEBUG /* Save the pre-sweep count of scope-mapped properties. */ rt->liveObjectPropsPreSweep = rt->liveObjectProps; #endif -#ifdef JS_METHODJIT - /* Fix-up call ICs guarding against unreachable objects. */ - mjit::SweepCallICs(cx); +#ifdef JS_TRACER + for (ThreadDataIter i(rt); !i.empty(); i.popFront()) + i.threadData()->traceMonitor.sweep(); #endif /* * We finalize iterators before other objects so the iterator can use the * object which properties it enumerates over to finalize the enumeration * state. We finalize objects before other GC things to ensure that * object's finalizer can access them even if they will be freed. */ - JS_ASSERT(!rt->gcEmptyArenaList); - FinalizeArenaList<JSObject, FinalizeObject>(cx, FINALIZE_OBJECT); - FinalizeArenaList<JSFunction, FinalizeFunction>(cx, FINALIZE_FUNCTION); + + for (JSCompartment **comp = rt->compartments.begin(); comp != rt->compartments.end(); comp++) { + FinalizeArenaList<JSObject>(*comp, cx, FINALIZE_OBJECT); + FinalizeArenaList<JSFunction>(*comp, cx, FINALIZE_FUNCTION); #if JS_HAS_XML_SUPPORT - FinalizeArenaList<JSXML, FinalizeXML>(cx, FINALIZE_XML); + FinalizeArenaList<JSXML>(*comp, cx, FINALIZE_XML); #endif + } TIMESTAMP(sweepObjectEnd); /* * We sweep the deflated cache before we finalize the strings so the * cache can safely use js_IsAboutToBeFinalized.. 
*/ rt->deflatedStringCache->sweep(cx); - FinalizeArenaList<JSShortString, FinalizeShortString>(cx, FINALIZE_SHORT_STRING); - FinalizeArenaList<JSString, FinalizeString>(cx, FINALIZE_STRING); - for (unsigned i = FINALIZE_EXTERNAL_STRING0; - i <= FINALIZE_EXTERNAL_STRING_LAST; - ++i) { - FinalizeArenaList<JSString, FinalizeExternalString>(cx, i); + for (JSCompartment **comp = rt->compartments.begin(); comp != rt->compartments.end(); comp++) { + FinalizeArenaList<JSShortString>(*comp, cx, FINALIZE_SHORT_STRING); + FinalizeArenaList<JSString>(*comp, cx, FINALIZE_STRING); + for (unsigned i = FINALIZE_EXTERNAL_STRING0; i <= FINALIZE_EXTERNAL_STRING_LAST; ++i) + FinalizeArenaList<JSString>(*comp, cx, i); } rt->gcNewArenaTriggerBytes = rt->gcBytes < GC_ARENA_ALLOCATION_TRIGGER ? GC_ARENA_ALLOCATION_TRIGGER : rt->gcBytes; TIMESTAMP(sweepStringEnd); - SweepCompartments(cx); + SweepCompartments(cx, gckind); /* * Sweep the runtime's property trees after finalizing objects, in case any * had watchpoints referencing tree nodes. */ js::PropertyTree::sweepShapes(cx); /* @@ -2929,17 +2389,17 @@ MarkAndSweep(JSContext *cx GCTIMER_PARA /* Slowify arrays we have accumulated. */ gcmarker.slowifyArrays(); /* * Destroy arenas after we finished the sweeping so finalizers can safely * use js_IsAboutToBeFinalized(). 
*/ - FreeGCChunks(rt); + ExpireGCChunks(rt); TIMESTAMP(sweepDestroyEnd); if (rt->gcCallback) (void) rt->gcCallback(cx, JSGC_FINALIZE_END); #ifdef DEBUG_srcnotesize { extern void DumpSrcNoteSizeHist(); DumpSrcNoteSizeHist(); printf("GC HEAP SIZE %lu\n", (unsigned long)rt->gcBytes); @@ -3172,17 +2632,17 @@ GCUntilDone(JSContext *cx, JSGCInvocatio rt->gcPoke = false; AutoUnlockGC unlock(rt); if (firstRun) { PreGCCleanup(cx, gckind); TIMESTAMP(startMark); firstRun = false; } - MarkAndSweep(cx GCTIMER_ARG); + MarkAndSweep(cx, gckind GCTIMER_ARG); // GC again if: // - another thread, not in a request, called js_GC // - js_GC was called recursively // - a finalizer called js_RemoveRoot or js_UnlockGCThingRT. } while (rt->gcPoke); #ifdef JS_THREADSAFE @@ -3245,21 +2705,24 @@ js_GC(JSContext *cx, JSGCInvocationKind (void) callback(cx, JSGC_END); } /* * On shutdown, iterate until the JSGC_END callback stops creating * garbage. */ } while (gckind == GC_LAST_CONTEXT && rt->gcPoke); - +#ifdef JS_GCMETER + js_DumpGCStats(cx->runtime, stderr); +#endif GCTIMER_END(gckind == GC_LAST_CONTEXT); } namespace js { +namespace gc { bool SetProtoCheckingForCycles(JSContext *cx, JSObject *obj, JSObject *proto) { /* * This function cannot be called during the GC and always requires a * request. */ @@ -3289,46 +2752,16 @@ SetProtoCheckingForCycles(JSContext *cx, obj2 = obj2->getProto(); } if (!cycle) obj->setProto(proto); return !cycle; } -void -TraceRuntime(JSTracer *trc) -{ - LeaveTrace(trc->context); - -#ifdef JS_THREADSAFE - { - JSContext *cx = trc->context; - JSRuntime *rt = cx->runtime; - AutoLockGC lock(rt); - - if (rt->gcThread != cx->thread) { - AutoGCSession gcsession(cx); - AutoUnlockGC unlock(rt); - RecordNativeStackTopForGC(trc->context); - MarkRuntime(trc); - return; - } - } -#else - RecordNativeStackTopForGC(trc->context); -#endif - - /* - * Calls from inside a normal GC or a recursive calls are OK and do not - * require session setup. 
- */ - MarkRuntime(trc); -} - JSCompartment * NewCompartment(JSContext *cx, JSPrincipals *principals) { JSRuntime *rt = cx->runtime; JSCompartment *compartment = new JSCompartment(rt); if (!compartment || !compartment->init()) { JS_ReportOutOfMemory(cx); return NULL; @@ -3350,13 +2783,44 @@ NewCompartment(JSContext *cx, JSPrincipa } JSCompartmentCallback callback = rt->compartmentCallback; if (callback && !callback(cx, compartment, JSCOMPARTMENT_NEW)) { AutoLockGC lock(rt); rt->compartments.popBack(); return NULL; } - return compartment; } +} /* namespace gc */ + +void +TraceRuntime(JSTracer *trc) +{ + LeaveTrace(trc->context); + +#ifdef JS_THREADSAFE + { + JSContext *cx = trc->context; + JSRuntime *rt = cx->runtime; + AutoLockGC lock(rt); + + if (rt->gcThread != cx->thread) { + AutoGCSession gcsession(cx); + AutoUnlockGC unlock(rt); + RecordNativeStackTopForGC(trc->context); + MarkRuntime(trc); + return; + } + } +#else + RecordNativeStackTopForGC(trc->context); +#endif + + /* + * Calls from inside a normal GC or a recursive calls are OK and do not + * require session setup. + */ + MarkRuntime(trc); } + +} /* namespace js */
--- a/js/src/jsgc.h +++ b/js/src/jsgc.h @@ -51,58 +51,653 @@ #include "jsbit.h" #include "jsgcchunk.h" #include "jsutil.h" #include "jsvector.h" #include "jsversion.h" #include "jsobj.h" #include "jsfun.h" #include "jsgcstats.h" +#include "jscell.h" + +struct JSCompartment; + +extern "C" void +js_TraceXML(JSTracer *trc, JSXML* thing); + +#if JS_STACK_GROWTH_DIRECTION > 0 +# define JS_CHECK_STACK_SIZE(limit, lval) ((jsuword)(lval) < limit) +#else +# define JS_CHECK_STACK_SIZE(limit, lval) ((jsuword)(lval) > limit) +#endif + +namespace js { +namespace gc { + +/* Every arena has a header. */ +template <typename T> +struct ArenaHeader { + JSCompartment *compartment; + Arena<T> *next; + FreeCell *freeList; + unsigned thingKind; + bool isUsed; + size_t thingSize; +#ifdef DEBUG + bool hasFreeThings; +#endif +}; + +template <typename T> +union ThingOrCell { + T t; + FreeCell cell; +}; + +template <typename T, size_t N, size_t R> +struct Things { + ThingOrCell<T> things[N]; + char filler[R]; +}; + +template <typename T, size_t N> +struct Things<T, N, 0> { + ThingOrCell<T> things[N]; +}; + +template <typename T> +struct Arena { + static const size_t ArenaSize = 4096; + + struct AlignedArenaHeader { + T align[(sizeof(ArenaHeader<T>) + sizeof(T) - 1) / sizeof(T)]; + }; + + /* We want things in the arena to be aligned, so align the header. 
*/ + union { + ArenaHeader<T> aheader; + AlignedArenaHeader align; + }; + + static const size_t ThingsPerArena = (ArenaSize - sizeof(AlignedArenaHeader)) / sizeof(T); + static const size_t FillerSize = ArenaSize - sizeof(AlignedArenaHeader) - sizeof(T) * ThingsPerArena; + Things<T, ThingsPerArena, FillerSize> t; + + inline Chunk *chunk() const; + inline size_t arenaIndex() const; + + inline ArenaHeader<T> *header() { return &aheader; }; + + inline MarkingDelay *getMarkingDelay() const; + inline ArenaBitmap *bitmap() const; + + inline ConservativeGCTest mark(T *thing, JSTracer *trc); + void markDelayedChildren(JSTracer *trc); + inline bool inFreeList(void *thing) const; + inline T *getAlignedThing(T *thing); +#ifdef DEBUG + bool assureThingIsAligned(T *thing); +#endif + + void init(JSCompartment *compartment, unsigned thingKind); +}; +JS_STATIC_ASSERT(sizeof(Arena<FreeCell>) == 4096); + +/* + * Live objects are marked black. How many other additional colors are available + * depends on the size of the GCThing. + */ +static const uint32 BLACK = 0; + +/* An arena bitmap contains enough mark bits for all the cells in an arena. */ +struct ArenaBitmap { + static const size_t BitsPerWord = sizeof(uintptr_t) == 4 ? 
32 : 64; + static const size_t BitmapSize = (Arena<FreeCell>::ThingsPerArena / BitsPerWord) + 1; + uintptr_t bitmap[BitmapSize]; + + JS_ALWAYS_INLINE void mark(size_t bit, uint32 color) { + uintptr_t *word = &bitmap[bit / BitsPerWord]; + JS_ASSERT(word < &bitmap[JS_ARRAY_LENGTH(bitmap)]); + *word |= (uintptr_t(1) << ((bit + color) % BitsPerWord)); + } + + JS_ALWAYS_INLINE bool isMarked(size_t bit, uint32 color) { + uintptr_t *word = &bitmap[bit / BitsPerWord]; + JS_ASSERT(word < &bitmap[JS_ARRAY_LENGTH(bitmap)]); + return *word & (uintptr_t(1) << ((bit + color) % BitsPerWord)); + } + + JS_ALWAYS_INLINE bool markIfUnmarked(size_t bit, uint32 color) { + uintptr_t *word = &bitmap[bit / BitsPerWord]; + JS_ASSERT(word < &bitmap[JS_ARRAY_LENGTH(bitmap)]); + uintptr_t mask = (uintptr_t(1) << (bit % BitsPerWord)); + if (*word & mask) + return false; + *word |= mask; + if (color != BLACK) { + mask = (uintptr_t(1) << ((bit + color) % BitsPerWord)); + if (*word & mask) + return false; + *word |= mask; + } + + return true; + } +}; + +JS_STATIC_ASSERT(Arena<FreeCell>::ArenaSize % ArenaBitmap::BitsPerWord == 0); + +/* Marking delay is used to resume marking later when recursive marking uses too much stack. 
*/ +struct MarkingDelay { + Arena<Cell> *link; + uintptr_t unmarkedChildren; + jsuword start; + + void init() + { + link = NULL; + unmarkedChildren = 0; + } +}; + +struct EmptyArenaLists { + Arena<FreeCell> *cellFreeList; + Arena<JSObject> *objectFreeList; + Arena<JSString> *stringFreeList; + Arena<JSShortString> *shortStringFreeList; + Arena<JSFunction> *functionFreeList; + + void init() { + cellFreeList = NULL; + objectFreeList = NULL; + stringFreeList = NULL; + shortStringFreeList = NULL; + functionFreeList = NULL; + } + + Arena<FreeCell> *getOtherArena() { + Arena<FreeCell> *arena = NULL; + if ((arena = (Arena<FreeCell> *)cellFreeList)) { + cellFreeList = cellFreeList->header()->next; + return arena; + } else if ((arena = (Arena<FreeCell> *)objectFreeList)) { + objectFreeList = objectFreeList->header()->next; + return arena; + } else if ((arena = (Arena<FreeCell> *)stringFreeList)) { + stringFreeList = stringFreeList->header()->next; + return arena; + } else if ((arena = (Arena<FreeCell> *)shortStringFreeList)) { + shortStringFreeList = shortStringFreeList->header()->next; + return arena; + } else { + JS_ASSERT(functionFreeList); + arena = (Arena<FreeCell> *)functionFreeList; + functionFreeList = functionFreeList->header()->next; + return arena; + } + } + + template <typename T> + Arena<T> *getTypedFreeList(); + + template <typename T> + Arena<T> *getNext(JSCompartment *comp, unsigned thingKind); + + template <typename T> + void insert(Arena<T> *arena); +}; + +template<typename T> +Arena<T> *EmptyArenaLists::getNext(JSCompartment *comp, unsigned thingKind) { + Arena<T> *arena = getTypedFreeList<T>(); + if (arena) { + JS_ASSERT(arena->header()->isUsed == false); + JS_ASSERT(arena->header()->thingSize == sizeof(T)); + arena->header()->isUsed = true; + arena->header()->thingKind = thingKind; + arena->header()->compartment = comp; + return arena; + } + arena = (Arena<T> *)getOtherArena(); + JS_ASSERT(arena->header()->isUsed == false); + arena->init(comp, 
thingKind); + return arena; +} + +/* The chunk header (located at the end of the chunk to preserve arena alignment). */ +struct ChunkInfo { + Chunk *link; + JSRuntime *runtime; + EmptyArenaLists emptyArenaLists; + size_t age; + size_t numFree; +}; + +/* Chunks contain arenas and associated data structures (mark bitmap, delayed marking state). */ +struct Chunk { + static const size_t BytesPerArena = sizeof(Arena<FreeCell>) + + sizeof(ArenaBitmap) + + sizeof(MarkingDelay); + + static const size_t ArenasPerChunk = (GC_CHUNK_SIZE - sizeof(ChunkInfo)) / BytesPerArena; + static const size_t MaxAge = 3; + + Arena<FreeCell> arenas[ArenasPerChunk]; + ArenaBitmap bitmaps[ArenasPerChunk]; + MarkingDelay markingDelay[ArenasPerChunk]; + + ChunkInfo info; + + void clearMarkBitmap(); + void init(JSRuntime *rt); + + bool unused(); + bool hasAvailableArenas(); + bool withinArenasRange(Cell *cell); + + template <typename T> + Arena<T> *allocateArena(JSCompartment *comp, unsigned thingKind); + + template <typename T> + void releaseArena(Arena<T> *a); + + JSRuntime *getRuntime(); + bool expire(); +}; +JS_STATIC_ASSERT(sizeof(Chunk) <= GC_CHUNK_SIZE); +JS_STATIC_ASSERT(sizeof(Chunk) + Chunk::BytesPerArena > GC_CHUNK_SIZE); + +Arena<Cell> * +Cell::arena() const +{ + uintptr_t addr = uintptr_t(this); + JS_ASSERT(addr % sizeof(FreeCell) == 0); + addr &= ~(Arena<FreeCell>::ArenaSize - 1); + return reinterpret_cast<Arena<Cell> *>(addr); +} + +Chunk * +Cell::chunk() const +{ + uintptr_t addr = uintptr_t(this); + JS_ASSERT(addr % sizeof(FreeCell) == 0); + addr &= ~(GC_CHUNK_SIZE - 1); + return reinterpret_cast<Chunk *>(addr); +} + +ArenaBitmap * +Cell::bitmap() const +{ + return &chunk()->bitmaps[arena()->arenaIndex()]; +} + +size_t +Cell::cellIndex() const +{ + return reinterpret_cast<const FreeCell *>(this) - reinterpret_cast<FreeCell *>(&arena()->t); +} + +template <typename T> +Chunk * +Arena<T>::chunk() const +{ + uintptr_t addr = uintptr_t(this); + JS_ASSERT(addr % sizeof(FreeCell) == 
0); + addr &= ~(GC_CHUNK_SIZE - 1); + return reinterpret_cast<Chunk *>(addr); +} + +template <typename T> +size_t +Arena<T>::arenaIndex() const +{ + return reinterpret_cast<const Arena<FreeCell> *>(this) - chunk()->arenas; +} + +template <typename T> +MarkingDelay * +Arena<T>::getMarkingDelay() const +{ + return &chunk()->markingDelay[arenaIndex()]; +} + +template <typename T> +ArenaBitmap * +Arena<T>::bitmap() const +{ + return &chunk()->bitmaps[arenaIndex()]; +} + +inline void +Cell::mark(uint32 color = BLACK) const +{ + bitmap()->mark(cellIndex(), color); +} + +static void +AssertValidColor(const void *thing, uint32 color) +{ + JS_ASSERT_IF(color, color < reinterpret_cast<const js::gc::FreeCell *>(thing)->arena()->header()->thingSize / sizeof(FreeCell)); +} + +inline bool +Cell::isMarked(uint32 color = BLACK) const +{ + AssertValidColor(this, color); + return bitmap()->isMarked(cellIndex(), color); +} + +bool +Cell::markIfUnmarked(uint32 color = BLACK) const +{ + AssertValidColor(this, color); + return bitmap()->markIfUnmarked(cellIndex(), color); +} + +JSCompartment * +Cell::compartment() const +{ + return arena()->header()->compartment; +} + +template <typename T> +static inline +Arena<T> * +GetArena(Cell *cell) +{ + return reinterpret_cast<Arena<T> *>(cell->arena()); +} + +/* + * The kind of GC thing with a finalizer. The external strings follow the + * ordinary string to simplify js_GetExternalStringGCType. + */ +enum JSFinalizeGCThingKind { + FINALIZE_OBJECT, + FINALIZE_FUNCTION, +#if JS_HAS_XML_SUPPORT + FINALIZE_XML, +#endif + FINALIZE_SHORT_STRING, + FINALIZE_STRING, + FINALIZE_EXTERNAL_STRING0, + FINALIZE_EXTERNAL_STRING1, + FINALIZE_EXTERNAL_STRING2, + FINALIZE_EXTERNAL_STRING3, + FINALIZE_EXTERNAL_STRING4, + FINALIZE_EXTERNAL_STRING5, + FINALIZE_EXTERNAL_STRING6, + FINALIZE_EXTERNAL_STRING7, + FINALIZE_EXTERNAL_STRING_LAST = FINALIZE_EXTERNAL_STRING7, + FINALIZE_LIMIT +}; #define JSTRACE_XML 2 /* * One past the maximum trace kind. 
*/ #define JSTRACE_LIMIT 3 /* * Lower limit after which we limit the heap growth */ -const size_t GC_ARENA_ALLOCATION_TRIGGER = 25 * js::GC_CHUNK_SIZE; +const size_t GC_ARENA_ALLOCATION_TRIGGER = 30 * js::GC_CHUNK_SIZE; /* * A GC is triggered once the number of newly allocated arenas * is 1.5 times the number of live arenas after the last GC. * (Starting after the lower limit of GC_ARENA_ALLOCATION_TRIGGER) */ -const float GC_HEAP_GROWTH_FACTOR = 1.5; +const float GC_HEAP_GROWTH_FACTOR = 3; const uintN JS_EXTERNAL_STRING_LIMIT = 8; +static inline size_t +GetFinalizableTraceKind(size_t thingKind) +{ + JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8); + + static const uint8 map[FINALIZE_LIMIT] = { + JSTRACE_OBJECT, /* FINALIZE_OBJECT */ + JSTRACE_OBJECT, /* FINALIZE_FUNCTION */ +#if JS_HAS_XML_SUPPORT /* FINALIZE_XML */ + JSTRACE_XML, +#endif + JSTRACE_STRING, /* FINALIZE_SHORT_STRING */ + JSTRACE_STRING, /* FINALIZE_STRING */ + JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING0 */ + JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING1 */ + JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING2 */ + JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING3 */ + JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING4 */ + JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING5 */ + JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING6 */ + JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING7 */ + }; + + JS_ASSERT(thingKind < FINALIZE_LIMIT); + return map[thingKind]; +} + +static inline bool +IsFinalizableStringKind(unsigned thingKind) +{ + return unsigned(FINALIZE_SHORT_STRING) <= thingKind && + thingKind <= unsigned(FINALIZE_EXTERNAL_STRING_LAST); +} + +/* + * Get the type of the external string or -1 if the string was not created + * with JS_NewExternalString. 
+ */ +static inline intN +GetExternalStringGCType(JSString *str) +{ + JS_STATIC_ASSERT(FINALIZE_STRING + 1 == FINALIZE_EXTERNAL_STRING0); + JS_ASSERT(!JSString::isStatic(str)); + + unsigned thingKind = GetArena<JSString>((Cell *)str)->header()->thingKind; + JS_ASSERT(IsFinalizableStringKind(thingKind)); + return intN(thingKind) - intN(FINALIZE_EXTERNAL_STRING0); +} + +static inline uint32 +GetGCThingTraceKind(void *thing) +{ + JS_ASSERT(thing); + if (JSString::isStatic(thing)) + return JSTRACE_STRING; + Cell *cell = reinterpret_cast<Cell *>(thing); + return GetFinalizableTraceKind(cell->arena()->header()->thingKind); +} + +static inline JSRuntime * +GetGCThingRuntime(void *thing) +{ + return reinterpret_cast<FreeCell *>(thing)->chunk()->info.runtime; +} + +#ifdef DEBUG +extern bool +checkArenaListsForThing(JSCompartment *comp, jsuword thing); +#endif + +template <typename T> +struct ArenaList { + Arena<T> *head; /* list start */ + Arena<T> *cursor; /* arena with free things */ + + inline void init() { + head = NULL; + cursor = NULL; + } + + inline Arena<T> *getNextWithFreeList() { + Arena<T> *a; + while (cursor != NULL) { + ArenaHeader<T> *aheader = cursor->header(); + a = cursor; + cursor = (Arena<T> *)aheader->next; + if (aheader->freeList) + return a; + } + return NULL; + } + +#ifdef DEBUG + bool arenasContainThing(void *thing) { + for (Arena<T> *a = head; a; a = (Arena<T> *)a->header()->next) { + JS_ASSERT(a->header()->isUsed); + if (thing >= &a->t.things[0] && thing < &a->t.things[a->ThingsPerArena]) + return true; + } + return false; + } +#endif + + inline void insert(Arena<T> *a) { + a->header()->next = head; + head = a; + } + + void releaseAll() { + while (head) { + Arena<T> *next = head->header()->next; + head->chunk()->releaseArena(head); + head = next; + } + head = NULL; + cursor = NULL; + } + + inline bool isEmpty() const { + return (head == NULL); + } +}; + +struct FreeLists { + FreeCell **finalizables[FINALIZE_LIMIT]; + + void purge(); + + inline 
FreeCell *getNext(uint32 kind) { + FreeCell *top = NULL; + if (finalizables[kind]) { + top = *finalizables[kind]; + if (top) { + *finalizables[kind] = top->link; + } else { + finalizables[kind] = NULL; + } +#ifdef DEBUG + if (top && !top->link) + top->arena()->header()->hasFreeThings = false; +#endif + } + return top; + } + + template <typename T> + inline void populate(Arena<T> *a, uint32 thingKind) { + finalizables[thingKind] = &a->header()->freeList; + } + +#ifdef DEBUG + bool isEmpty() const { + for (size_t i = 0; i != JS_ARRAY_LENGTH(finalizables); ++i) { + if (finalizables[i]) + return false; + } + return true; + } +#endif +}; +} + +typedef Vector<gc::Chunk *, 32, SystemAllocPolicy> GCChunks; + +struct GCPtrHasher +{ + typedef void *Lookup; + + static HashNumber hash(void *key) { + return HashNumber(uintptr_t(key) >> JS_GCTHING_ZEROBITS); + } + + static bool match(void *l, void *k) { return l == k; } +}; + +typedef HashMap<void *, uint32, GCPtrHasher, SystemAllocPolicy> GCLocks; + +struct RootInfo { + RootInfo() {} + RootInfo(const char *name, JSGCRootType type) : name(name), type(type) {} + const char *name; + JSGCRootType type; +}; + +typedef js::HashMap<void *, + RootInfo, + js::DefaultHasher<void *>, + js::SystemAllocPolicy> RootedValueMap; + +/* If HashNumber grows, need to change WrapperHasher. */ +JS_STATIC_ASSERT(sizeof(HashNumber) == 4); + +struct WrapperHasher +{ + typedef Value Lookup; + + static HashNumber hash(Value key) { + uint64 bits = JSVAL_BITS(Jsvalify(key)); + return (uint32)bits ^ (uint32)(bits >> 32); + } + + static bool match(const Value &l, const Value &k) { return l == k; } +}; + +typedef HashMap<Value, Value, WrapperHasher, SystemAllocPolicy> WrapperMap; + +class AutoValueVector; +class AutoIdVector; +} + +static inline void +CheckGCFreeListLink(js::gc::FreeCell *cell) +{ + /* + * The GC things on the free lists come from one arena and the things on + * the free list are linked in ascending address order. 
+ */ + JS_ASSERT_IF(cell->link, + cell->arena() == + cell->link->arena()); + JS_ASSERT_IF(cell->link, cell < cell->link); +} + +template <typename T> +extern bool +RefillFinalizableFreeList(JSContext *cx, unsigned thingKind); + +#ifdef DEBUG +extern bool +CheckAllocation(JSContext *cx); +#endif + /* * Get the type of the external string or -1 if the string was not created * with JS_NewExternalString. */ extern intN js_GetExternalStringGCType(JSString *str); extern JS_FRIEND_API(uint32) js_GetGCThingTraceKind(void *thing); -extern size_t -ThingsPerArena(size_t thingSize); - -/* - * The sole purpose of the function is to preserve public API compatibility - * in JS_GetStringBytes which takes only single JSString* argument. - */ -JSRuntime * -js_GetGCThingRuntime(void *thing); - #if 1 /* * Since we're forcing a GC from JS_GC anyway, don't bother wasting cycles * loading oldval. XXX remove implied force, fix jsinterp.c's "second arg * ignored", etc. */ #define GC_POKE(cx, oldval) ((cx)->runtime->gcPoke = JS_TRUE) #else @@ -151,33 +746,21 @@ js_ReserveObjects(JSContext *cx, size_t extern JSBool js_LockGCThingRT(JSRuntime *rt, void *thing); extern void js_UnlockGCThingRT(JSRuntime *rt, void *thing); extern JS_FRIEND_API(bool) -js_IsAboutToBeFinalized(void *thing); +IsAboutToBeFinalized(void *thing); extern JS_FRIEND_API(bool) js_GCThingIsMarked(void *thing, uint32 color); -/* - * Macro to test if a traversal is the marking phase of GC to avoid exposing - * ScriptFilenameEntry to traversal implementations. 
- */ -#define IS_GC_MARKING_TRACER(trc) ((trc)->callback == NULL) - -#if JS_HAS_XML_SUPPORT -# define JS_IS_VALID_TRACE_KIND(kind) ((uint32)(kind) < JSTRACE_LIMIT) -#else -# define JS_IS_VALID_TRACE_KIND(kind) ((uint32)(kind) <= JSTRACE_STRING) -#endif - extern void js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp); namespace js { extern JS_REQUIRES_STACK void MarkRuntime(JSTracer *trc); @@ -226,134 +809,16 @@ extern void js_WaitForGC(JSRuntime *rt); #else /* !JS_THREADSAFE */ # define js_WaitForGC(rt) ((void) 0) #endif -/* - * The kind of GC thing with a finalizer. The external strings follow the - * ordinary string to simplify js_GetExternalStringGCType. - */ -enum JSFinalizeGCThingKind { - FINALIZE_OBJECT, - FINALIZE_FUNCTION, -#if JS_HAS_XML_SUPPORT - FINALIZE_XML, -#endif - FINALIZE_SHORT_STRING, - FINALIZE_STRING, - FINALIZE_EXTERNAL_STRING0, - FINALIZE_EXTERNAL_STRING1, - FINALIZE_EXTERNAL_STRING2, - FINALIZE_EXTERNAL_STRING3, - FINALIZE_EXTERNAL_STRING4, - FINALIZE_EXTERNAL_STRING5, - FINALIZE_EXTERNAL_STRING6, - FINALIZE_EXTERNAL_STRING7, - FINALIZE_EXTERNAL_STRING_LAST = FINALIZE_EXTERNAL_STRING7, - FINALIZE_LIMIT -}; - -static inline bool -IsFinalizableStringKind(unsigned thingKind) -{ - return unsigned(FINALIZE_SHORT_STRING) <= thingKind && - thingKind <= unsigned(FINALIZE_EXTERNAL_STRING_LAST); -} - -/* - * Allocates a new GC thing. After a successful allocation the caller must - * fully initialize the thing before calling any function that can potentially - * trigger GC. This will ensure that GC tracing never sees junk values stored - * in the partially initialized thing. 
- */ -extern void * -js_NewFinalizableGCThing(JSContext *cx, unsigned thingKind); - -static inline JSObject * -js_NewGCObject(JSContext *cx) -{ - return (JSObject *) js_NewFinalizableGCThing(cx, FINALIZE_OBJECT); -} - -static inline JSString * -js_NewGCString(JSContext *cx) -{ - return (JSString *) js_NewFinalizableGCThing(cx, FINALIZE_STRING); -} - -struct JSShortString; - -static inline JSShortString * -js_NewGCShortString(JSContext *cx) -{ - return (JSShortString *) js_NewFinalizableGCThing(cx, FINALIZE_SHORT_STRING); -} - -static inline JSString * -js_NewGCExternalString(JSContext *cx, uintN type) -{ - JS_ASSERT(type < JS_EXTERNAL_STRING_LIMIT); - type += FINALIZE_EXTERNAL_STRING0; - return (JSString *) js_NewFinalizableGCThing(cx, type); -} - -static inline JSFunction * -js_NewGCFunction(JSContext *cx) -{ - JSFunction* obj = (JSFunction *)js_NewFinalizableGCThing(cx, FINALIZE_FUNCTION); - -#ifdef DEBUG - if (obj) { - memset((uint8 *) obj + sizeof(JSObject), JS_FREE_PATTERN, - sizeof(JSFunction) - sizeof(JSObject)); - } -#endif - - return obj; -} - -#if JS_HAS_XML_SUPPORT -static inline JSXML * -js_NewGCXML(JSContext *cx) -{ - return (JSXML *) js_NewFinalizableGCThing(cx, FINALIZE_XML); -} -#endif - -struct JSGCArena; - -struct JSGCArenaList { - JSGCArena *head; /* list start */ - JSGCArena *cursor; /* arena with free things */ - uint32 thingKind; /* one of JSFinalizeGCThingKind */ - uint32 thingSize; /* size of things to allocate on this list - */ -}; - -struct JSGCFreeLists { - JSGCThing *finalizables[FINALIZE_LIMIT]; - - void purge(); - void moveTo(JSGCFreeLists * another); - -#ifdef DEBUG - bool isEmpty() const { - for (size_t i = 0; i != JS_ARRAY_LENGTH(finalizables); ++i) { - if (finalizables[i]) - return false; - } - return true; - } -#endif -}; - extern void js_DestroyScriptsToGC(JSContext *cx, JSThreadData *data); namespace js { #ifdef JS_THREADSAFE /* @@ -420,40 +885,36 @@ class GCHelperThread { *freeCursor++ = ptr; else replenishAndFreeLater(ptr); } 
}; #endif /* JS_THREADSAFE */ - -struct GCChunkInfo; - struct GCChunkHasher { - typedef jsuword Lookup; + typedef gc::Chunk *Lookup; /* * Strip zeros for better distribution after multiplying by the golden * ratio. */ - static HashNumber hash(jsuword chunk) { - JS_ASSERT(!(chunk & GC_CHUNK_MASK)); - return HashNumber(chunk >> GC_CHUNK_SHIFT); + static HashNumber hash(gc::Chunk *chunk) { + JS_ASSERT(!(jsuword(chunk) & GC_CHUNK_MASK)); + return HashNumber(jsuword(chunk) >> GC_CHUNK_SHIFT); } - static bool match(jsuword k, jsuword l) { - JS_ASSERT(!(k & GC_CHUNK_MASK)); - JS_ASSERT(!(l & GC_CHUNK_MASK)); + static bool match(gc::Chunk *k, gc::Chunk *l) { + JS_ASSERT(!(jsuword(k) & GC_CHUNK_MASK)); + JS_ASSERT(!(jsuword(l) & GC_CHUNK_MASK)); return k == l; } }; -typedef HashSet<jsuword, GCChunkHasher, SystemAllocPolicy> GCChunkSet; -typedef Vector<GCChunkInfo *, 32, SystemAllocPolicy> GCChunkInfoVector; +typedef HashSet<js::gc::Chunk *, GCChunkHasher, SystemAllocPolicy> GCChunkSet; struct ConservativeGCThreadData { /* * The GC scans conservatively between JSThreadData::nativeStackBase and * nativeStackTop unless the latter is NULL. */ jsuword *nativeStackTop; @@ -485,26 +946,26 @@ struct ConservativeGCThreadData { return !!nativeStackTop; } }; struct GCMarker : public JSTracer { private: /* The color is only applied to objects, functions and xml. */ uint32 color; - + public: + jsuword stackLimit; /* See comments before delayMarkingChildren is jsgc.cpp. 
*/ - JSGCArena *unmarkedArenaStackTop; + js::gc::Arena<js::gc::Cell> *unmarkedArenaStackTop; #ifdef DEBUG size_t markLaterCount; #endif - public: #if defined(JS_DUMP_CONSERVATIVE_GC_ROOTS) || defined(JS_GCMETER) - ConservativeGCStats conservativeStats; + js::gc::ConservativeGCStats conservativeStats; #endif #ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS struct ConservativeRoot { void *thing; uint32 traceKind; }; Vector<ConservativeRoot, 0, SystemAllocPolicy> conservativeRoots; const char *conservativeDumpFileName; void dumpConservativeRoots(); @@ -531,176 +992,55 @@ struct GCMarker : public JSTracer { void delayMarkingChildren(void *thing); JS_FRIEND_API(void) markDelayedChildren(); void slowifyArrays(); }; +void +MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end); + } /* namespace js */ extern void js_FinalizeStringRT(JSRuntime *rt, JSString *str); /* * This function is defined in jsdbgapi.cpp but is declared here to avoid * polluting jsdbgapi.h, a public API header, with internal functions. */ extern void js_MarkTraps(JSTracer *trc); namespace js { +namespace gc { + +/* + * Macro to test if a traversal is the marking phase of GC to avoid exposing + * ScriptFilenameEntry to traversal implementations. + */ +#define IS_GC_MARKING_TRACER(trc) ((trc)->callback == NULL) + +#if JS_HAS_XML_SUPPORT +# define JS_IS_VALID_TRACE_KIND(kind) ((uint32)(kind) < JSTRACE_LIMIT) +#else +# define JS_IS_VALID_TRACE_KIND(kind) ((uint32)(kind) <= JSTRACE_STRING) +#endif /* * Set object's prototype while checking that doing so would not create * a cycle in the proto chain. The cycle check and proto change are done * only when all other requests are finished or suspended to ensure exclusive * access to the chain. If there is a cycle, return false without reporting * an error. Otherwise, set the proto and return true. */ extern bool SetProtoCheckingForCycles(JSContext *cx, JSObject *obj, JSObject *proto); -/* N.B. Assumes JS_SET_TRACING_NAME/INDEX has already been called. 
*/ -void -Mark(JSTracer *trc, void *thing, uint32 kind); - -static inline void -Mark(JSTracer *trc, void *thing, uint32 kind, const char *name) -{ - JS_ASSERT(thing); - JS_SET_TRACING_NAME(trc, name); - Mark(trc, thing, kind); -} - -static inline void -MarkString(JSTracer *trc, JSString *str) -{ - JS_ASSERT(str); - Mark(trc, str, JSTRACE_STRING); -} - -static inline void -MarkString(JSTracer *trc, JSString *str, const char *name) -{ - JS_ASSERT(str); - JS_SET_TRACING_NAME(trc, name); - Mark(trc, str, JSTRACE_STRING); -} - -static inline void -MarkAtomRange(JSTracer *trc, size_t len, JSAtom **vec, const char *name) -{ - for (uint32 i = 0; i < len; i++) { - if (JSAtom *atom = vec[i]) { - JS_SET_TRACING_INDEX(trc, name, i); - Mark(trc, ATOM_TO_STRING(atom), JSTRACE_STRING); - } - } -} - -static inline void -MarkObject(JSTracer *trc, JSObject &obj, const char *name) -{ - JS_SET_TRACING_NAME(trc, name); - Mark(trc, &obj, JSTRACE_OBJECT); -} - -static inline void -MarkObjectRange(JSTracer *trc, size_t len, JSObject **vec, const char *name) -{ - for (uint32 i = 0; i < len; i++) { - if (JSObject *obj = vec[i]) { - JS_SET_TRACING_INDEX(trc, name, i); - Mark(trc, obj, JSTRACE_OBJECT); - } - } -} - -/* N.B. Assumes JS_SET_TRACING_NAME/INDEX has already been called. 
*/ -static inline void -MarkValueRaw(JSTracer *trc, const js::Value &v) -{ - if (v.isMarkable()) - return Mark(trc, v.asGCThing(), v.gcKind()); -} - -static inline void -MarkValue(JSTracer *trc, const js::Value &v, const char *name) -{ - JS_SET_TRACING_NAME(trc, name); - MarkValueRaw(trc, v); -} - -static inline void -MarkValueRange(JSTracer *trc, Value *beg, Value *end, const char *name) -{ - for (Value *vp = beg; vp < end; ++vp) { - JS_SET_TRACING_INDEX(trc, name, vp - beg); - MarkValueRaw(trc, *vp); - } -} - -static inline void -MarkValueRange(JSTracer *trc, size_t len, Value *vec, const char *name) -{ - MarkValueRange(trc, vec, vec + len, name); -} - -void -MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end); - -static inline void -MarkId(JSTracer *trc, jsid id) -{ - if (JSID_IS_STRING(id)) - Mark(trc, JSID_TO_STRING(id), JSTRACE_STRING); - else if (JS_UNLIKELY(JSID_IS_OBJECT(id))) - Mark(trc, JSID_TO_OBJECT(id), JSTRACE_OBJECT); -} - -static inline void -MarkId(JSTracer *trc, jsid id, const char *name) -{ - JS_SET_TRACING_NAME(trc, name); - MarkId(trc, id); -} - -static inline void -MarkIdRange(JSTracer *trc, jsid *beg, jsid *end, const char *name) -{ - for (jsid *idp = beg; idp != end; ++idp) { - JS_SET_TRACING_INDEX(trc, name, (idp - beg)); - MarkId(trc, *idp); - } -} - -static inline void -MarkIdRange(JSTracer *trc, size_t len, jsid *vec, const char *name) -{ - MarkIdRange(trc, vec, vec + len, name); -} - -/* N.B. Assumes JS_SET_TRACING_NAME/INDEX has already been called. 
*/ -void -MarkGCThing(JSTracer *trc, void *thing); - -static inline void -MarkGCThing(JSTracer *trc, void *thing, const char *name) -{ - JS_SET_TRACING_NAME(trc, name); - MarkGCThing(trc, thing); -} - -static inline void -MarkGCThing(JSTracer *trc, void *thing, const char *name, size_t index) -{ - JS_SET_TRACING_INDEX(trc, name, index); - MarkGCThing(trc, thing); -} - JSCompartment * NewCompartment(JSContext *cx, JSPrincipals *principals); } /* namespace js */ +} /* namespace gc */ #endif /* jsgc_h___ */
new file mode 100644 --- /dev/null +++ b/js/src/jsgcinlines.h @@ -0,0 +1,482 @@ +/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- + * + * ***** BEGIN LICENSE BLOCK ***** + * Version: MPL 1.1/GPL 2.0/LGPL 2.1 + * + * The contents of this file are subject to the Mozilla Public License Version + * 1.1 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * http://www.mozilla.org/MPL/ + * + * Software distributed under the License is distributed on an "AS IS" basis, + * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License + * for the specific language governing rights and limitations under the + * License. + * + * The Original Code is SpiderMonkey code. + * + * The Initial Developer of the Original Code is + * Mozilla Corporation. + * Portions created by the Initial Developer are Copyright (C) 2010 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * + * + * Alternatively, the contents of this file may be used under the terms of + * either of the GNU General Public License Version 2 or later (the "GPL"), + * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your + * decision by deleting the provisions above and replace them with the notice + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. 
+ * + * ***** END LICENSE BLOCK ***** */ + +#ifndef jsgcinlines_h___ +#define jsgcinlines_h___ + +#include "jsgc.h" +#include "jscntxt.h" +#include "jscompartment.h" + +#include "jslock.h" +#include "jstl.h" + +#ifdef JS_GCMETER +# define METER(x) ((void) (x)) +# define METER_IF(condition, x) ((void) ((condition) && (x))) +#else +# define METER(x) ((void) 0) +# define METER_IF(condition, x) ((void) 0) +#endif + +/* + * Allocates a new GC thing. After a successful allocation the caller must + * fully initialize the thing before calling any function that can potentially + * trigger GC. This will ensure that GC tracing never sees junk values stored + * in the partially initialized thing. + */ + +template <typename T> +JS_ALWAYS_INLINE T * +NewFinalizableGCThing(JSContext *cx, unsigned thingKind) +{ + JS_ASSERT(thingKind < js::gc::FINALIZE_LIMIT); + + METER(cx->compartment->compartmentStats[thingKind].alloc++); + do { + js::gc::FreeCell *cell = cx->compartment->freeLists.getNext(thingKind); + if (cell) { + CheckGCFreeListLink(cell); + return (T *)cell; + } + if (!RefillFinalizableFreeList<T>(cx, thingKind)) + return NULL; + } while (true); +} + +#undef METER +#undef METER_IF + +inline JSObject * +js_NewGCObject(JSContext *cx) +{ + return NewFinalizableGCThing<JSObject>(cx, js::gc::FINALIZE_OBJECT); +} + +inline JSString * +js_NewGCString(JSContext *cx) +{ + return NewFinalizableGCThing<JSString>(cx, js::gc::FINALIZE_STRING); +} + +inline JSShortString * +js_NewGCShortString(JSContext *cx) +{ + return (JSShortString *) NewFinalizableGCThing<JSShortString>(cx, js::gc::FINALIZE_SHORT_STRING); +} + +inline JSString * +js_NewGCExternalString(JSContext *cx, uintN type) +{ + JS_ASSERT(type < js::gc::JS_EXTERNAL_STRING_LIMIT); + type += js::gc::FINALIZE_EXTERNAL_STRING0; + return NewFinalizableGCThing<JSString>(cx, type); +} + +inline JSFunction* +js_NewGCFunction(JSContext *cx) +{ + return NewFinalizableGCThing<JSFunction>(cx, js::gc::FINALIZE_FUNCTION); +} + +#if 
JS_HAS_XML_SUPPORT +inline JSXML * +js_NewGCXML(JSContext *cx) +{ + return NewFinalizableGCThing<JSXML>(cx, js::gc::FINALIZE_XML); +} +#endif + +namespace js { +namespace gc { + +template<typename T> +static JS_ALWAYS_INLINE void +Mark(JSTracer *trc, T *thing) +{ + JS_ASSERT(thing); + JS_ASSERT(JS_IS_VALID_TRACE_KIND(GetGCThingTraceKind(thing))); + JS_ASSERT(trc->debugPrinter || trc->debugPrintArg); + + if (!IS_GC_MARKING_TRACER(trc)) { + uint32 kind = GetGCThingTraceKind(thing); + trc->callback(trc, thing, kind); + goto out; + } + + TypedMarker(trc, thing); + + out: +#ifdef DEBUG + trc->debugPrinter = NULL; + trc->debugPrintArg = NULL; +#endif + return; /* to avoid out: right_curl when DEBUG is not defined */ +} + +static inline void +MarkString(JSTracer *trc, JSString *str) +{ + JS_ASSERT(str); + if (JSString::isStatic(str)) + return; + JS_ASSERT(GetArena<JSString>((Cell *)str)->assureThingIsAligned((JSString *)str)); + Mark(trc, str); +} + +static inline void +MarkString(JSTracer *trc, JSString *str, const char *name) +{ + JS_ASSERT(str); + JS_SET_TRACING_NAME(trc, name); + MarkString(trc, str); +} + +static inline void +MarkObject(JSTracer *trc, JSObject &obj, const char *name) +{ + JS_ASSERT(trc); + JS_ASSERT(&obj); + JS_SET_TRACING_NAME(trc, name); + JS_ASSERT(GetArena<JSObject>((Cell *)&obj)->assureThingIsAligned(&obj) || + GetArena<JSFunction>((Cell *)&obj)->assureThingIsAligned((JSFunction *)&obj)); + Mark(trc, &obj); +} + +static inline void +MarkChildren(JSTracer *trc, JSObject *obj) +{ + /* If obj has no map, it must be a newborn. */ + if (!obj->map) + return; + + /* Trace universal (ops-independent) members. */ + if (JSObject *proto = obj->getProto()) + MarkObject(trc, *proto, "proto"); + if (JSObject *parent = obj->getParent()) + MarkObject(trc, *parent, "parent"); + if (obj->emptyShape) + obj->emptyShape->trace(trc); + + /* Delegate to ops or the native marking op. */ + TraceOp op = obj->getOps()->trace; + (op ? 
op : js_TraceObject)(trc, obj); +} + +static inline void +MarkChildren(JSTracer *trc, JSFunction *fun) +{ + JSObject *obj = reinterpret_cast<JSObject *>(fun); + if (!obj->map) + return; + if (JSObject *proto = obj->getProto()) + MarkObject(trc, *proto, "proto"); + + if (JSObject *parent = obj->getParent()) + MarkObject(trc, *parent, "parent"); + TraceOp op = obj->getOps()->trace; + (op ? op : js_TraceObject)(trc, obj); +} + +static inline void +MarkChildren(JSTracer *trc, JSString *str) +{ + if (str->isDependent()) + MarkString(trc, str->dependentBase(), "base"); + else if (str->isRope()) { + if (str->isInteriorNode()) + MarkString(trc, str->interiorNodeParent(), "parent"); + MarkString(trc, str->ropeLeft(), "left child"); + MarkString(trc, str->ropeRight(), "right child"); + } +} + +#ifdef JS_HAS_XML_SUPPORT +static inline void +MarkChildren(JSTracer *trc, JSXML *xml) +{ + js_TraceXML(trc, xml); +} +#endif + +#if JS_STACK_GROWTH_DIRECTION > 0 +# define JS_CHECK_STACK_SIZE(limit, lval) ((jsuword)(lval) < limit) +#else +# define JS_CHECK_STACK_SIZE(limit, lval) ((jsuword)(lval) > limit) +#endif + +static inline bool +RecursionTooDeep(GCMarker *gcmarker) { +#ifdef JS_GC_ASSUME_LOW_C_STACK + return true; +#else + int stackDummy; + return !JS_CHECK_STACK_SIZE(gcmarker->stackLimit, &stackDummy); +#endif +} + +static JS_ALWAYS_INLINE void +TypedMarker(JSTracer *trc, JSXML *thing) +{ + if (!reinterpret_cast<Cell *>(thing)->markIfUnmarked(reinterpret_cast<GCMarker *>(trc)->getMarkColor())) + return; + GCMarker *gcmarker = static_cast<GCMarker *>(trc); + if (RecursionTooDeep(gcmarker)) { + gcmarker->delayMarkingChildren(thing); + } else { + MarkChildren(trc, thing); + } +} + +static JS_ALWAYS_INLINE void +TypedMarker(JSTracer *trc, JSObject *thing) +{ + JS_ASSERT(thing); + JS_ASSERT(JSTRACE_OBJECT == GetFinalizableTraceKind(thing->asCell()->arena()->header()->thingKind)); + + GCMarker *gcmarker = static_cast<GCMarker *>(trc); + if 
(!thing->markIfUnmarked(gcmarker->getMarkColor())) + return; + + if (RecursionTooDeep(gcmarker)) { + gcmarker->delayMarkingChildren(thing); + } else { + MarkChildren(trc, thing); + } +} + +static JS_ALWAYS_INLINE void +TypedMarker(JSTracer *trc, JSFunction *thing) +{ + JS_ASSERT(thing); + JS_ASSERT(JSTRACE_OBJECT == GetFinalizableTraceKind(thing->asCell()->arena()->header()->thingKind)); + + GCMarker *gcmarker = static_cast<GCMarker *>(trc); + if (!thing->markIfUnmarked(gcmarker->getMarkColor())) + return; + + if (RecursionTooDeep(gcmarker)) { + gcmarker->delayMarkingChildren(thing); + } else { + MarkChildren(trc, static_cast<JSObject *>(thing)); + } +} + +static JS_ALWAYS_INLINE void +TypedMarker(JSTracer *trc, JSShortString *thing) +{ + thing->asCell()->markIfUnmarked(); +} + +static JS_ALWAYS_INLINE void +TypedMarker(JSTracer *trc, JSString *thing) +{ + /* + * Iterate through all nodes and leaves in the rope if this is part of a + * rope; otherwise, we only iterate once: on the string itself. 
+ */ + JSRopeNodeIterator iter(thing); + JSString *str = iter.init(); + do { + for (;;) { + if (JSString::isStatic(str)) + break; + JS_ASSERT(JSTRACE_STRING == GetFinalizableTraceKind(str->asCell()->arena()->header()->thingKind)); + if (!str->asCell()->markIfUnmarked()) + break; + if (!str->isDependent()) + break; + str = str->dependentBase(); + } + str = iter.next(); + } while (str); +} + +static inline void +MarkAtomRange(JSTracer *trc, size_t len, JSAtom **vec, const char *name) +{ + for (uint32 i = 0; i < len; i++) { + if (JSAtom *atom = vec[i]) { + JS_SET_TRACING_INDEX(trc, name, i); + Mark(trc, ATOM_TO_STRING(atom)); + } + } +} + +static inline void +MarkObjectRange(JSTracer *trc, size_t len, JSObject **vec, const char *name) +{ + for (uint32 i = 0; i < len; i++) { + if (JSObject *obj = vec[i]) { + JS_SET_TRACING_INDEX(trc, name, i); + Mark(trc, obj); + } + } +} + +static inline void +MarkId(JSTracer *trc, jsid id) +{ + if (JSID_IS_STRING(id)) + Mark(trc, JSID_TO_STRING(id)); + else if (JS_UNLIKELY(JSID_IS_OBJECT(id))) + Mark(trc, JSID_TO_OBJECT(id)); +} + +static inline void +MarkId(JSTracer *trc, jsid id, const char *name) +{ + JS_SET_TRACING_NAME(trc, name); + MarkId(trc, id); +} + +static inline void +MarkIdRange(JSTracer *trc, jsid *beg, jsid *end, const char *name) +{ + for (jsid *idp = beg; idp != end; ++idp) { + JS_SET_TRACING_INDEX(trc, name, (idp - beg)); + MarkId(trc, *idp); + } +} + +static inline void +MarkIdRange(JSTracer *trc, size_t len, jsid *vec, const char *name) +{ + MarkIdRange(trc, vec, vec + len, name); +} + +static inline void +MarkKind(JSTracer *trc, void *thing, uint32 kind) +{ + JS_ASSERT(thing); + JS_ASSERT(kind == GetGCThingTraceKind(thing)); + switch (kind) { + case JSTRACE_OBJECT: + Mark(trc, reinterpret_cast<JSObject *>(thing)); + break; + case JSTRACE_STRING: + if (JSString::isStatic((JSString *)thing)) + return; + Mark(trc, reinterpret_cast<JSString *>(thing)); + break; +#if JS_HAS_XML_SUPPORT + case JSTRACE_XML: + Mark(trc, 
reinterpret_cast<JSXML *>(thing)); + break; +#endif + default: + JS_ASSERT(false); + } +} + +/* N.B. Assumes JS_SET_TRACING_NAME/INDEX has already been called. */ +static inline void +MarkValueRaw(JSTracer *trc, const js::Value &v) +{ + if (v.isMarkable()) { + JS_ASSERT(v.toGCThing()); + return MarkKind(trc, v.toGCThing(), v.gcKind()); + } +} + +static inline void +MarkValue(JSTracer *trc, const js::Value &v, const char *name) +{ + JS_SET_TRACING_NAME(trc, name); + MarkValueRaw(trc, v); +} + +static inline void +MarkValueRange(JSTracer *trc, Value *beg, Value *end, const char *name) +{ + for (Value *vp = beg; vp < end; ++vp) { + JS_SET_TRACING_INDEX(trc, name, vp - beg); + MarkValueRaw(trc, *vp); + } +} + +static inline void +MarkValueRange(JSTracer *trc, size_t len, Value *vec, const char *name) +{ + MarkValueRange(trc, vec, vec + len, name); +} + +/* N.B. Assumes JS_SET_TRACING_NAME/INDEX has already been called. */ +static inline void +MarkGCThing(JSTracer *trc, void *thing, uint32 kind) +{ + if (!thing) + return; + + MarkKind(trc, thing, kind); +} + +static inline void +MarkGCThing(JSTracer *trc, void *thing) +{ + if (!thing) + return; + MarkKind(trc, thing, GetGCThingTraceKind(thing)); +} + +static inline void +MarkGCThing(JSTracer *trc, void *thing, const char *name) +{ + JS_SET_TRACING_NAME(trc, name); + MarkGCThing(trc, thing); +} + +static inline void +MarkGCThing(JSTracer *trc, void *thing, const char *name, size_t index) +{ + JS_SET_TRACING_INDEX(trc, name, index); + MarkGCThing(trc, thing); +} + +static inline void +Mark(JSTracer *trc, void *thing, uint32 kind, const char *name) +{ + JS_ASSERT(thing); + JS_SET_TRACING_NAME(trc, name); + MarkKind(trc, thing, kind); +} + +}} + +#endif /* jsgcinlines_h___ */
--- a/js/src/jsgcstats.cpp +++ b/js/src/jsgcstats.cpp @@ -37,18 +37,26 @@ * ***** END LICENSE BLOCK ***** */ #include "jstypes.h" #include "jscntxt.h" #include "jsgcstats.h" #include "jsgc.h" #include "jsxml.h" #include "jsbuiltins.h" +#include "jscompartment.h" using namespace js; +using namespace js::gc; + +#define UL(x) ((unsigned long)(x)) +#define PERCENT(x,y) (100.0 * (double) (x) / (double) (y)) + +namespace js { +namespace gc { #if defined(JS_DUMP_CONSERVATIVE_GC_ROOTS) || defined(JS_GCMETER) void ConservativeGCStats::dump(FILE *fp) { size_t words = 0; for (size_t i = 0; i != JS_ARRAY_LENGTH(counter); ++i) @@ -63,16 +71,219 @@ ConservativeGCStats::dump(FILE *fp) fprintf(fp, " points to free arena: %lu\n", ULSTAT(counter[CGCT_FREEARENA])); fprintf(fp, " excluded, wrong tag: %lu\n", ULSTAT(counter[CGCT_WRONGTAG])); fprintf(fp, " excluded, not live: %lu\n", ULSTAT(counter[CGCT_NOTLIVE])); fprintf(fp, " valid GC things: %lu\n", ULSTAT(counter[CGCT_VALID])); #undef ULSTAT } #endif +#ifdef JS_GCMETER +void +UpdateCompartmentStats(JSCompartment *comp, unsigned thingKind, uint32 nlivearenas, + uint32 nkilledArenas, uint32 nthings) +{ + size_t narenas = 0; + JSGCArenaStats *compSt = &comp->compartmentStats[thingKind]; + JSGCArenaStats *globSt = &comp->rt->globalArenaStats[thingKind]; + narenas = nlivearenas + nkilledArenas; + JS_ASSERT(narenas >= compSt->livearenas); + + compSt->newarenas = narenas - compSt->livearenas; + compSt->narenas = narenas; + compSt->livearenas = nlivearenas; + if (compSt->maxarenas < narenas) + compSt->maxarenas = narenas; + compSt->totalarenas += narenas; + + compSt->nthings = nthings; + if (compSt->maxthings < nthings) + compSt->maxthings = nthings; + compSt->totalthings += nthings; + globSt->newarenas += compSt->newarenas; + globSt->narenas += narenas; + globSt->livearenas += compSt->livearenas; + globSt->totalarenas += compSt->totalarenas; + globSt->nthings += compSt->nthings; + globSt->totalthings += compSt->totalthings; + if 
(globSt->maxarenas < compSt->maxarenas) + globSt->maxarenas = compSt->maxarenas; + if (globSt->maxthings < compSt->maxthings) + globSt->maxthings = compSt->maxthings; +} + +static const char *const GC_ARENA_NAMES[] = { + "object", + "function", +#if JS_HAS_XML_SUPPORT + "xml", +#endif + "short string", + "string", + "external_string_0", + "external_string_1", + "external_string_2", + "external_string_3", + "external_string_4", + "external_string_5", + "external_string_6", + "external_string_7", +}; +JS_STATIC_ASSERT(JS_ARRAY_LENGTH(GC_ARENA_NAMES) == FINALIZE_LIMIT); + +void GetSizeAndThingsPerArena(int thingKind, size_t &thingSize, size_t &thingsPerArena) +{ + switch (thingKind) { + case FINALIZE_OBJECT: + thingSize = sizeof(JSObject); + thingsPerArena = Arena<JSObject>::ThingsPerArena; + break; + case FINALIZE_STRING: + case FINALIZE_EXTERNAL_STRING0: + case FINALIZE_EXTERNAL_STRING1: + case FINALIZE_EXTERNAL_STRING2: + case FINALIZE_EXTERNAL_STRING3: + case FINALIZE_EXTERNAL_STRING4: + case FINALIZE_EXTERNAL_STRING5: + case FINALIZE_EXTERNAL_STRING6: + case FINALIZE_EXTERNAL_STRING7: + thingSize = sizeof(JSString); + thingsPerArena = Arena<JSString>::ThingsPerArena; + break; + case FINALIZE_SHORT_STRING: + thingSize = sizeof(JSShortString); + thingsPerArena = Arena<JSShortString>::ThingsPerArena; + break; + case FINALIZE_FUNCTION: + thingSize = sizeof(JSFunction); + thingsPerArena = Arena<JSFunction>::ThingsPerArena; + break; +#if JS_HAS_XML_SUPPORT + case FINALIZE_XML: + thingSize = sizeof(JSXML); + thingsPerArena = Arena<JSXML>::ThingsPerArena; + break; +#endif + default: + JS_ASSERT(false); + } +} + +void +DumpArenaStats(JSGCArenaStats *stp, FILE *fp) +{ + size_t sumArenas = 0, sumTotalArenas = 0, sumThings =0, sumMaxThings = 0; + size_t sumThingSize = 0, sumTotalThingSize = 0, sumArenaCapacity = 0; + size_t sumTotalArenaCapacity = 0, sumAlloc = 0, sumLocalAlloc = 0; + + for (int i = 0; i < (int) FINALIZE_LIMIT; i++) { + JSGCArenaStats *st = &stp[i]; + if 
(st->maxarenas == 0) + continue; + size_t thingSize = 0, thingsPerArena = 0; + GetSizeAndThingsPerArena(i, thingSize, thingsPerArena); + + fprintf(fp, "%s arenas (thing size %lu, %lu things per arena):\n", + GC_ARENA_NAMES[i], UL(thingSize), UL(thingsPerArena)); + fprintf(fp, " arenas before GC: %lu\n", UL(st->narenas)); + fprintf(fp, " arenas after GC: %lu (%.1f%%)\n", + UL(st->livearenas), PERCENT(st->livearenas, st->narenas)); + fprintf(fp, " max arenas: %lu\n", UL(st->maxarenas)); + fprintf(fp, " things: %lu\n", UL(st->nthings)); + fprintf(fp, " GC cell utilization: %.1f%%\n", + PERCENT(st->nthings, thingsPerArena * st->narenas)); + fprintf(fp, " average cell utilization: %.1f%%\n", + PERCENT(st->totalthings, thingsPerArena * st->totalarenas)); + fprintf(fp, " max things: %lu\n", UL(st->maxthings)); + fprintf(fp, " alloc attempts: %lu\n", UL(st->alloc)); + fprintf(fp, " alloc without locks: %lu (%.1f%%)\n", + UL(st->localalloc), PERCENT(st->localalloc, st->alloc)); + sumArenas += st->narenas; + sumTotalArenas += st->totalarenas; + sumThings += st->nthings; + sumMaxThings += st->maxthings; + sumThingSize += thingSize * st->nthings; + sumTotalThingSize += size_t(thingSize * st->totalthings); + sumArenaCapacity += thingSize * thingsPerArena * st->narenas; + sumTotalArenaCapacity += thingSize * thingsPerArena * st->totalarenas; + sumAlloc += st->alloc; + sumLocalAlloc += st->localalloc; + putc('\n', fp); + } + + fputs("Never used arenas:\n", fp); + for (int i = 0; i < (int) FINALIZE_LIMIT; i++) { + JSGCArenaStats *st = &stp[i]; + if (st->maxarenas != 0) + continue; + fprintf(fp, "%s\n", GC_ARENA_NAMES[i]); + } + fprintf(fp, "\nTOTAL STATS:\n"); + fprintf(fp, " total GC arenas: %lu\n", UL(sumArenas)); + fprintf(fp, " total GC things: %lu\n", UL(sumThings)); + fprintf(fp, " max total GC things: %lu\n", UL(sumMaxThings)); + fprintf(fp, " GC cell utilization: %.1f%%\n", + PERCENT(sumThingSize, sumArenaCapacity)); + fprintf(fp, " average cell utilization: %.1f%%\n", + 
PERCENT(sumTotalThingSize, sumTotalArenaCapacity)); + fprintf(fp, " alloc attempts: %lu\n", UL(sumAlloc)); + fprintf(fp, " alloc without locks: %lu (%.1f%%)\n", + UL(sumLocalAlloc), PERCENT(sumLocalAlloc, sumAlloc)); +} + +void +DumpCompartmentStats(JSCompartment *comp, FILE *fp) +{ + if (comp->rt->defaultCompartment == comp) + fprintf(fp, "\n**** DefaultCompartment Allocation Statistics: %p ****\n\n", (void *) comp); + else + fprintf(fp, "\n**** Compartment Allocation Statistics: %p ****\n\n", (void *) comp); + + DumpArenaStats(&comp->compartmentStats[0], fp); +} + +#endif + +} //gc +} //js + +#ifdef JS_GCMETER + +JS_FRIEND_API(void) +js_DumpGCStats(JSRuntime *rt, FILE *fp) +{ +#define ULSTAT(x) UL(rt->gcStats.x) + if (JS_WANT_GC_METER_PRINT) { + fprintf(fp, "\n**** Global Arena Allocation Statistics: ****\n"); + DumpArenaStats(&rt->globalArenaStats[0], fp); + fprintf(fp, " bytes allocated: %lu\n", UL(rt->gcBytes)); + fprintf(fp, " allocation failures: %lu\n", ULSTAT(fail)); + fprintf(fp, "allocation retries after GC: %lu\n", ULSTAT(retry)); + fprintf(fp, " valid lock calls: %lu\n", ULSTAT(lock)); + fprintf(fp, " valid unlock calls: %lu\n", ULSTAT(unlock)); + fprintf(fp, " delayed tracing calls: %lu\n", ULSTAT(unmarked)); +#ifdef DEBUG + fprintf(fp, " max trace later count: %lu\n", ULSTAT(maxunmarked)); +#endif + fprintf(fp, "potentially useful GC calls: %lu\n", ULSTAT(poke)); + fprintf(fp, " thing arenas freed so far: %lu\n\n", ULSTAT(afree)); + } + + if (JS_WANT_GC_PER_COMPARTMENT_PRINT) + for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) + DumpCompartmentStats(*c, fp); + PodZero(&rt->globalArenaStats); + if (JS_WANT_CONSERVATIVE_GC_PRINT) + rt->gcStats.conservative.dump(fp); +#undef ULSTAT +} +#endif + +namespace js { + #ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS void GCMarker::dumpConservativeRoots() { if (!conservativeDumpFileName) return; FILE *fp; @@ -121,200 +332,48 @@ GCMarker::dumpConservativeRoots() } fputc('\n', fp); if (fp 
!= stdout && fp != stderr) fclose(fp); } #endif /* JS_DUMP_CONSERVATIVE_GC_ROOTS */ -#ifdef JS_GCMETER - -void -UpdateArenaStats(JSGCArenaStats *st, uint32 nlivearenas, uint32 nkilledArenas, - uint32 nthings) -{ - size_t narenas; - - narenas = nlivearenas + nkilledArenas; - JS_ASSERT(narenas >= st->livearenas); - - st->newarenas = narenas - st->livearenas; - st->narenas = narenas; - st->livearenas = nlivearenas; - if (st->maxarenas < narenas) - st->maxarenas = narenas; - st->totalarenas += narenas; - - st->nthings = nthings; - if (st->maxthings < nthings) - st->maxthings = nthings; - st->totalthings += nthings; -} - -JS_FRIEND_API(void) -js_DumpGCStats(JSRuntime *rt, FILE *fp) -{ - static const char *const GC_ARENA_NAMES[] = { - "object", - "function", -#if JS_HAS_XML_SUPPORT - "xml", -#endif - "short string", - "string", - "external_string_0", - "external_string_1", - "external_string_2", - "external_string_3", - "external_string_4", - "external_string_5", - "external_string_6", - "external_string_7", - }; - - fprintf(fp, "\nGC allocation statistics:\n\n"); - -#define UL(x) ((unsigned long)(x)) -#define ULSTAT(x) UL(rt->gcStats.x) -#define PERCENT(x,y) (100.0 * (double) (x) / (double) (y)) - - size_t sumArenas = 0; - size_t sumTotalArenas = 0; - size_t sumThings = 0; - size_t sumMaxThings = 0; - size_t sumThingSize = 0; - size_t sumTotalThingSize = 0; - size_t sumArenaCapacity = 0; - size_t sumTotalArenaCapacity = 0; - size_t sumAlloc = 0; - size_t sumLocalAlloc = 0; - size_t sumFail = 0; - size_t sumRetry = 0; - for (int i = 0; i < (int) FINALIZE_LIMIT; i++) { - size_t thingSize, thingsPerArena; - JSGCArenaStats *st; - thingSize = rt->gcArenaList[i].thingSize; - thingsPerArena = ThingsPerArena(thingSize); - st = &rt->gcArenaStats[i]; - if (st->maxarenas == 0) - continue; - fprintf(fp, - "%s arenas (thing size %lu, %lu things per arena):", - GC_ARENA_NAMES[i], UL(thingSize), UL(thingsPerArena)); - putc('\n', fp); - fprintf(fp, " arenas before GC: %lu\n", 
UL(st->narenas)); - fprintf(fp, " new arenas before GC: %lu (%.1f%%)\n", - UL(st->newarenas), PERCENT(st->newarenas, st->narenas)); - fprintf(fp, " arenas after GC: %lu (%.1f%%)\n", - UL(st->livearenas), PERCENT(st->livearenas, st->narenas)); - fprintf(fp, " max arenas: %lu\n", UL(st->maxarenas)); - fprintf(fp, " things: %lu\n", UL(st->nthings)); - fprintf(fp, " GC cell utilization: %.1f%%\n", - PERCENT(st->nthings, thingsPerArena * st->narenas)); - fprintf(fp, " average cell utilization: %.1f%%\n", - PERCENT(st->totalthings, thingsPerArena * st->totalarenas)); - fprintf(fp, " max things: %lu\n", UL(st->maxthings)); - fprintf(fp, " alloc attempts: %lu\n", UL(st->alloc)); - fprintf(fp, " alloc without locks: %lu (%.1f%%)\n", - UL(st->localalloc), PERCENT(st->localalloc, st->alloc)); - sumArenas += st->narenas; - sumTotalArenas += st->totalarenas; - sumThings += st->nthings; - sumMaxThings += st->maxthings; - sumThingSize += thingSize * st->nthings; - sumTotalThingSize += size_t(thingSize * st->totalthings); - sumArenaCapacity += thingSize * thingsPerArena * st->narenas; - sumTotalArenaCapacity += thingSize * thingsPerArena * st->totalarenas; - sumAlloc += st->alloc; - sumLocalAlloc += st->localalloc; - sumFail += st->fail; - sumRetry += st->retry; - putc('\n', fp); - } - - fputs("Never used arenas:\n", fp); - for (int i = 0; i < (int) FINALIZE_LIMIT; i++) { - size_t thingSize, thingsPerArena; - JSGCArenaStats *st; - thingSize = rt->gcArenaList[i].thingSize; - thingsPerArena = ThingsPerArena(thingSize); - st = &rt->gcArenaStats[i]; - if (st->maxarenas != 0) - continue; - fprintf(fp, - "%s (thing size %lu, %lu things per arena)\n", - GC_ARENA_NAMES[i], UL(thingSize), UL(thingsPerArena)); - } - fprintf(fp, "\nTOTAL STATS:\n"); - fprintf(fp, " bytes allocated: %lu\n", UL(rt->gcBytes)); - fprintf(fp, " total GC arenas: %lu\n", UL(sumArenas)); - fprintf(fp, " max allocated arenas: %lu\n", ULSTAT(maxnallarenas)); - fprintf(fp, " max allocated chunks: %lu\n", 
ULSTAT(maxnchunks)); - fprintf(fp, " total GC things: %lu\n", UL(sumThings)); - fprintf(fp, " max total GC things: %lu\n", UL(sumMaxThings)); - fprintf(fp, " GC cell utilization: %.1f%%\n", - PERCENT(sumThingSize, sumArenaCapacity)); - fprintf(fp, " average cell utilization: %.1f%%\n", - PERCENT(sumTotalThingSize, sumTotalArenaCapacity)); - fprintf(fp, "allocation retries after GC: %lu\n", UL(sumRetry)); - fprintf(fp, " alloc attempts: %lu\n", UL(sumAlloc)); - fprintf(fp, " alloc without locks: %lu (%.1f%%)\n", - UL(sumLocalAlloc), PERCENT(sumLocalAlloc, sumAlloc)); - fprintf(fp, " allocation failures: %lu\n", UL(sumFail)); - fprintf(fp, " valid lock calls: %lu\n", ULSTAT(lock)); - fprintf(fp, " valid unlock calls: %lu\n", ULSTAT(unlock)); - fprintf(fp, " delayed tracing calls: %lu\n", ULSTAT(unmarked)); -#ifdef DEBUG - fprintf(fp, " max trace later count: %lu\n", ULSTAT(maxunmarked)); -#endif - fprintf(fp, "potentially useful GC calls: %lu\n", ULSTAT(poke)); - fprintf(fp, " thing arenas freed so far: %lu\n", ULSTAT(afree)); - rt->gcStats.conservative.dump(fp); - -#undef UL -#undef ULSTAT -#undef PERCENT -} -#endif - #ifdef MOZ_GCTIMER -namespace js { - jsrefcount newChunkCount = 0; jsrefcount destroyChunkCount = 0; GCTimer::GCTimer() { getFirstEnter(); memset(this, 0, sizeof(GCTimer)); enter = rdtsc(); } -uint64 +uint64 GCTimer::getFirstEnter() { static uint64 firstEnter = rdtsc(); return firstEnter; } -void +void GCTimer::finish(bool lastGC) { end = rdtsc(); if (startMark > 0) { if (JS_WANT_GC_SUITE_PRINT) { fprintf(stderr, "%f %f %f\n", (double)(end - enter) / 1e6, (double)(startSweep - startMark) / 1e6, (double)(sweepDestroyEnd - startSweep) / 1e6); } else { static FILE *gcFile; if (!gcFile) { - gcFile = fopen("gcTimer.dat", "w"); + gcFile = fopen("gcTimer.dat", "a"); fprintf(gcFile, " AppTime, Total, Mark, Sweep, FinObj,"); fprintf(gcFile, " FinStr, Destroy, newChunks, destoyChunks\n"); } JS_ASSERT(gcFile); fprintf(gcFile, "%12.1f, %6.1f, %6.1f, %6.1f, %6.1f, 
%6.1f, %7.1f, ", (double)(enter - getFirstEnter()) / 1e6, (double)(end - enter) / 1e6, @@ -332,44 +391,14 @@ GCTimer::finish(bool lastGC) { gcFile = NULL; } } } newChunkCount = 0; destroyChunkCount = 0; } -#ifdef JS_SCOPE_DEPTH_METER -void -DumpScopeDepthMeter(JSRuntime *rt) -{ - static FILE *fp; - if (!fp) - fp = fopen("/tmp/scopedepth.stats", "w"); - - if (fp) { - JS_DumpBasicStats(&rt->protoLookupDepthStats, "proto-lookup depth", fp); - JS_DumpBasicStats(&rt->scopeSearchDepthStats, "scope-search depth", fp); - JS_DumpBasicStats(&rt->hostenvScopeDepthStats, "hostenv scope depth", fp); - JS_DumpBasicStats(&rt->lexicalScopeDepthStats, "lexical scope depth", fp); - - putc('\n', fp); - fflush(fp); - } -} #endif -#ifdef JS_DUMP_LOOP_STATS -void -DumpLoopStats(JSRuntime *rt) -{ - static FILE *lsfp; - if (!lsfp) - lsfp = fopen("/tmp/loopstats", "w"); - if (lsfp) { - JS_DumpBasicStats(&rt->loopStats, "loops", lsfp); - fflush(lsfp); - } -} -#endif +} //js -} /* namespace js */ -#endif +#undef UL +#undef PERCENT
--- a/js/src/jsgcstats.h +++ b/js/src/jsgcstats.h @@ -41,142 +41,134 @@ #if !defined JS_DUMP_CONSERVATIVE_GC_ROOTS && defined DEBUG # define JS_DUMP_CONSERVATIVE_GC_ROOTS 1 #endif /* Define JS_GCMETER here if wanted */ #if defined JS_GCMETER const bool JS_WANT_GC_METER_PRINT = true; +const bool JS_WANT_GC_PER_COMPARTMENT_PRINT = true; +const bool JS_WANT_CONSERVATIVE_GC_PRINT = true; #elif defined DEBUG # define JS_GCMETER 1 const bool JS_WANT_GC_METER_PRINT = false; +const bool JS_WANT_GC_PER_COMPARTMENT_PRINT = false; +const bool JS_WANT_CONSERVATIVE_GC_PRINT = false; #endif -#define METER_UPDATE_MAX(maxLval, rval) \ - METER_IF((maxLval) < (rval), (maxLval) = (rval)) - namespace js { - +namespace gc { /* * The conservative GC test for a word shows that it is either a valid GC * thing or is not for one of the following reasons. */ enum ConservativeGCTest { CGCT_VALID, CGCT_LOWBITSET, /* excluded because one of the low bits was set */ CGCT_NOTARENA, /* not within arena range in a chunk */ CGCT_NOTCHUNK, /* not within a valid chunk */ CGCT_FREEARENA, /* within arena containing only free things */ CGCT_WRONGTAG, /* tagged pointer but wrong type */ CGCT_NOTLIVE, /* gcthing is not allocated */ CGCT_END }; struct ConservativeGCStats { - uint32 counter[CGCT_END]; /* ConservativeGCTest classification + uint32 counter[gc::CGCT_END]; /* ConservativeGCTest classification counters */ void add(const ConservativeGCStats &another) { for (size_t i = 0; i != JS_ARRAY_LENGTH(counter); ++i) counter[i] += another.counter[i]; } void dump(FILE *fp); }; -} /* namespace js */ - #ifdef JS_GCMETER - struct JSGCArenaStats { uint32 alloc; /* allocation attempts */ uint32 localalloc; /* allocations from local lists */ - uint32 retry; /* allocation retries after running the GC */ - uint32 fail; /* allocation failures */ uint32 nthings; /* live GC things */ uint32 maxthings; /* maximum of live GC cells */ double totalthings; /* live GC things the GC scanned so far */ uint32 narenas; /* number 
of arena in list before the GC */ uint32 newarenas; /* new arenas allocated before the last GC */ uint32 livearenas; /* number of live arenas after the last GC */ uint32 maxarenas; /* maximum of allocated arenas */ uint32 totalarenas; /* total number of arenas with live things that GC scanned so far */ }; +#endif + +#ifdef JS_GCMETER struct JSGCStats { uint32 lock; /* valid lock calls */ uint32 unlock; /* valid unlock calls */ uint32 unmarked; /* number of times marking of GC thing's children were delayed due to a low C stack */ + uint32 retry; /* allocation retries after running the GC */ + uint32 fail; /* allocation failures */ #ifdef DEBUG uint32 maxunmarked;/* maximum number of things with children to mark later */ #endif uint32 poke; /* number of potentially useful GC calls */ uint32 afree; /* thing arenas freed so far */ uint32 nallarenas; /* number of all allocated arenas */ uint32 maxnallarenas; /* maximum number of all allocated arenas */ uint32 nchunks; /* number of allocated chunks */ uint32 maxnchunks; /* maximum number of allocated chunks */ - js::ConservativeGCStats conservative; + ConservativeGCStats conservative; }; -extern JS_FRIEND_API(void) -js_DumpGCStats(JSRuntime *rt, FILE *fp); - extern void -UpdateArenaStats(JSGCArenaStats *st, uint32 nlivearenas, uint32 nkilledArenas, - uint32 nthings); - +UpdateCompartmentStats(JSCompartment *comp, unsigned thingKind, uint32 nlivearenas, + uint32 nkilledArenas, uint32 nthings); #endif /* JS_GCMETER */ -namespace js { +} //gc #ifdef MOZ_GCTIMER +const bool JS_WANT_GC_SUITE_PRINT = false; //false for gnuplot output + extern jsrefcount newChunkCount; extern jsrefcount destroyChunkCount; -const bool JS_WANT_GC_SUITE_PRINT = false; //false for gnuplot output - struct GCTimer { uint64 enter; uint64 startMark; uint64 startSweep; uint64 sweepObjectEnd; uint64 sweepStringEnd; uint64 sweepDestroyEnd; uint64 end; GCTimer(); - static uint64 getFirstEnter(); + + uint64 getFirstEnter(); + void finish(bool lastGC); }; # 
define GCTIMER_PARAM , GCTimer &gcTimer # define GCTIMER_ARG , gcTimer # define TIMESTAMP(x) (gcTimer.x = rdtsc()) # define GCTIMER_BEGIN() GCTimer gcTimer # define GCTIMER_END(last) (gcTimer.finish(last)) #else # define GCTIMER_PARAM # define GCTIMER_ARG # define TIMESTAMP(x) ((void) 0) # define GCTIMER_BEGIN() ((void) 0) # define GCTIMER_END(last) ((void) 0) #endif -#ifdef JS_SCOPE_DEPTH_METER -extern void -DumpScopeDepthMeter(JSRuntime *rt); -#endif +} //js -#ifdef JS_DUMP_LOOP_STATS -extern void -DumpLoopStats(JSRuntime *rt); -#endif +extern JS_FRIEND_API(void) +js_DumpGCStats(JSRuntime *rt, FILE *fp); -} /* namepsace js */ - -#endif /* jsgcstats_h___ */ +#endif /* jsgcstats_h__ */
--- a/js/src/jshashtable.h +++ b/js/src/jshashtable.h @@ -317,16 +317,19 @@ class HashTable : AllocPolicy #ifdef DEBUG , entered(false), mutationCount(0) #endif {} bool init(uint32 length) { + /* Make sure that init isn't called twice. */ + JS_ASSERT(table == NULL); + /* * Correct for sMaxAlphaFrac such that the table will not resize * when adding 'length' entries. */ JS_ASSERT(length < (uint32(1) << 23)); uint32 capacity = (length * sInvMaxAlpha) >> 7; if (capacity < sMinSize)
--- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -89,16 +89,17 @@ #if JS_HAS_XML_SUPPORT #include "jsxml.h" #endif #include "jsautooplen.h" using namespace js; +using namespace js::gc; /* jsinvoke_cpp___ indicates inclusion from jsinvoke.cpp. */ #if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___ #ifdef DEBUG jsbytecode *const JSStackFrame::sInvalidpc = (jsbytecode *)0xbeef; JSObject *const JSStackFrame::sInvalidScopeChain = (JSObject *)0xbeef; #endif @@ -6507,17 +6508,17 @@ END_CASE(JSOP_ARRAYPUSH) } /* switch (op) */ } /* for (;;) */ #endif /* !JS_THREADED_INTERP */ error: JS_ASSERT(cx->regs == ®s); #ifdef JS_TRACER if (regs.fp->hasImacropc() && cx->throwing) { - // Handle other exceptions as if they came from the imacro-calling pc. + // Handle exceptions as if they came from the imacro-calling pc. regs.pc = regs.fp->imacropc(); regs.fp->clearImacropc(); atoms = script->atomMap.vector; } #endif JS_ASSERT(size_t((regs.fp->hasImacropc() ? regs.fp->imacropc() : regs.pc) - script->code) < script->length);
--- a/js/src/jsiter.cpp +++ b/js/src/jsiter.cpp @@ -76,16 +76,17 @@ #endif #include "jscntxtinlines.h" #include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jsstrinlines.h" using namespace js; +using namespace js::gc; static void iterator_finalize(JSContext *cx, JSObject *obj); static void iterator_trace(JSTracer *trc, JSObject *obj); static JSObject *iterator_iterator(JSContext *cx, JSObject *obj, JSBool keysonly); Class js_IteratorClass = { "Iterator", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_CACHED_PROTO(JSProto_Iterator) |
--- a/js/src/jsnum.h +++ b/js/src/jsnum.h @@ -358,17 +358,18 @@ ValueToUint16(JSContext *cx, const js::V * type with positive sign and less than 2^32 in magnitude such the mathematical * difference of Result(3) and k is mathematically an integer multiple of 2^32. * 5. If Result(4) is greater than or equal to 2^31, return Result(4)- 2^32, * otherwise return Result(4). */ static inline int32 js_DoubleToECMAInt32(jsdouble d) { -#if defined(__i386__) || defined(__i386) +#if defined(__i386__) || defined(__i386) || defined(__x86_64__) || \ + defined(_M_IX86) || defined(_M_X64) jsdpun du, duh, two32; uint32 di_h, u_tmp, expon, shift_amount; int32 mask32; /* * Algorithm Outline * Step 1. If d is NaN, +/-Inf or |d|>=2^84 or |d|<1, then return 0 * All of this is implemented based on an exponent comparison.
--- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -98,16 +98,17 @@ #include "jsprobes.h" #include "jsatominlines.h" #include "jsobjinlines.h" #include "jsscriptinlines.h" #include "jsautooplen.h" using namespace js; +using namespace js::gc; JS_FRIEND_DATA(const JSObjectMap) JSObjectMap::sharedNonNative(JSObjectMap::SHAPELESS); Class js_ObjectClass = { js_Object_str, JSCLASS_HAS_CACHED_PROTO(JSProto_Object), PropertyStub, /* addProperty */ PropertyStub, /* delProperty */ @@ -429,18 +430,17 @@ js_LeaveSharpObject(JSContext *cx, JSIdA *idap = NULL; } } } static intN gc_sharp_table_entry_marker(JSHashEntry *he, intN i, void *arg) { - JS_CALL_OBJECT_TRACER((JSTracer *)arg, (JSObject *)he->key, - "sharp table entry"); + MarkObject((JSTracer *)arg, *(JSObject *)he->key, "sharp table entry"); return JS_DHASH_NEXT; } void js_TraceSharpMap(JSTracer *trc, JSSharpObjectMap *map) { JS_ASSERT(map->depth > 0); JS_ASSERT(map->table); @@ -1384,17 +1384,17 @@ static JSBool obj_hasOwnProperty(JSContext *cx, uintN argc, Value *vp) { JSObject *obj = ComputeThisFromVp(cx, vp); return obj && js_HasOwnPropertyHelper(cx, obj->getOps()->lookupProperty, argc, vp); } JSBool -js_HasOwnPropertyHelper(JSContext *cx, JSLookupPropOp lookup, uintN argc, +js_HasOwnPropertyHelper(JSContext *cx, LookupPropOp lookup, uintN argc, Value *vp) { jsid id; if (!ValueToId(cx, argc != 0 ? vp[2] : UndefinedValue(), &id)) return JS_FALSE; JSObject *obj = ComputeThisFromVp(cx, vp); JSObject *obj2; @@ -1415,17 +1415,17 @@ js_HasOwnPropertyHelper(JSContext *cx, J obj2->dropProperty(cx, prop); } else { vp->setBoolean(false); } return JS_TRUE; } JSBool -js_HasOwnProperty(JSContext *cx, JSLookupPropOp lookup, JSObject *obj, jsid id, +js_HasOwnProperty(JSContext *cx, LookupPropOp lookup, JSObject *obj, jsid id, JSObject **objp, JSProperty **propp) { JSAutoResolveFlags rf(cx, JSRESOLVE_QUALIFIED | JSRESOLVE_DETECTING); if (!(lookup ? 
lookup : js_LookupProperty)(cx, obj, id, objp, propp)) return false; if (!*propp) return true; @@ -1738,18 +1738,19 @@ js_GetOwnPropertyDescriptor(JSContext *c if (attrs & (JSPROP_GETTER | JSPROP_SETTER)) { doGet = false; if (attrs & JSPROP_GETTER) roots[0] = shape->getterValue(); if (attrs & JSPROP_SETTER) roots[1] = shape->setterValue(); } JS_UNLOCK_OBJ(cx, pobj); - } else if (!pobj->getAttributes(cx, id, &attrs)) { - return false; + } else { + if (!pobj->getAttributes(cx, id, &attrs)) + return false; } if (doGet && !obj->getProperty(cx, id, &roots[2])) return false; return js_NewPropertyDescriptorObject(cx, id, attrs, roots[0], /* getter */ @@ -1824,26 +1825,33 @@ obj_keys(JSContext *cx, uintN argc, Valu JSObject *aobj = js_NewArrayObject(cx, jsuint(vals.length()), vals.begin()); if (!aobj) return JS_FALSE; vp->setObject(*aobj); return JS_TRUE; } -static JSBool -HasProperty(JSContext* cx, JSObject* obj, jsid id, Value* vp, JSBool* answerp) -{ - if (!JS_HasPropertyById(cx, obj, id, answerp)) - return JS_FALSE; - if (!*answerp) { +static bool +HasProperty(JSContext* cx, JSObject* obj, jsid id, Value* vp, bool *foundp) +{ + if (!obj->hasProperty(cx, id, foundp, JSRESOLVE_QUALIFIED | JSRESOLVE_DETECTING)) + return false; + if (!*foundp) { vp->setUndefined(); - return JS_TRUE; - } - return JS_GetPropertyById(cx, obj, id, Jsvalify(vp)); + return true; + } + + /* + * We must go through the method read barrier in case id is 'get' or 'set'. + * There is no obvious way to defer cloning a joined function object whose + * identity will be used by DefinePropertyOnObject, e.g., or reflected via + * js_GetOwnPropertyDescriptor, as the getter or setter callable object. + */ + return !!obj->getProperty(cx, id, vp); } PropDesc::PropDesc() : pd(UndefinedValue()), id(INT_TO_JSID(0)), value(UndefinedValue()), get(UndefinedValue()), set(UndefinedValue()), @@ -1871,75 +1879,71 @@ PropDesc::initialize(JSContext* cx, jsid JSObject* desc = &v.toObject(); /* Make a copy of the descriptor. 
We might need it later. */ pd = v; /* Start with the proper defaults. */ attrs = JSPROP_PERMANENT | JSPROP_READONLY; - JSBool hasProperty; + bool found; /* 8.10.5 step 3 */ - if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.enumerableAtom), &v, - &hasProperty)) { + if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.enumerableAtom), &v, &found)) return false; - } - if (hasProperty) { + if (found) { hasEnumerable = JS_TRUE; if (js_ValueToBoolean(v)) attrs |= JSPROP_ENUMERATE; } /* 8.10.5 step 4 */ - if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.configurableAtom), &v, - &hasProperty)) { + if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.configurableAtom), &v, &found)) return false; - } - if (hasProperty) { + if (found) { hasConfigurable = JS_TRUE; if (js_ValueToBoolean(v)) attrs &= ~JSPROP_PERMANENT; } /* 8.10.5 step 5 */ - if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.valueAtom), &v, &hasProperty)) + if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.valueAtom), &v, &found)) return false; - if (hasProperty) { + if (found) { hasValue = true; value = v; } /* 8.10.6 step 6 */ - if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.writableAtom), &v, &hasProperty)) + if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.writableAtom), &v, &found)) return false; - if (hasProperty) { + if (found) { hasWritable = JS_TRUE; if (js_ValueToBoolean(v)) attrs &= ~JSPROP_READONLY; } /* 8.10.7 step 7 */ - if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.getAtom), &v, &hasProperty)) + if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.getAtom), &v, &found)) return false; - if (hasProperty) { + if (found) { if ((v.isPrimitive() || !js_IsCallable(v)) && !v.isUndefined()) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_GET_SET_FIELD, js_getter_str); return false; } hasGet = true; get = v; attrs |= JSPROP_GETTER | JSPROP_SHARED; } /* 8.10.7 
step 8 */ - if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.setAtom), &v, &hasProperty)) + if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.setAtom), &v, &found)) return false; - if (hasProperty) { + if (found) { if ((v.isPrimitive() || !js_IsCallable(v)) && !v.isUndefined()) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_GET_SET_FIELD, js_setter_str); return false; } hasSet = true; set = v; attrs |= JSPROP_SETTER | JSPROP_SHARED;
--- a/js/src/jsobj.h +++ b/js/src/jsobj.h @@ -50,16 +50,17 @@ */ #include "jsapi.h" #include "jshash.h" /* Added by JSIFY */ #include "jspubtd.h" #include "jsprvtd.h" #include "jslock.h" #include "jsvalue.h" #include "jsvector.h" +#include "jscell.h" namespace js { class JSProxyHandler; class AutoPropDescArrayRooter; namespace mjit { class Compiler; @@ -147,19 +148,16 @@ struct PropDesc { js::PropertyOp getter() const { return js::CastAsPropertyOp(getterObject()); } js::PropertyOp setter() const { return js::CastAsPropertyOp(setterObject()); } - static void traceDescriptorArray(JSTracer* trc, JSObject* obj); - static void finalizeDescriptorArray(JSContext* cx, JSObject* obj); - js::Value pd; jsid id; js::Value value, get, set; /* Property descriptor boolean fields. */ uint8 attrs; /* Bits indicating which values are set. */ @@ -273,17 +271,17 @@ struct JSFunction; * * Both these flag bits are initially zero; they may be set or queried using * the (is|set)(Delegate|System) inline methods. * * The dslots member is null or a pointer into a dynamically allocated vector * of Values for reserved and dynamic slots. If dslots is not null, dslots[-1] * records the number of available slots. */ -struct JSObject { +struct JSObject : js::gc::Cell { /* * TraceRecorder must be a friend because it generates code that * manipulates JSObjects, which requires peeking under any encapsulation. 
*/ friend class js::TraceRecorder; /* * Private pointer to the last added property and methods to manipulate the @@ -974,16 +972,17 @@ struct JSObject { inline void init(js::Class *aclasp, JSObject *proto, JSObject *parent, JSContext *cx); inline void init(js::Class *aclasp, JSObject *proto, JSObject *parent, void *priv, JSContext *cx); inline void init(js::Class *aclasp, JSObject *proto, JSObject *parent, const js::Value &privateSlotValue, JSContext *cx); inline void finish(JSContext *cx); + JS_ALWAYS_INLINE void finalize(JSContext *cx, unsigned thindKind); /* * Like init, but also initializes map. The catch: proto must be the result * of a call to js_InitClass(...clasp, ...). */ inline void initSharingEmptyShape(js::Class *clasp, JSObject *proto, JSObject *parent, @@ -1052,17 +1051,17 @@ struct JSObject { /* Remove the property named by id from this object. */ bool removeProperty(JSContext *cx, jsid id); /* Clear the scope, making it empty. */ void clear(JSContext *cx); JSBool lookupProperty(JSContext *cx, jsid id, JSObject **objp, JSProperty **propp) { - JSLookupPropOp op = getOps()->lookupProperty; + js::LookupPropOp op = getOps()->lookupProperty; return (op ? op : js_LookupProperty)(cx, this, id, objp, propp); } JSBool defineProperty(JSContext *cx, jsid id, const js::Value &value, js::PropertyOp getter = js::PropertyStub, js::PropertyOp setter = js::PropertyStub, uintN attrs = JSPROP_ENUMERATE) { js::DefinePropOp op = getOps()->defineProperty; @@ -1075,37 +1074,37 @@ struct JSObject { } JSBool setProperty(JSContext *cx, jsid id, js::Value *vp, JSBool strict) { js::StrictPropertyIdOp op = getOps()->setProperty; return (op ? op : js_SetProperty)(cx, this, id, vp, strict); } JSBool getAttributes(JSContext *cx, jsid id, uintN *attrsp) { - JSAttributesOp op = getOps()->getAttributes; + js::AttributesOp op = getOps()->getAttributes; return (op ? 
op : js_GetAttributes)(cx, this, id, attrsp); } JSBool setAttributes(JSContext *cx, jsid id, uintN *attrsp) { - JSAttributesOp op = getOps()->setAttributes; + js::AttributesOp op = getOps()->setAttributes; return (op ? op : js_SetAttributes)(cx, this, id, attrsp); } JSBool deleteProperty(JSContext *cx, jsid id, js::Value *rval, JSBool strict) { js::StrictPropertyIdOp op = getOps()->deleteProperty; return (op ? op : js_DeleteProperty)(cx, this, id, rval, strict); } JSBool enumerate(JSContext *cx, JSIterateOp iterop, js::Value *statep, jsid *idp) { js::NewEnumerateOp op = getOps()->enumerate; return (op ? op : js_Enumerate)(cx, this, iterop, statep, idp); } JSType typeOf(JSContext *cx) { - JSTypeOfOp op = getOps()->typeOf; + js::TypeOfOp op = getOps()->typeOf; return (op ? op : js_TypeOf)(cx, this); } JSObject *wrappedObject(JSContext *cx) const; /* These four are time-optimized to avoid stub calls. */ JSObject *thisObject(JSContext *cx) { JSObjectOp op = getOps()->thisObject; @@ -1156,17 +1155,16 @@ struct JSObject { JS_FRIEND_API(bool) isWrapper() const; JS_FRIEND_API(JSObject *) unwrap(uintN *flagsp = NULL); inline void initArrayClass(); }; JS_STATIC_ASSERT(offsetof(JSObject, fslots) % sizeof(js::Value) == 0); -JS_STATIC_ASSERT(sizeof(JSObject) % JS_GCTHING_ALIGN == 0); #define JSSLOT_START(clasp) (((clasp)->flags & JSCLASS_HAS_PRIVATE) \ ? JSSLOT_PRIVATE + 1 \ : JSSLOT_PRIVATE) #define JSSLOT_FREE(clasp) (JSSLOT_START(clasp) \ + JSCLASS_RESERVED_SLOTS(clasp)) @@ -1332,21 +1330,21 @@ js_LeaveSharpObject(JSContext *cx, JSIdA /* * Mark objects stored in map if GC happens between js_EnterSharpObject * and js_LeaveSharpObject. GC calls this when map->depth > 0. 
*/ extern void js_TraceSharpMap(JSTracer *trc, JSSharpObjectMap *map); extern JSBool -js_HasOwnPropertyHelper(JSContext *cx, JSLookupPropOp lookup, uintN argc, +js_HasOwnPropertyHelper(JSContext *cx, js::LookupPropOp lookup, uintN argc, js::Value *vp); extern JSBool -js_HasOwnProperty(JSContext *cx, JSLookupPropOp lookup, JSObject *obj, jsid id, +js_HasOwnProperty(JSContext *cx, js::LookupPropOp lookup, JSObject *obj, jsid id, JSObject **objp, JSProperty **propp); extern JSBool js_NewPropertyDescriptorObject(JSContext *cx, jsid id, uintN attrs, const js::Value &getter, const js::Value &setter, const js::Value &value, js::Value *vp); extern JSBool
--- a/js/src/jsobjinlines.h +++ b/js/src/jsobjinlines.h @@ -55,16 +55,19 @@ /* Headers included for inline implementations used by this header. */ #include "jsbool.h" #include "jscntxt.h" #include "jsnum.h" #include "jsscopeinlines.h" #include "jsstr.h" +#include "jsgcinlines.h" +#include "jsprobes.h" + inline void JSObject::dropProperty(JSContext *cx, JSProperty *prop) { JS_ASSERT(prop); if (isNative()) JS_UNLOCK_OBJ(cx, this); } @@ -110,16 +113,36 @@ inline bool JSObject::unbrand(JSContext *cx) { JS_ASSERT(isNative()); if (!branded()) setGeneric(); return true; } +inline void +JSObject::finalize(JSContext *cx, unsigned thingKind) +{ + JS_ASSERT(thingKind == js::gc::FINALIZE_OBJECT || + thingKind == js::gc::FINALIZE_FUNCTION); + + /* Cope with stillborn objects that have no map. */ + if (!map) + return; + + /* Finalize obj first, in case it needs map and slots. */ + js::Class *clasp = getClass(); + if (clasp->finalize) + clasp->finalize(cx, this); + + js::Probes::finalizeObject(this); + + finish(cx); +} + /* * Property read barrier for deferred cloning of compiler-created function * objects optimized as typically non-escaping, ad-hoc methods in obj. */ inline bool JSObject::methodReadBarrier(JSContext *cx, const js::Shape &shape, js::Value *vp) { JS_ASSERT(canHaveMethodBarrier());
--- a/js/src/json.cpp +++ b/js/src/json.cpp @@ -61,16 +61,17 @@ #include "jsvector.h" #include "json.h" #include "jsatominlines.h" #include "jsobjinlines.h" using namespace js; +using namespace js::gc; #ifdef _MSC_VER #pragma warning(push) #pragma warning(disable:4351) #endif struct JSONParser { @@ -569,17 +570,20 @@ js_Stringify(JSContext *cx, Value *vp, J } // helper to determine whether a character could be part of a number static JSBool IsNumChar(jschar c) { return ((c <= '9' && c >= '0') || c == '.' || c == '-' || c == '+' || c == 'e' || c == 'E'); } -static JSBool HandleData(JSContext *cx, JSONParser *jp, JSONDataType type); +static JSBool HandleDataString(JSContext *cx, JSONParser *jp); +static JSBool HandleDataKeyString(JSContext *cx, JSONParser *jp); +static JSBool HandleDataNumber(JSContext *cx, JSONParser *jp); +static JSBool HandleDataKeyword(JSContext *cx, JSONParser *jp); static JSBool PopState(JSContext *cx, JSONParser *jp); static bool Walk(JSContext *cx, jsid id, JSObject *holder, const Value &reviver, Value *vp) { JS_CHECK_RECURSION(cx, return false); if (!holder->getProperty(cx, id, vp)) @@ -707,21 +711,21 @@ js_FinishJSONParse(JSContext *cx, JSONPa return true; JSBool early_ok = JS_TRUE; // Check for unprocessed primitives at the root. This doesn't happen for // strings because a closing quote triggers value processing. if ((jp->statep - jp->stateStack) == 1) { if (*jp->statep == JSON_PARSE_STATE_KEYWORD) { - early_ok = HandleData(cx, jp, JSON_DATA_KEYWORD); + early_ok = HandleDataKeyword(cx, jp); if (early_ok) PopState(cx, jp); } else if (*jp->statep == JSON_PARSE_STATE_NUMBER) { - early_ok = HandleData(cx, jp, JSON_DATA_NUMBER); + early_ok = HandleDataNumber(cx, jp); if (early_ok) PopState(cx, jp); } } // This internal API is infallible, in spite of its JSBool return type. 
js_RemoveRoot(cx->runtime, &jp->objectStack); @@ -943,39 +947,46 @@ HandleKeyword(JSContext *cx, JSONParser } else { return JSONParseError(jp, cx); } return PushPrimitive(cx, jp, keyword); } static JSBool -HandleData(JSContext *cx, JSONParser *jp, JSONDataType type) +HandleDataString(JSContext *cx, JSONParser *jp) { - JSBool ok; - - switch (type) { - case JSON_DATA_STRING: - ok = HandleString(cx, jp, jp->buffer.begin(), jp->buffer.length()); - break; + JSBool ok = HandleString(cx, jp, jp->buffer.begin(), jp->buffer.length()); + if (ok) + jp->buffer.clear(); + return ok; +} - case JSON_DATA_KEYSTRING: - ok = jp->objectKey.append(jp->buffer.begin(), jp->buffer.end()); - break; +static JSBool +HandleDataKeyString(JSContext *cx, JSONParser *jp) +{ + JSBool ok = jp->objectKey.append(jp->buffer.begin(), jp->buffer.end()); + if (ok) + jp->buffer.clear(); + return ok; +} - case JSON_DATA_NUMBER: - ok = HandleNumber(cx, jp, jp->buffer.begin(), jp->buffer.length()); - break; +static JSBool +HandleDataNumber(JSContext *cx, JSONParser *jp) +{ + JSBool ok = HandleNumber(cx, jp, jp->buffer.begin(), jp->buffer.length()); + if (ok) + jp->buffer.clear(); + return ok; +} - default: - JS_ASSERT(type == JSON_DATA_KEYWORD); - ok = HandleKeyword(cx, jp, jp->buffer.begin(), jp->buffer.length()); - break; - } - +static JSBool +HandleDataKeyword(JSContext *cx, JSONParser *jp) +{ + JSBool ok = HandleKeyword(cx, jp, jp->buffer.begin(), jp->buffer.length()); if (ok) jp->buffer.clear(); return ok; } JSBool js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len, DecodingMode decodingMode) @@ -1102,24 +1113,23 @@ js_ConsumeJSONText(JSContext *cx, JSONPa return JSONParseError(jp, cx); } break; case JSON_PARSE_STATE_STRING: if (c == '"') { if (!PopState(cx, jp)) return JS_FALSE; - JSONDataType jdt; if (*jp->statep == JSON_PARSE_STATE_OBJECT_IN_PAIR) { - jdt = JSON_DATA_KEYSTRING; + if (!HandleDataKeyString(cx, jp)) + return JS_FALSE; } else { - jdt = JSON_DATA_STRING; + 
if (!HandleDataString(cx, jp)) + return JS_FALSE; } - if (!HandleData(cx, jp, jdt)) - return JS_FALSE; } else if (c == '\\') { *jp->statep = JSON_PARSE_STATE_STRING_ESCAPE; } else if (c <= 0x1F) { // The JSON lexical grammer does not allow a JSONStringCharacter to be // any of the Unicode characters U+0000 thru U+001F (control characters). return JSONParseError(jp, cx); } else { if (!jp->buffer.append(c)) @@ -1179,31 +1189,31 @@ js_ConsumeJSONText(JSContext *cx, JSONPa if (!jp->buffer.append(c)) return JS_FALSE; } else { // this character isn't part of the keyword, process it again i--; if (!PopState(cx, jp)) return JS_FALSE; - if (!HandleData(cx, jp, JSON_DATA_KEYWORD)) + if (!HandleDataKeyword(cx, jp)) return JS_FALSE; } break; case JSON_PARSE_STATE_NUMBER: if (IsNumChar(c)) { if (!jp->buffer.append(c)) return JS_FALSE; } else { // this character isn't part of the number, process it again i--; if (!PopState(cx, jp)) return JS_FALSE; - if (!HandleData(cx, jp, JSON_DATA_NUMBER)) + if (!HandleDataNumber(cx, jp)) return JS_FALSE; } break; case JSON_PARSE_STATE_FINISHED: if (!JS_ISXMLSPACE(c)) { // extra input return JSONParseError(jp, cx);
--- a/js/src/json.h +++ b/js/src/json.h @@ -32,19 +32,17 @@ * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. * * ***** END LICENSE BLOCK ***** */ #ifndef json_h___ #define json_h___ -/* - * JS JSON functions. - */ + #include "jsprvtd.h" #include "jspubtd.h" #include "jsvalue.h" #include "jsvector.h" #define JSON_MAX_DEPTH 2048 #define JSON_PARSER_BUFSIZE 1024 @@ -102,23 +100,16 @@ enum JSONParserState { /* Within numeric literal. */ JSON_PARSE_STATE_NUMBER, /* Handling keywords (only null/true/false pass validity post-check). */ JSON_PARSE_STATE_KEYWORD }; -enum JSONDataType { - JSON_DATA_STRING, - JSON_DATA_KEYSTRING, - JSON_DATA_NUMBER, - JSON_DATA_KEYWORD -}; - struct JSONParser; extern JSONParser * js_BeginJSONParse(JSContext *cx, js::Value *rootVal, bool suppressErrors = false); /* Aargh, Windows. */ #ifdef STRICT #undef STRICT
--- a/js/src/jsopcode.cpp +++ b/js/src/jsopcode.cpp @@ -77,16 +77,17 @@ #include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jsscriptinlines.h" #include "jscntxtinlines.h" #include "jsautooplen.h" using namespace js; +using namespace js::gc; /* * Index limit must stay within 32 bits. */ JS_STATIC_ASSERT(sizeof(uint32) * JS_BITS_PER_BYTE >= INDEX_LIMIT_LOG2 + 1); /* Verify JSOP_XXX_LENGTH constant definitions. */ #define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
--- a/js/src/jsparse.cpp +++ b/js/src/jsparse.cpp @@ -94,16 +94,17 @@ #include "jsregexpinlines.h" // Grr, windows.h or something under it #defines CONST... #ifdef CONST #undef CONST #endif using namespace js; +using namespace js::gc; /* * Asserts to verify assumptions behind pn_ macros. */ #define pn_offsetof(m) offsetof(JSParseNode, m) JS_STATIC_ASSERT(pn_offsetof(pn_link) == pn_offsetof(dn_uses)); JS_STATIC_ASSERT(pn_offsetof(pn_u.name.atom) == pn_offsetof(pn_u.apair.atom)); @@ -309,17 +310,17 @@ JSFunctionBox::shouldUnbrand(uintN metho return false; } void Parser::trace(JSTracer *trc) { JSObjectBox *objbox = traceListHead; while (objbox) { - JS_CALL_OBJECT_TRACER(trc, objbox->object, "parser.object"); + MarkObject(trc, *objbox->object, "parser.object"); objbox = objbox->traceLink; } } static void UnlinkFunctionBoxes(JSParseNode *pn, JSTreeContext *tc); static void @@ -3595,16 +3596,17 @@ BindVarOrConst(JSContext *cx, BindData * if (!CheckStrictBinding(cx, tc, atom, pn)) return false; JSStmtInfo *stmt = js_LexicalLookup(tc, atom, NULL); if (stmt && stmt->type == STMT_WITH) { data->fresh = false; + pn->pn_dflags |= PND_DEOPTIMIZED; return true; } JSAtomListElement *ale = tc->decls.lookup(atom); JSOp op = data->op; if (stmt || ale) { JSDefinition *dn = ale ? ALE_DEFN(ale) : NULL; @@ -8497,88 +8499,89 @@ Parser::primaryExpr(TokenKind tt, JSBool pn->pn_dflags |= PND_BOUND; } } else if ((!afterDot #if JS_HAS_XML_SUPPORT || tokenStream.peekToken() == TOK_DBLCOLON #endif ) && !(tc->flags & TCF_DECL_DESTRUCTURING)) { JSStmtInfo *stmt = js_LexicalLookup(tc, pn->pn_atom, NULL); - if (!stmt || stmt->type != STMT_WITH) { - JSDefinition *dn; - - JSAtomListElement *ale = tc->decls.lookup(pn->pn_atom); - if (ale) { - dn = ALE_DEFN(ale); + + JSDefinition *dn; + + JSAtomListElement *ale = tc->decls.lookup(pn->pn_atom); + if (ale) { + dn = ALE_DEFN(ale); #if JS_HAS_BLOCK_SCOPE - /* - * Skip out-of-scope let bindings along an ALE list or hash - * chain. 
These can happen due to |let (x = x) x| block and - * expression bindings, where the x on the right of = comes - * from an outer scope. See bug 496532. - */ - while (dn->isLet() && !BlockIdInScope(dn->pn_blockid, tc)) { - do { - ale = ALE_NEXT(ale); - } while (ale && ALE_ATOM(ale) != pn->pn_atom); - if (!ale) - break; - dn = ALE_DEFN(ale); - } + /* + * Skip out-of-scope let bindings along an ALE list or hash + * chain. These can happen due to |let (x = x) x| block and + * expression bindings, where the x on the right of = comes + * from an outer scope. See bug 496532. + */ + while (dn->isLet() && !BlockIdInScope(dn->pn_blockid, tc)) { + do { + ale = ALE_NEXT(ale); + } while (ale && ALE_ATOM(ale) != pn->pn_atom); + if (!ale) + break; + dn = ALE_DEFN(ale); + } #endif - } - + } + + if (ale) { + dn = ALE_DEFN(ale); + } else { + ale = tc->lexdeps.lookup(pn->pn_atom); if (ale) { dn = ALE_DEFN(ale); } else { - ale = tc->lexdeps.lookup(pn->pn_atom); - if (ale) { - dn = ALE_DEFN(ale); - } else { - /* - * No definition before this use in any lexical scope. - * Add a mapping in tc->lexdeps from pn->pn_atom to a - * new node for the forward-referenced definition. This - * placeholder definition node will be adopted when we - * parse the real defining declaration form, or left as - * a free variable definition if we never see the real - * definition. - */ - ale = MakePlaceholder(pn, tc); - if (!ale) - return NULL; - dn = ALE_DEFN(ale); - - /* - * In case this is a forward reference to a function, - * we pessimistically set PND_FUNARG if the next token - * is not a left parenthesis. - * - * If the definition eventually parsed into dn is not a - * function, this flag won't hurt, and if we do parse a - * function with pn's name, then the PND_FUNARG flag is - * necessary for safe context->display-based optimiza- - * tion of the closure's static link. 
- */ - JS_ASSERT(PN_TYPE(dn) == TOK_NAME); - JS_ASSERT(dn->pn_op == JSOP_NOP); - if (tokenStream.peekToken() != TOK_LP) - dn->pn_dflags |= PND_FUNARG; - } + /* + * No definition before this use in any lexical scope. + * Add a mapping in tc->lexdeps from pn->pn_atom to a + * new node for the forward-referenced definition. This + * placeholder definition node will be adopted when we + * parse the real defining declaration form, or left as + * a free variable definition if we never see the real + * definition. + */ + ale = MakePlaceholder(pn, tc); + if (!ale) + return NULL; + dn = ALE_DEFN(ale); + + /* + * In case this is a forward reference to a function, + * we pessimistically set PND_FUNARG if the next token + * is not a left parenthesis. + * + * If the definition eventually parsed into dn is not a + * function, this flag won't hurt, and if we do parse a + * function with pn's name, then the PND_FUNARG flag is + * necessary for safe context->display-based optimiza- + * tion of the closure's static link. + */ + JS_ASSERT(PN_TYPE(dn) == TOK_NAME); + JS_ASSERT(dn->pn_op == JSOP_NOP); + if (tokenStream.peekToken() != TOK_LP) + dn->pn_dflags |= PND_FUNARG; } - - JS_ASSERT(dn->pn_defn); - LinkUseToDef(pn, dn, tc); - - /* Here we handle the backward function reference case. */ - if (tokenStream.peekToken() != TOK_LP) - dn->pn_dflags |= PND_FUNARG; - - pn->pn_dflags |= (dn->pn_dflags & PND_FUNARG); - } + } + + JS_ASSERT(dn->pn_defn); + LinkUseToDef(pn, dn, tc); + + /* Here we handle the backward function reference case. */ + if (tokenStream.peekToken() != TOK_LP) + dn->pn_dflags |= PND_FUNARG; + + pn->pn_dflags |= (dn->pn_dflags & PND_FUNARG); + if (stmt && stmt->type == STMT_WITH) + pn->pn_dflags |= PND_DEOPTIMIZED; } #if JS_HAS_XML_SUPPORT if (tokenStream.matchToken(TOK_DBLCOLON)) { if (afterDot) { JSString *str; /*
--- a/js/src/jsprobes.cpp +++ b/js/src/jsprobes.cpp @@ -114,17 +114,17 @@ jsprobes_jsvaltovoid(JSContext *cx, cons if (argval.isNumber()) { if (argval.isInt32()) return (void *)argval.toInt32(); // FIXME Now what? //return (void *)argval.toDouble(); } - return argval.asGCThing(); + return argval.toGCThing(); } #endif const char * Probes::FunctionName(JSContext *cx, const JSFunction *fun) { if (!fun) return nullName;
--- a/js/src/jsproxy.cpp +++ b/js/src/jsproxy.cpp @@ -46,16 +46,17 @@ #include "jsnum.h" #include "jsobj.h" #include "jsproxy.h" #include "jsscope.h" #include "jsobjinlines.h" using namespace js; +using namespace js::gc; namespace js { static inline const Value & GetCall(JSObject *proxy) { JS_ASSERT(proxy->isFunctionProxy()); return proxy->getSlot(JSSLOT_PROXY_CALL); } @@ -215,17 +216,17 @@ bool JSProxyHandler::iterate(JSContext *cx, JSObject *proxy, uintN flags, Value *vp) { JS_ASSERT(OperationInProgress(cx, proxy)); AutoIdVector props(cx); if (!enumerate(cx, proxy, props)) return false; return EnumeratedIdVectorToIterator(cx, proxy, flags, props, vp); } - + JSString * JSProxyHandler::obj_toString(JSContext *cx, JSObject *proxy) { JS_ASSERT(proxy->isProxy()); return JS_NewStringCopyZ(cx, proxy->isFunctionProxy() ? "[object Function]" : "[object Object]"); @@ -318,17 +319,17 @@ static bool DerivedTrap(JSContext *cx, JSObject *handler, JSAtom *atom, Value *fvalp) { JS_ASSERT(atom == ATOM(has) || atom == ATOM(hasOwn) || atom == ATOM(get) || atom == ATOM(set) || atom == ATOM(enumerateOwn) || atom == ATOM(iterate)); - + return GetTrap(cx, handler, atom, fvalp); } static bool Trap(JSContext *cx, JSObject *handler, Value fval, uintN argc, Value* argv, Value *rval) { JS_CHECK_RECURSION(cx, return false); @@ -1093,18 +1094,18 @@ NonNullObject(JSContext *cx, const Value static JSBool proxy_create(JSContext *cx, uintN argc, Value *vp) { if (argc < 1) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_MORE_ARGS_NEEDED, "create", "0", "s"); return false; } - JSObject *handler; - if (!(handler = NonNullObject(cx, vp[2]))) + JSObject *handler = NonNullObject(cx, vp[2]); + if (!handler) return false; JSObject *proto, *parent = NULL; if (argc > 1 && vp[3].isObject()) { proto = &vp[3].toObject(); parent = proto->getParent(); } else { JS_ASSERT(IsFunctionObject(vp[0])); proto = NULL; @@ -1123,18 +1124,18 @@ proxy_create(JSContext *cx, uintN argc, static JSBool 
proxy_createFunction(JSContext *cx, uintN argc, Value *vp) { if (argc < 2) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_MORE_ARGS_NEEDED, "createFunction", "1", ""); return false; } - JSObject *handler; - if (!(handler = NonNullObject(cx, vp[2]))) + JSObject *handler = NonNullObject(cx, vp[2]); + if (!handler) return false; JSObject *proto, *parent; parent = vp[0].toObject().getParent(); if (!js_GetClassPrototype(cx, parent, JSProto_Function, &proto)) return false; parent = proto->getParent(); JSObject *call = js_ValueToCallableObject(cx, &vp[3], JSV2F_SEARCH_STACK); @@ -1162,33 +1163,33 @@ proxy_createFunction(JSContext *cx, uint static JSBool proxy_isTrapping(JSContext *cx, uintN argc, Value *vp) { if (argc < 1) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_MORE_ARGS_NEEDED, "isTrapping", "0", "s"); return false; } - JSObject *obj; - if (!(obj = NonNullObject(cx, vp[2]))) + JSObject *obj = NonNullObject(cx, vp[2]); + if (!obj) return false; vp->setBoolean(obj->isProxy()); return true; } static JSBool proxy_fix(JSContext *cx, uintN argc, Value *vp) { if (argc < 1) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_MORE_ARGS_NEEDED, "fix", "0", "s"); return false; } - JSObject *obj; - if (!(obj = NonNullObject(cx, vp[2]))) + JSObject *obj = NonNullObject(cx, vp[2]); + if (!obj) return false; if (obj->isProxy()) { JSBool flag; if (!FixProxy(cx, obj, &flag)) return false; vp->setBoolean(flag); } else { vp->setBoolean(true); @@ -1308,18 +1309,18 @@ FixProxy(JSContext *cx, JSObject *proxy, return true; } if (OperationInProgress(cx, proxy)) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_PROXY_FIX); return false; } - JSObject *props; - if (!(props = NonNullObject(cx, tvr.value()))) + JSObject *props = NonNullObject(cx, tvr.value()); + if (!props) return false; JSObject *proto = proxy->getProto(); JSObject *parent = proxy->getParent(); Class *clasp = proxy->isFunctionProxy() ? 
&CallableObjectClass : &js_ObjectClass; /* Make a blank object from the recipe fix provided to us. */ JSObject *newborn = NewNonFunction<WithProto::Given>(cx, clasp, proto, parent); @@ -1333,17 +1334,17 @@ FixProxy(JSContext *cx, JSObject *proxy, } { AutoPendingProxyOperation pending(cx, proxy); if (!js_PopulateObject(cx, newborn, props)) return false; } - /* Trade spaces between the newborn object and the proxy. */ + /* Trade contents between the newborn object and the proxy. */ proxy->swap(newborn); /* The GC will dispose of the proxy object. */ *bp = true; return true; }
--- a/js/src/jsregexp.cpp +++ b/js/src/jsregexp.cpp @@ -60,16 +60,17 @@ #ifdef JS_TRACER #include "jstracer.h" using namespace avmplus; using namespace nanojit; #endif using namespace js; +using namespace js::gc; /* * RegExpStatics allocates memory -- in order to keep the statics stored * per-global and not leak, we create a js::Class to wrap the C++ instance and * provide an appropriate finalizer. We store an instance of that js::Class in * a global reserved slot. */ @@ -517,17 +518,17 @@ js_XDRRegExpObject(JSXDRState *xdr, JSOb #endif /* !JS_HAS_XDR */ static void regexp_trace(JSTracer *trc, JSObject *obj) { RegExp *re = RegExp::extractFrom(obj); if (re && re->getSource()) - JS_CALL_STRING_TRACER(trc, re->getSource(), "source"); + MarkString(trc, re->getSource(), "source"); } static JSBool regexp_enumerate(JSContext *cx, JSObject *obj) { JS_ASSERT(obj->isRegExp()); jsval v;
--- a/js/src/jsscope.cpp +++ b/js/src/jsscope.cpp @@ -62,16 +62,17 @@ #include "jsscope.h" #include "jsstr.h" #include "jstracer.h" #include "jsobjinlines.h" #include "jsscopeinlines.h" using namespace js; +using namespace js::gc; uint32 js_GenerateShape(JSContext *cx, bool gcLocked) { JSRuntime *rt; uint32 shape; rt = cx->runtime; @@ -1356,21 +1357,21 @@ Shape::trace(JSTracer *trc) const if (IS_GC_MARKING_TRACER(trc)) mark(); MarkId(trc, id, "id"); if (attrs & (JSPROP_GETTER | JSPROP_SETTER)) { if ((attrs & JSPROP_GETTER) && rawGetter) { JS_SET_TRACING_DETAILS(trc, PrintPropertyGetterOrSetter, this, 0); - Mark(trc, getterObject(), JSTRACE_OBJECT); + Mark(trc, getterObject()); } if ((attrs & JSPROP_SETTER) && rawSetter) { JS_SET_TRACING_DETAILS(trc, PrintPropertyGetterOrSetter, this, 1); - Mark(trc, setterObject(), JSTRACE_OBJECT); + Mark(trc, setterObject()); } } if (isMethod()) { JS_SET_TRACING_DETAILS(trc, PrintPropertyMethod, this, 0); - Mark(trc, &methodObject(), JSTRACE_OBJECT); + Mark(trc, &methodObject()); } }
--- a/js/src/jsscope.h +++ b/js/src/jsscope.h @@ -286,16 +286,17 @@ CastAsPropertyOp(js::Class *clasp) return JS_DATA_TO_FUNC_PTR(PropertyOp, clasp); } struct Shape : public JSObjectMap { friend struct ::JSObject; friend struct ::JSFunction; friend class js::PropertyTree; + friend bool HasUnreachableGCThings(TreeFragment *f); protected: mutable js::PropertyTable *table; public: inline void freeTable(JSContext *cx); static bool initRuntimeState(JSContext *cx);
--- a/js/src/jsscript.cpp +++ b/js/src/jsscript.cpp @@ -66,16 +66,17 @@ #endif #include "methodjit/MethodJIT.h" #include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jsscriptinlines.h" using namespace js; +using namespace js::gc; static const jsbytecode emptyScriptCode[] = {JSOP_STOP, SRC_NULL}; /* static */ const JSScript JSScript::emptyScriptConst = { JS_INIT_STATIC_CLIST(NULL), const_cast<jsbytecode*>(emptyScriptCode), 1, JSVERSION_DEFAULT, 0, 0, 0, 0, 0, 0, 0, true, false, false, false, false, false, true, @@ -1281,41 +1282,41 @@ js_TraceScript(JSTracer *trc, JSScript * if (script->objectsOffset != 0) { JSObjectArray *objarray = script->objects(); uintN i = objarray->length; do { --i; if (objarray->vector[i]) { JS_SET_TRACING_INDEX(trc, "objects", i); - Mark(trc, objarray->vector[i], JSTRACE_OBJECT); + Mark(trc, objarray->vector[i]); } } while (i != 0); } if (script->regexpsOffset != 0) { JSObjectArray *objarray = script->regexps(); uintN i = objarray->length; do { --i; if (objarray->vector[i]) { JS_SET_TRACING_INDEX(trc, "regexps", i); - Mark(trc, objarray->vector[i], JSTRACE_OBJECT); + Mark(trc, objarray->vector[i]); } } while (i != 0); } if (script->constOffset != 0) { JSConstArray *constarray = script->consts(); MarkValueRange(trc, constarray->length, constarray->vector, "consts"); } if (script->u.object) { JS_SET_TRACING_NAME(trc, "object"); - Mark(trc, script->u.object, JSTRACE_OBJECT); + Mark(trc, script->u.object); } if (IS_GC_MARKING_TRACER(trc) && script->filename) js_MarkScriptFilename(script->filename); } JSBool js_NewScriptObject(JSContext *cx, JSScript *script)
--- a/js/src/jsstr.cpp +++ b/js/src/jsstr.cpp @@ -78,20 +78,26 @@ #include "jscntxtinlines.h" #include "jsobjinlines.h" #include "jsstrinlines.h" #include "jsregexpinlines.h" #include "jscntxtinlines.h" using namespace js; +using namespace js::gc; JS_STATIC_ASSERT(size_t(JSString::MAX_LENGTH) <= size_t(JSVAL_INT_MAX)); JS_STATIC_ASSERT(JSString::MAX_LENGTH <= JSVAL_INT_MAX); +JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8); +JSStringFinalizeOp str_finalizers[JS_EXTERNAL_STRING_LIMIT] = { + NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL +}; + const jschar * js_GetStringChars(JSContext *cx, JSString *str) { if (!js_MakeStringImmutable(cx, str)) return NULL; return str->flatChars(); } @@ -3115,17 +3121,17 @@ static JSFunctionSpec string_methods[] = { {(c), 0x00} } } #ifdef __SUNPRO_CC #pragma pack(8) #else #pragma pack(push, 8) #endif -JSString JSString::unitStringTable[] +const JSString JSString::unitStringTable[] #ifdef __GNUC__ __attribute__ ((aligned (8))) #endif = { R8(0) }; #ifdef __SUNPRO_CC #pragma pack(0) #else @@ -3141,30 +3147,30 @@ JSString JSString::unitStringTable[] */ #define TO_SMALL_CHAR(c) ((c) >= '0' && (c) <= '9' ? (c) - '0' : \ (c) >= 'a' && (c) <= 'z' ? (c) - 'a' + 10 : \ (c) >= 'A' && (c) <= 'Z' ? (c) - 'A' + 36 : \ JSString::INVALID_SMALL_CHAR) #define R TO_SMALL_CHAR -JSString::SmallChar JSString::toSmallChar[] = { R7(0) }; +const JSString::SmallChar JSString::toSmallChar[] = { R7(0) }; #undef R /* * This is used when we generate our table of short strings, so the compiler is * happier if we use |c| as few times as possible. */ #define FROM_SMALL_CHAR(c) ((c) + ((c) < 10 ? '0' : \ (c) < 36 ? 'a' - 10 : \ 'A' - 36)) #define R FROM_SMALL_CHAR -jschar JSString::fromSmallChar[] = { R6(0) }; +const jschar JSString::fromSmallChar[] = { R6(0) }; #undef R /* * For code-generation ease, length-2 strings are encoded as 12-bit int values, * where the upper 6 bits is the first character and the lower 6 bits is the * second character. 
*/ @@ -3175,17 +3181,17 @@ jschar JSString::fromSmallChar[] = { R6( { {FROM_SMALL_CHAR((c) >> 6), FROM_SMALL_CHAR((c) & 0x3F), 0x00} } } #ifdef __SUNPRO_CC #pragma pack(8) #else #pragma pack(push, 8) #endif -JSString JSString::length2StringTable[] +const JSString JSString::length2StringTable[] #ifdef __GNUC__ __attribute__ ((aligned (8))) #endif = { R12(0) }; #ifdef __SUNPRO_CC #pragma pack(0) #else @@ -3217,17 +3223,17 @@ const char JSString::deflatedLength2Stri JS_STATIC_ASSERT(100 + (1 << 7) + (1 << 4) + (1 << 3) + (1 << 2) == 256); #ifdef __SUNPRO_CC #pragma pack(8) #else #pragma pack(push, 8) #endif -JSString JSString::hundredStringTable[] +const JSString JSString::hundredStringTable[] #ifdef __GNUC__ __attribute__ ((aligned (8))) #endif = { R7(100), /* 100 through 227 */ R4(100 + (1 << 7)), /* 228 through 243 */ R3(100 + (1 << 7) + (1 << 4)), /* 244 through 251 */ R2(100 + (1 << 7) + (1 << 4) + (1 << 3)) /* 252 through 255 */ }; @@ -3235,17 +3241,17 @@ JSString JSString::hundredStringTable[] #undef R #define R(c) ((c) < 10 ? JSString::unitStringTable + ((c) + '0') : \ (c) < 100 ? JSString::length2StringTable + \ ((size_t)TO_SMALL_CHAR(((c) / 10) + '0') << 6) + \ TO_SMALL_CHAR(((c) % 10) + '0') : \ JSString::hundredStringTable + ((c) - 100)) -JSString *JSString::intStringTable[] = { R8(0) }; +const JSString *const JSString::intStringTable[] = { R8(0) }; #undef R #ifdef __SUNPRO_CC #pragma pack(0) #else #pragma pack(pop) #endif @@ -3480,17 +3486,17 @@ js_NewDependentString(JSContext *cx, JSS return cx->runtime->emptyString; if (start == 0 && length == base->length()) return base; jschar *chars = base->chars() + start; if (length == 1 && *chars < UNIT_STRING_LIMIT) - return &JSString::unitStringTable[*chars]; + return const_cast<JSString *>(&JSString::unitStringTable[*chars]); /* Try to avoid long chains of dependent strings. 
*/ while (base->isDependent()) base = base->dependentBase(); JS_ASSERT(base->isFlat()); ds = js_NewGCString(cx); @@ -4207,17 +4213,17 @@ DeflatedStringCache::sweep(JSContext *cx /* * We must take a lock even during the GC as JS_GetStringBytes() can be * called outside the request. */ JS_ACQUIRE_LOCK(lock); for (Map::Enum e(map); !e.empty(); e.popFront()) { JSString *str = e.front().key; - if (js_IsAboutToBeFinalized(str)) { + if (IsAboutToBeFinalized(str)) { char *bytes = e.front().value; e.removeFront(); /* * We cannot use cx->free here as bytes may come from the * embedding that calls JS_NewString(cx, bytes, length). Those * bytes may not be allocated via js_malloc and may not have * space for the background free list. @@ -4346,17 +4352,17 @@ js_GetStringBytes(JSContext *cx, JSStrin */ return JSString::deflatedIntStringTable + ((str - JSString::hundredStringTable) * 4); } if (cx) { rt = cx->runtime; } else { /* JS_GetStringBytes calls us with null cx. */ - rt = js_GetGCThingRuntime(str); + rt = GetGCThingRuntime(str); } return rt->deflatedStringCache->getBytes(cx, str); } /* * From java.lang.Character.java: *
--- a/js/src/jsstr.h +++ b/js/src/jsstr.h @@ -50,28 +50,31 @@ */ #include <ctype.h> #include "jsapi.h" #include "jsprvtd.h" #include "jshashtable.h" #include "jslock.h" #include "jsobj.h" #include "jsvalue.h" +#include "jscell.h" #define JSSTRING_BIT(n) ((size_t)1 << (n)) #define JSSTRING_BITMASK(n) (JSSTRING_BIT(n) - 1) enum { UNIT_STRING_LIMIT = 256U, SMALL_CHAR_LIMIT = 128U, /* Bigger chars cannot be in a length-2 string. */ NUM_SMALL_CHARS = 64U, INT_STRING_LIMIT = 256U, NUM_HUNDRED_STRINGS = 156U }; +extern JSStringFinalizeOp str_finalizers[8]; + extern jschar * js_GetDependentStringChars(JSString *str); extern JSString * JS_FASTCALL js_ConcatStrings(JSContext *cx, JSString *left, JSString *right); extern JSString * JS_FASTCALL js_ConcatStringsZ(JSContext *cx, const char *left, JSString *right); @@ -123,17 +126,17 @@ namespace js { namespace mjit { * NB: Always use the length() and chars() accessor methods. */ struct JSString { friend class js::TraceRecorder; friend class js::mjit::Compiler; friend JSAtom * js_AtomizeString(JSContext *cx, JSString *str, uintN flags); - + public: /* * Not private because we want to be able to use static * initializers for them. Don't use these directly! */ size_t mLengthAndFlags; /* in all strings */ union { jschar *mChars; /* in flat and dependent strings */ JSString *mLeft; /* in rope interior and top nodes */ @@ -191,17 +194,24 @@ struct JSString { static const size_t TYPE_MASK = JSSTRING_BITMASK(2); static const size_t TYPE_FLAGS_MASK = JSSTRING_BITMASK(4); inline bool hasFlag(size_t flag) const { return (mLengthAndFlags & flag) != 0; } - public: + inline js::gc::Cell *asCell() { + return reinterpret_cast<js::gc::Cell *>(this); + } + + inline js::gc::FreeCell *asFreeCell() { + return reinterpret_cast<js::gc::FreeCell *>(this); + } + /* * Generous but sane length bound; the "-1" is there for comptibility with * OOM tests. 
*/ static const size_t MAX_LENGTH = (1 << 28) - 1; inline size_t type() const { return mLengthAndFlags & TYPE_MASK; @@ -261,24 +271,26 @@ struct JSString { JS_ALWAYS_INLINE jschar *inlineStorage() { JS_ASSERT(isFlat()); return mInlineStorage; } /* Specific flat string initializer and accessor methods. */ JS_ALWAYS_INLINE void initFlat(jschar *chars, size_t length) { JS_ASSERT(length <= MAX_LENGTH); + JS_ASSERT(!isStatic(this)); e.mBase = NULL; e.mCapacity = 0; mLengthAndFlags = (length << FLAGS_LENGTH_SHIFT) | FLAT; mChars = chars; } JS_ALWAYS_INLINE void initFlatMutable(jschar *chars, size_t length, size_t cap) { JS_ASSERT(length <= MAX_LENGTH); + JS_ASSERT(!isStatic(this)); e.mBase = NULL; e.mCapacity = cap; mLengthAndFlags = (length << FLAGS_LENGTH_SHIFT) | FLAT | MUTABLE; mChars = chars; } JS_ALWAYS_INLINE jschar *flatChars() const { JS_ASSERT(isFlat()); @@ -319,36 +331,44 @@ struct JSString { * On the other hand, if the thread sees that the flag is unset, it could * be seeing a stale value when another thread has just atomized the string * and set the flag. But this can lead only to an extra call to * js_AtomizeString. This function would find that the string was already * hashed and return it with the atomized bit set. */ inline void flatSetAtomized() { JS_ASSERT(isFlat()); + JS_ASSERT(!isStatic(this)); JS_ATOMIC_SET_MASK((jsword *)&mLengthAndFlags, ATOMIZED); } inline void flatSetMutable() { JS_ASSERT(isFlat()); JS_ASSERT(!isAtomized()); mLengthAndFlags |= MUTABLE; } inline void flatClearMutable() { JS_ASSERT(isFlat()); - mLengthAndFlags &= ~MUTABLE; + + /* + * We cannot eliminate the flag check before writing to mLengthAndFlags as + * static strings may reside in write-protected memory. See bug 599481. + */ + if (mLengthAndFlags & MUTABLE) + mLengthAndFlags &= ~MUTABLE; } /* * The chars pointer should point somewhere inside the buffer owned by bstr. * The caller still needs to pass bstr for GC purposes. 
*/ inline void initDependent(JSString *bstr, jschar *chars, size_t len) { JS_ASSERT(len <= MAX_LENGTH); + JS_ASSERT(!isStatic(this)); e.mParent = NULL; mChars = chars; mLengthAndFlags = DEPENDENT | (len << FLAGS_LENGTH_SHIFT); e.mBase = bstr; } inline JSString *dependentBase() const { JS_ASSERT(isDependent()); @@ -363,16 +383,17 @@ struct JSString { JS_ASSERT(isDependent()); return length(); } /* Rope-related initializers and accessors. */ inline void initTopNode(JSString *left, JSString *right, size_t len, JSRopeBufferInfo *buf) { JS_ASSERT(left->length() + right->length() <= MAX_LENGTH); + JS_ASSERT(!isStatic(this)); mLengthAndFlags = TOP_NODE | (len << FLAGS_LENGTH_SHIFT); mLeft = left; e.mRight = right; e.mBufferWithInfo = buf; } inline void convertToInteriorNode(JSString *parent) { JS_ASSERT(isTopNode()); @@ -502,42 +523,44 @@ struct JSString { } #ifdef __SUNPRO_CC #pragma align 8 (__1cIJSStringPunitStringTable_, __1cIJSStringSlength2StringTable_, __1cIJSStringShundredStringTable_) #endif static const SmallChar INVALID_SMALL_CHAR = -1; - static jschar fromSmallChar[]; - static SmallChar toSmallChar[]; - static JSString unitStringTable[]; - static JSString length2StringTable[]; - static JSString hundredStringTable[]; + static const jschar fromSmallChar[]; + static const SmallChar toSmallChar[]; + static const JSString unitStringTable[]; + static const JSString length2StringTable[]; + static const JSString hundredStringTable[]; /* * Since int strings can be unit strings, length-2 strings, or hundred * strings, we keep a table to map from integer to the correct string. 
*/ - static JSString *intStringTable[]; + static const JSString *const intStringTable[]; static const char deflatedIntStringTable[]; static const char deflatedUnitStringTable[]; static const char deflatedLength2StringTable[]; static JSString *unitString(jschar c); static JSString *getUnitString(JSContext *cx, JSString *str, size_t index); static JSString *length2String(jschar c1, jschar c2); static JSString *intString(jsint i); + + JS_ALWAYS_INLINE void finalize(JSContext *cx, unsigned thingKind); }; /* * Short strings should be created in cases where it's worthwhile to avoid * mallocing the string buffer for a small string. We keep 2 string headers' * worth of space in short strings so that more strings can be stored this way. */ -struct JSShortString { +struct JSShortString : js::gc::Cell { JSString mHeader; JSString mDummy; /* * Set the length of the string, and return a buffer for the caller to write * to. This buffer must be written immediately, and should not be modified * afterward. */ @@ -556,16 +579,18 @@ struct JSShortString { } static const size_t MAX_SHORT_STRING_LENGTH = ((sizeof(JSString) + 2 * sizeof(size_t)) / sizeof(jschar)) - 1; static inline bool fitsIntoShortString(size_t length) { return length <= MAX_SHORT_STRING_LENGTH; } + + JS_ALWAYS_INLINE void finalize(JSContext *cx, unsigned thingKind); }; /* * We're doing some tricks to give us more space for short strings, so make * sure that space is ordered in the way we expect. 
*/ JS_STATIC_ASSERT(offsetof(JSString, mInlineStorage) == 2 * sizeof(void *)); JS_STATIC_ASSERT(offsetof(JSShortString, mDummy) == sizeof(JSString)); @@ -702,18 +727,16 @@ class JSRopeBuilder { }; JS_STATIC_ASSERT(JSString::INTERIOR_NODE & JSString::ROPE_BIT); JS_STATIC_ASSERT(JSString::TOP_NODE & JSString::ROPE_BIT); JS_STATIC_ASSERT(((JSString::MAX_LENGTH << JSString::FLAGS_LENGTH_SHIFT) >> JSString::FLAGS_LENGTH_SHIFT) == JSString::MAX_LENGTH); -JS_STATIC_ASSERT(sizeof(JSString) % JS_GCTHING_ALIGN == 0); - extern const jschar * js_GetStringChars(JSContext *cx, JSString *str); extern const jschar * js_UndependString(JSContext *cx, JSString *str); extern JSBool js_MakeStringImmutable(JSContext *cx, JSString *str);
--- a/js/src/jsstrinlines.h +++ b/js/src/jsstrinlines.h @@ -41,17 +41,17 @@ #define jsstrinlines_h___ #include "jsstr.h" inline JSString * JSString::unitString(jschar c) { JS_ASSERT(c < UNIT_STRING_LIMIT); - return &unitStringTable[c]; + return const_cast<JSString *>(&unitStringTable[c]); } inline JSString * JSString::getUnitString(JSContext *cx, JSString *str, size_t index) { JS_ASSERT(index < str->length()); jschar c = str->chars()[index]; if (c < UNIT_STRING_LIMIT) @@ -59,24 +59,68 @@ JSString::getUnitString(JSContext *cx, J return js_NewDependentString(cx, str, index, 1); } inline JSString * JSString::length2String(jschar c1, jschar c2) { JS_ASSERT(fitsInSmallChar(c1)); JS_ASSERT(fitsInSmallChar(c2)); - return &length2StringTable[(((size_t)toSmallChar[c1]) << 6) + toSmallChar[c2]]; + return const_cast<JSString *> + (&length2StringTable[(((size_t)toSmallChar[c1]) << 6) + toSmallChar[c2]]); } inline JSString * JSString::intString(jsint i) { jsuint u = jsuint(i); JS_ASSERT(u < INT_STRING_LIMIT); - return JSString::intStringTable[u]; + return const_cast<JSString *>(JSString::intStringTable[u]); +} + +inline void +JSString::finalize(JSContext *cx, unsigned thingKind) { + if (JS_LIKELY(thingKind == js::gc::FINALIZE_STRING)) { + JS_ASSERT(!JSString::isStatic(this)); + JS_RUNTIME_UNMETER(cx->runtime, liveStrings); + if (isDependent()) { + JS_ASSERT(dependentBase()); + JS_RUNTIME_UNMETER(cx->runtime, liveDependentStrings); + } else if (isFlat()) { + /* + * flatChars for stillborn string is null, but cx->free checks + * for a null pointer on its own. + */ + cx->free(flatChars()); + } else if (isTopNode()) { + cx->free(topNodeBuffer()); + } + } else { + unsigned type = thingKind - js::gc::FINALIZE_EXTERNAL_STRING0; + JS_ASSERT(type < JS_ARRAY_LENGTH(str_finalizers)); + JS_ASSERT(!isStatic(this)); + JS_ASSERT(isFlat()); + JS_RUNTIME_UNMETER(cx->runtime, liveStrings); + + /* A stillborn string has null chars. 
*/ + jschar *chars = flatChars(); + if (!chars) + return; + JSStringFinalizeOp finalizer = str_finalizers[type]; + if (finalizer) + finalizer(cx, this); + } +} + +inline void +JSShortString::finalize(JSContext *cx, unsigned thingKind) +{ + JS_ASSERT(js::gc::FINALIZE_SHORT_STRING == thingKind); + JS_ASSERT(!JSString::isStatic(header())); + JS_ASSERT(header()->isFlat()); + JS_RUNTIME_UNMETER(cx->runtime, liveStrings); } inline JSRopeBuilder::JSRopeBuilder(JSContext *cx) : cx(cx), mStr(cx->runtime->emptyString) {} #endif /* jsstrinlines_h___ */
--- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -98,16 +98,17 @@ #include <sys/mman.h> #include <fcntl.h> #include <string.h> #include <elf.h> #endif namespace nanojit { using namespace js; +using namespace js::gc; /* Implement embedder-specific nanojit members. */ void* nanojit::Allocator::allocChunk(size_t nbytes) { VMAllocator *vma = (VMAllocator*)this; JS_ASSERT(!vma->outOfMemory()); @@ -2292,17 +2293,17 @@ ResetJIT(JSContext* cx, TraceVisFlushRea void FlushJITCache(JSContext *cx) { ResetJIT(cx, FR_OOM); } static void -TrashTree(JSContext* cx, TreeFragment* f); +TrashTree(TreeFragment* f); template <class T> static T& InitConst(const T &t) { return const_cast<T &>(t); } @@ -2531,20 +2532,20 @@ TraceRecorder::TraceRecorder(JSContext* } TraceRecorder::~TraceRecorder() { /* Should already have been adjusted by callers before calling delete. */ JS_ASSERT(traceMonitor->recorder != this); if (trashSelf) - TrashTree(cx, fragment->root); + TrashTree(fragment->root); for (unsigned int i = 0; i < whichTreesToTrash.length(); i++) - TrashTree(cx, whichTreesToTrash[i]); + TrashTree(whichTreesToTrash[i]); /* Purge the tempAlloc used during recording. */ tempAlloc().reset(); forgetGuardedShapes(); } inline bool @@ -2609,17 +2610,17 @@ TraceRecorder::finishAbort(const char* r * tree. Otherwise, remove the VMSideExits we added while recording, which * are about to be invalid. * * BIG FAT WARNING: resetting the length is only a valid strategy as long as * there may be only one recorder active for a single TreeInfo at a time. * Otherwise, we may be throwing away another recorder's valid side exits. */ if (fragment->root == fragment) { - TrashTree(cx, fragment->toTreeFragment()); + TrashTree(fragment->toTreeFragment()); } else { JS_ASSERT(numSideExitsBefore <= fragment->root->sideExits.length()); fragment->root->sideExits.setLength(numSideExitsBefore); } /* Grab local copies of members needed after |delete this|. 
*/ JSContext* localcx = cx; TraceMonitor* localtm = traceMonitor; @@ -2915,56 +2916,77 @@ TraceMonitor::flush() verbose_only( branches = NULL; ) PodArrayZero(vmfragments); reFragments = new (alloc) REHashMap(alloc); needFlush = JS_FALSE; } -static inline void -MarkTree(JSTracer* trc, TreeFragment *f) -{ +inline bool +HasUnreachableGCThings(TreeFragment *f) +{ + /* + * We do not check here for dead scripts as JSScript is not a GC thing. + * Instead PurgeScriptFragments is used to remove dead script fragments. + * See bug 584860. + */ + if (IsAboutToBeFinalized(f->globalObj)) + return true; Value* vp = f->gcthings.data(); - unsigned len = f->gcthings.length(); - while (len--) { + for (unsigned len = f->gcthings.length(); len; --len) { Value &v = *vp++; - JS_SET_TRACING_NAME(trc, "jitgcthing"); JS_ASSERT(v.isMarkable()); - Mark(trc, v.asGCThing(), v.gcKind()); + if (IsAboutToBeFinalized(v.toGCThing())) + return true; } const Shape** shapep = f->shapes.data(); - len = f->shapes.length(); - while (len--) { + for (unsigned len = f->shapes.length(); len; --len) { const Shape* shape = *shapep++; - shape->trace(trc); - } + if (!shape->marked()) + return true; + } + return false; } void -TraceMonitor::mark(JSTracer* trc) -{ - if (!trc->context->runtime->gcFlushCodeCaches) { - for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) { - TreeFragment* f = vmfragments[i]; - while (f) { - if (f->code()) - MarkTree(trc, f); - TreeFragment* peer = f->peer; - while (peer) { - if (peer->code()) - MarkTree(trc, peer); - peer = peer->peer; - } - f = f->next; +TraceMonitor::sweep() +{ + JS_ASSERT(!ontrace()); + debug_only_print0(LC_TMTracer, "Purging fragments with dead things"); + + for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) { + TreeFragment** fragp = &vmfragments[i]; + while (TreeFragment* frag = *fragp) { + TreeFragment* peer = frag; + do { + if (HasUnreachableGCThings(peer)) + break; + peer = peer->peer; + } while (peer); + if (peer) { + debug_only_printf(LC_TMTracer, + "TreeFragment 
peer %p has dead gc thing." + "Disconnecting tree %p with ip %p\n", + (void *) peer, (void *) frag, frag->ip); + JS_ASSERT(frag->root == frag); + *fragp = frag->next; + do { + verbose_only( FragProfiling_FragFinalizer(frag, this); ) + TrashTree(frag); + frag = frag->peer; + } while (frag); + } else { + fragp = &frag->next; } } - if (recorder) - MarkTree(trc, recorder->getTree()); - } + } + + if (recorder && HasUnreachableGCThings(recorder->getTree())) + recorder->finishAbort("dead GC things"); } /* * Box a value from the native stack back into the Value format. */ static inline void NativeToValue(JSContext* cx, Value& v, JSValueType type, double* slot) { @@ -5666,34 +5688,34 @@ TraceRecorder::startRecorder(JSContext* ResetJIT(cx, FR_OOM); return false; } return true; } static void -TrashTree(JSContext* cx, TreeFragment* f) +TrashTree(TreeFragment* f) { JS_ASSERT(f == f->root); debug_only_printf(LC_TMTreeVis, "TREEVIS TRASH FRAG=%p\n", (void*)f); if (!f->code()) return; AUDIT(treesTrashed); debug_only_print0(LC_TMTracer, "Trashing tree info.\n"); f->setCode(NULL); TreeFragment** data = f->dependentTrees.data(); unsigned length = f->dependentTrees.length(); for (unsigned n = 0; n < length; ++n) - TrashTree(cx, data[n]); + TrashTree(data[n]); data = f->linkedTrees.data(); length = f->linkedTrees.length(); for (unsigned n = 0; n < length; ++n) - TrashTree(cx, data[n]); + TrashTree(data[n]); } static void SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee) { VOUCH_DOES_NOT_REQUIRE_STACK(); /* Assert that we have a correct sp distance from cx->fp()->slots in fi. */ @@ -5895,17 +5917,17 @@ AttemptToStabilizeTree(JSContext* cx, JS JS_ASSERT(from->nGlobalTypes() == from->globalSlots->length()); /* This exit is no longer unstable, so remove it. 
*/ if (exit->exitType == UNSTABLE_LOOP_EXIT) from->removeUnstableExit(exit); debug_only_stmt(DumpPeerStability(tm, peer->ip, globalObj, from->globalShape, from->argc);) return false; } else if (consensus == TypeConsensus_Undemotes) { /* The original tree is unconnectable, so trash it. */ - TrashTree(cx, peer); + TrashTree(peer); return false; } SlotList *globalSlots = from->globalSlots; JS_ASSERT(from == from->root); /* If this tree has been blacklisted, don't try to record a new one. */ @@ -7807,32 +7829,37 @@ FinishJIT(TraceMonitor *tm) JS_REQUIRES_STACK void PurgeScriptFragments(JSContext* cx, JSScript* script) { debug_only_printf(LC_TMTracer, "Purging fragments for JSScript %p.\n", (void*)script); TraceMonitor* tm = &JS_TRACE_MONITOR(cx); + + /* A recorder script is being evaluated and can not be destroyed or GC-ed. */ + JS_ASSERT_IF(tm->recorder, + JS_UPTRDIFF(tm->recorder->getTree()->ip, script->code) >= script->length); + for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) { TreeFragment** fragp = &tm->vmfragments[i]; while (TreeFragment* frag = *fragp) { if (JS_UPTRDIFF(frag->ip, script->code) < script->length) { /* This fragment is associated with the script. */ debug_only_printf(LC_TMTracer, "Disconnecting TreeFragment %p " "with ip %p, in range [%p,%p).\n", (void*)frag, frag->ip, script->code, script->code + script->length); JS_ASSERT(frag->root == frag); *fragp = frag->next; do { verbose_only( FragProfiling_FragFinalizer(frag, tm); ) - TrashTree(cx, frag); + TrashTree(frag); } while ((frag = frag->peer) != NULL); continue; } fragp = &frag->next; } } RecordAttemptMap &table = *tm->recordAttempts; @@ -10369,17 +10396,18 @@ TraceRecorder::record_JSOP_LEAVEWITH() // is entered or exited. 
static JSBool JS_FASTCALL functionProbe(JSContext *cx, JSFunction *fun, JSBool enter) { cx->doFunctionCallback(fun, FUN_SCRIPT(fun), enter); return true; } -JS_DEFINE_CALLINFO_3(static, BOOL, functionProbe, CONTEXT, FUNCTION, BOOL, 0, 0) +JS_DEFINE_CALLINFO_3(static, BOOL, functionProbe, CONTEXT, FUNCTION, BOOL, + 0, ACCSET_STORE_ANY) #endif JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_RETURN() { /* A return from callDepth 0 terminates the current loop, except for recursion. */ if (callDepth == 0) { AUDIT(returnLoopExits);
--- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -1419,18 +1419,19 @@ class TraceRecorder friend class DetermineTypesVisitor; friend class RecursiveSlotMap; friend class UpRecursiveSlotMap; friend MonitorResult MonitorLoopEdge(JSContext*, uintN&); friend TracePointAction MonitorTracePoint(JSContext*, uintN &inlineCallCount, bool &blacklist); friend void AbortRecording(JSContext*, const char*); friend class BoxArg; + friend void TraceMonitor::sweep(); -public: + public: static bool JS_REQUIRES_STACK startRecorder(JSContext*, VMSideExit*, VMFragment*, unsigned stackSlots, unsigned ngslots, JSValueType* typeMap, VMSideExit* expectedInnerExit, jsbytecode* outerTree, uint32 outerArgc, bool speculate); /* Accessors. */ VMFragment* getFragment() const { return fragment; }
--- a/js/src/jstypedarray.cpp +++ b/js/src/jstypedarray.cpp @@ -59,16 +59,17 @@ #include "jsstaticcheck.h" #include "jsbit.h" #include "jsvector.h" #include "jstypedarray.h" #include "jsobjinlines.h" using namespace js; +using namespace js::gc; /* * ArrayBuffer * * This class holds the underlying raw buffer that the TypedArray classes * access. It can be created explicitly and passed to a TypedArray, or * can be created implicitly by constructing a TypedArray with a size. */ @@ -301,17 +302,17 @@ TypedArray::obj_lookupProperty(JSContext return proto->lookupProperty(cx, id, objp, propp); } void TypedArray::obj_trace(JSTracer *trc, JSObject *obj) { TypedArray *tarray = fromJSObject(obj); JS_ASSERT(tarray); - JS_CALL_OBJECT_TRACER(trc, tarray->bufferJS, "typedarray.buffer"); + MarkObject(trc, *tarray->bufferJS, "typedarray.buffer"); } JSBool TypedArray::obj_getAttributes(JSContext *cx, JSObject *obj, jsid id, uintN *attrsp) { *attrsp = (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) ? JSPROP_PERMANENT | JSPROP_READONLY : JSPROP_PERMANENT | JSPROP_ENUMERATE;
--- a/js/src/jsvalue.h +++ b/js/src/jsvalue.h @@ -595,17 +595,17 @@ class Value JS_ALWAYS_INLINE JSObject *toObjectOrNull() const { JS_ASSERT(isObjectOrNull()); return JSVAL_TO_OBJECT_IMPL(data); } JS_ALWAYS_INLINE - void *asGCThing() const { + void *toGCThing() const { JS_ASSERT(isGCThing()); return JSVAL_TO_GCTHING_IMPL(data); } JS_ALWAYS_INLINE bool toBoolean() const { JS_ASSERT(isBoolean()); return JSVAL_TO_BOOLEAN_IMPL(data); @@ -876,16 +876,29 @@ typedef JSBool (* DefinePropOp)(JSContext *cx, JSObject *obj, jsid id, const Value *value, PropertyOp getter, PropertyOp setter, uintN attrs); typedef JSBool (* PropertyIdOp)(JSContext *cx, JSObject *obj, jsid id, Value *vp); typedef JSBool (* StrictPropertyIdOp)(JSContext *cx, JSObject *obj, jsid id, Value *vp, JSBool strict); typedef JSBool (* CallOp)(JSContext *cx, uintN argc, Value *vp); +typedef JSBool +(* LookupPropOp)(JSContext *cx, JSObject *obj, jsid id, JSObject **objp, + JSProperty **propp); +typedef JSBool +(* AttributesOp)(JSContext *cx, JSObject *obj, jsid id, uintN *attrsp); +typedef JSType +(* TypeOfOp)(JSContext *cx, JSObject *obj); +typedef void +(* TraceOp)(JSTracer *trc, JSObject *obj); +typedef JSObject * +(* ObjectOp)(JSContext *cx, JSObject *obj); +typedef void +(* FinalizeOp)(JSContext *cx, JSObject *obj); class AutoIdVector; /* * Prepare to make |obj| non-extensible; in particular, fully resolve its properties. * On error, return false. * If |obj| is now ready to become non-extensible, set |*fixed| to true and return true. 
* If |obj| refuses to become non-extensible, set |*fixed| to false and return true; the @@ -954,29 +967,29 @@ struct ClassExtension { JSIteratorOp iteratorObject; JSObjectOp wrappedObject; /* NB: infallible, null returns are treated as the original object */ }; #define JS_NULL_CLASS_EXT {NULL,NULL,NULL,NULL,NULL} struct ObjectOps { - JSLookupPropOp lookupProperty; + js::LookupPropOp lookupProperty; js::DefinePropOp defineProperty; js::PropertyIdOp getProperty; js::StrictPropertyIdOp setProperty; - JSAttributesOp getAttributes; - JSAttributesOp setAttributes; + js::AttributesOp getAttributes; + js::AttributesOp setAttributes; js::StrictPropertyIdOp deleteProperty; js::NewEnumerateOp enumerate; - JSTypeOfOp typeOf; - JSTraceOp trace; + js::TypeOfOp typeOf; + js::TraceOp trace; js::FixOp fix; - JSObjectOp thisObject; - JSFinalizeOp clear; + js::ObjectOp thisObject; + js::FinalizeOp clear; }; #define JS_NULL_OBJECT_OPS {NULL,NULL,NULL,NULL,NULL,NULL, NULL,NULL,NULL,NULL,NULL,NULL} struct Class { JS_CLASS_MEMBERS; ClassExtension ext; ObjectOps ops;
--- a/js/src/jswrapper.cpp +++ b/js/src/jswrapper.cpp @@ -45,20 +45,22 @@ #include "jsnum.h" #include "jsregexp.h" #include "jswrapper.h" #include "methodjit/PolyIC.h" #include "methodjit/MonoIC.h" #ifdef JS_METHODJIT # include "assembler/jit/ExecutableAllocator.h" #endif +#include "jscompartment.h" #include "jsobjinlines.h" using namespace js; +using namespace js::gc; static int sWrapperFamily = 0; bool JSObject::isWrapper() const { return isProxy() && getProxyHandler()->family() == &sWrapperFamily; } @@ -251,17 +253,17 @@ JSWrapper::fun_toString(JSContext *cx, J str = JSProxyHandler::fun_toString(cx, wrapper, indent); leave(cx, wrapper); return str; } void JSWrapper::trace(JSTracer *trc, JSObject *wrapper) { - JS_CALL_OBJECT_TRACER(trc, wrappedObject(wrapper), "wrappedObject"); + MarkObject(trc, *wrappedObject(wrapper), "wrappedObject"); } bool JSWrapper::enter(JSContext *cx, JSObject *wrapper, jsid id, bool set) { return true; } @@ -289,254 +291,16 @@ TransparentObjectWrapper(JSContext *cx, { // Allow wrapping outer window proxies. JS_ASSERT(!obj->isWrapper() || obj->getClass()->ext.innerObject); return JSWrapper::New(cx, obj, wrappedProto, NULL, &JSCrossCompartmentWrapper::singleton); } } -JSCompartment::JSCompartment(JSRuntime *rt) - : rt(rt), principals(NULL), data(NULL), marked(false), debugMode(false) -{ - JS_INIT_CLIST(&scripts); -} - -JSCompartment::~JSCompartment() -{ -} - -bool -JSCompartment::init() -{ - return crossCompartmentWrappers.init(); -} - -bool -JSCompartment::wrap(JSContext *cx, Value *vp) -{ - JS_ASSERT(cx->compartment == this); - - uintN flags = 0; - - JS_CHECK_RECURSION(cx, return false); - - /* Only GC things have to be wrapped or copied. */ - if (!vp->isMarkable()) - return true; - - /* Static strings do not have to be wrapped. */ - if (vp->isString() && JSString::isStatic(vp->toString())) - return true; - - /* Unwrap incoming objects. 
*/ - if (vp->isObject()) { - JSObject *obj = &vp->toObject(); - - /* If the object is already in this compartment, we are done. */ - if (obj->getCompartment(cx) == this) - return true; - - /* Don't unwrap an outer window proxy. */ - if (!obj->getClass()->ext.innerObject) { - obj = vp->toObject().unwrap(&flags); - OBJ_TO_OUTER_OBJECT(cx, obj); - if (!obj) - return false; - - vp->setObject(*obj); - } - - /* If the wrapped object is already in this compartment, we are done. */ - if (obj->getCompartment(cx) == this) - return true; - } - - /* If we already have a wrapper for this value, use it. */ - if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(*vp)) { - *vp = p->value; - return true; - } - - if (vp->isString()) { - Value orig = *vp; - JSString *str = vp->toString(); - JSString *wrapped = js_NewStringCopyN(cx, str->chars(), str->length()); - if (!wrapped) - return false; - vp->setString(wrapped); - return crossCompartmentWrappers.put(orig, *vp); - } - - JSObject *obj = &vp->toObject(); - - /* - * Recurse to wrap the prototype. Long prototype chains will run out of - * stack, causing an error in CHECK_RECURSE. - * - * Wrapping the proto before creating the new wrapper and adding it to the - * cache helps avoid leaving a bad entry in the cache on OOM. But note that - * if we wrapped both proto and parent, we would get infinite recursion - * here (since Object.prototype->parent->proto leads to Object.prototype - * itself). - */ - JSObject *proto = obj->getProto(); - if (!wrap(cx, &proto)) - return false; - - /* - * We hand in the original wrapped object into the wrap hook to allow - * the wrap hook to reason over what wrappers are currently applied - * to the object. 
- */ - JSObject *wrapper = cx->runtime->wrapObjectCallback(cx, obj, proto, flags); - if (!wrapper) - return false; - wrapper->setProto(proto); - vp->setObject(*wrapper); - if (!crossCompartmentWrappers.put(wrapper->getProxyPrivate(), *vp)) - return false; - - /* - * Wrappers should really be parented to the wrapped parent of the wrapped - * object, but in that case a wrapped global object would have a NULL - * parent without being a proper global object (JSCLASS_IS_GLOBAL). Instead, - * we parent all wrappers to the global object in their home compartment. - * This loses us some transparency, and is generally very cheesy. - */ - JSObject *global; - if (cx->hasfp()) { - global = cx->fp()->scopeChain().getGlobal(); - } else { - global = cx->globalObject; - OBJ_TO_INNER_OBJECT(cx, global); - if (!global) - return false; - } - - wrapper->setParent(global); - return true; -} - -bool -JSCompartment::wrap(JSContext *cx, JSString **strp) -{ - AutoValueRooter tvr(cx, StringValue(*strp)); - if (!wrap(cx, tvr.addr())) - return false; - *strp = tvr.value().toString(); - return true; -} - -bool -JSCompartment::wrap(JSContext *cx, JSObject **objp) -{ - if (!*objp) - return true; - AutoValueRooter tvr(cx, ObjectValue(**objp)); - if (!wrap(cx, tvr.addr())) - return false; - *objp = &tvr.value().toObject(); - return true; -} - -bool -JSCompartment::wrapId(JSContext *cx, jsid *idp) -{ - if (JSID_IS_INT(*idp)) - return true; - AutoValueRooter tvr(cx, IdToValue(*idp)); - if (!wrap(cx, tvr.addr())) - return false; - return ValueToId(cx, tvr.value(), idp); -} - -bool -JSCompartment::wrap(JSContext *cx, PropertyOp *propp) -{ - Value v = CastAsObjectJsval(*propp); - if (!wrap(cx, &v)) - return false; - *propp = CastAsPropertyOp(v.toObjectOrNull()); - return true; -} - -bool -JSCompartment::wrap(JSContext *cx, PropertyDescriptor *desc) -{ - return wrap(cx, &desc->obj) && - (!(desc->attrs & JSPROP_GETTER) || wrap(cx, &desc->getter)) && - (!(desc->attrs & JSPROP_SETTER) || wrap(cx, 
&desc->setter)) && - wrap(cx, &desc->value); -} - -bool -JSCompartment::wrap(JSContext *cx, AutoIdVector &props) -{ - jsid *vector = props.begin(); - jsint length = props.length(); - for (size_t n = 0; n < size_t(length); ++n) { - if (!wrapId(cx, &vector[n])) - return false; - } - return true; -} - -bool -JSCompartment::wrapException(JSContext *cx) -{ - JS_ASSERT(cx->compartment == this); - - if (cx->throwing) { - AutoValueRooter tvr(cx, cx->exception); - cx->throwing = false; - cx->exception.setNull(); - if (wrap(cx, tvr.addr())) { - cx->throwing = true; - cx->exception = tvr.value(); - } - return false; - } - return true; -} - -void -JSCompartment::sweep(JSContext *cx) -{ - /* Remove dead wrappers from the table. */ - for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) { - if (js_IsAboutToBeFinalized(e.front().value.asGCThing())) - e.removeFront(); - } -} - -void -JSCompartment::purge(JSContext *cx) -{ -#ifdef JS_METHODJIT - for (JSScript *script = (JSScript *)scripts.next; - &script->links != &scripts; - script = (JSScript *)script->links.next) { - if (script->jit) { -# if defined JS_POLYIC - mjit::ic::PurgePICs(cx, script); -# endif -# if defined JS_MONOIC - /* - * MICs do not refer to data which can be GC'ed, but are sensitive - * to shape regeneration. - */ - if (cx->runtime->gcRegenShapes) - mjit::ic::PurgeMICs(cx, script); -# endif - } - } -#endif -} - AutoCompartment::AutoCompartment(JSContext *cx, JSObject *target) : context(cx), origin(cx->compartment), target(target), destination(target->getCompartment(cx)), input(cx), entered(false) {
--- a/js/src/jswrapper.h +++ b/js/src/jswrapper.h @@ -40,16 +40,18 @@ * ***** END LICENSE BLOCK ***** */ #ifndef jswrapper_h___ #define jswrapper_h___ #include "jsapi.h" #include "jsproxy.h" +JS_BEGIN_EXTERN_C + /* No-op wrapper handler base class. */ class JSWrapper : public js::JSProxyHandler { uintN mFlags; public: uintN flags() const { return mFlags; } explicit JS_FRIEND_API(JSWrapper(uintN flags)); @@ -170,9 +172,11 @@ class AutoCompartment AutoCompartment & operator=(const AutoCompartment &); }; extern JSObject * TransparentObjectWrapper(JSContext *cx, JSObject *obj, JSObject *wrappedProto, uintN flags); } +JS_END_EXTERN_C + #endif
--- a/js/src/jsxml.cpp +++ b/js/src/jsxml.cpp @@ -75,16 +75,17 @@ #include "jsinterpinlines.h" #include "jsobjinlines.h" #ifdef DEBUG #include <string.h> /* for #ifdef DEBUG memset calls */ #endif using namespace js; +using namespace js::gc; /* * NOTES * - in the js shell, you must use the -x command line option, or call * options('xml') before compiling anything that uses XML literals * * TODO * - XXXbe patrol @@ -868,16 +869,25 @@ static JSBool attr_identity(const void *a, const void *b) { const JSXML *xmla = (const JSXML *) a; const JSXML *xmlb = (const JSXML *) b; return qname_identity(xmla->name, xmlb->name); } +void +JSXMLArrayCursor::trace(JSTracer *trc) { +#ifdef DEBUG + size_t index = 0; +#endif + for (JSXMLArrayCursor *cursor = this; cursor; cursor = cursor->next) + js::gc::MarkGCThing(trc, cursor->root, "cursor_root", index++); +} + static void XMLArrayCursorTrace(JSTracer *trc, JSXMLArrayCursor *cursor) { cursor->trace(trc); } /* NB: called with null cx from the GC, via xml_trace => JSXMLArray::trim. */ bool @@ -1277,17 +1287,17 @@ ParseNodeToXML(Parser *parser, JSParseNo JSString *str; uint32 length, n, i, j; JSParseNode *pn2, *pn3, *head, **pnp; JSObject *ns; JSObject *qn, *attrjqn; JSXMLClass xml_class; int stackDummy;