author | Jon Coppeard <jcoppeard@mozilla.com> |
Wed, 06 Mar 2013 18:10:22 +0000 | |
changeset 124127 | 05113da6e613f75d59884b23d7d91f2bfd64641b |
parent 124126 | 0d3f5942d825d1345e33fa68473c2329d5aa74d8 |
child 124128 | 59f5f4b017edad7bd1166597c08c1aad88882a09 |
push id | 24408 |
push user | ryanvm@gmail.com |
push date | Fri, 08 Mar 2013 04:58:11 +0000 |
treeherder | mozilla-central@cb432984d5ce [default view] [failures only] |
perfherder | [talos] [build metrics] [platform microbench] (compared to previous push) |
reviewers | terrence |
bugs | 848449 |
milestone | 22.0a1 |
first release with | nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
|
last release without | nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
|
--- a/js/src/gc/Marking.cpp +++ b/js/src/gc/Marking.cpp @@ -151,17 +151,16 @@ AsGCMarker(JSTracer *trc) JS_ASSERT(IS_GC_MARKING_TRACER(trc)); return static_cast<GCMarker *>(trc); } template<typename T> static void MarkInternal(JSTracer *trc, T **thingp) { - AutoAssertNoGC nogc; JS_ASSERT(thingp); T *thing = *thingp; CheckMarkedThing(trc, thing); /* * Don't mark things outside a compartment if we are in a per-compartment * GC.
--- a/js/src/ion/Bailouts.cpp +++ b/js/src/ion/Bailouts.cpp @@ -72,18 +72,16 @@ IonBailoutIterator::dump() const } else { IonFrameIterator::dump(); } } static RawScript GetBailedJSScript(JSContext *cx) { - AutoAssertNoGC nogc; - // Just after the frame conversion, we can safely interpret the ionTop as JS // frame because it targets the bailed JS frame converted to an exit frame. IonJSFrameLayout *frame = reinterpret_cast<IonJSFrameLayout*>(cx->mainThread().ionTop); switch (GetCalleeTokenTag(frame->calleeToken())) { case CalleeToken_Function: { JSFunction *fun = CalleeTokenToFunction(frame->calleeToken()); return fun->nonLazyScript(); } @@ -93,17 +91,16 @@ GetBailedJSScript(JSContext *cx) JS_NOT_REACHED("unexpected callee token kind"); return NULL; } } void StackFrame::initFromBailout(JSContext *cx, SnapshotIterator &iter) { - AutoAssertNoGC nogc; uint32_t exprStackSlots = iter.slots() - script()->nfixed; #ifdef TRACK_SNAPSHOTS iter.spewBailingFrom(); #endif IonSpew(IonSpew_Bailouts, " expr stack slots %u, is function frame %u", exprStackSlots, isFunctionFrame()); @@ -185,18 +182,16 @@ StackFrame::initFromBailout(JSContext *c // will have the real arguments in the slots and not always be equal. JS_ASSERT_IF(JSOp(*regs.pc) != JSOP_FUNAPPLY, exprStackSlots == js_ReconstructStackDepth(cx, script(), regs.pc)); } static StackFrame * PushInlinedFrame(JSContext *cx, StackFrame *callerFrame) { - AutoAssertNoGC nogc; - // Grab the callee object out of the caller's frame, which has already been restored. // N.B. we currently assume that the caller frame is at a JSOP_CALL pc for the caller frames, // which will not be the case when we inline getters (in which case it would be a // JSOP_GETPROP). That will have to be handled differently. 
FrameRegs &regs = cx->regs(); JS_ASSERT(JSOp(*regs.pc) == JSOP_CALL || JSOp(*regs.pc) == JSOP_NEW || JSOp(*regs.pc) == JSOP_FUNAPPLY); int callerArgc = GET_ARGC(regs.pc); @@ -226,17 +221,16 @@ PushInlinedFrame(JSContext *cx, StackFra fp->formals()[-2].setObject(*fun); return fp; } static uint32_t ConvertFrames(JSContext *cx, IonActivation *activation, IonBailoutIterator &it) { - AutoAssertNoGC nogc; IonSpew(IonSpew_Bailouts, "Bailing out %s:%u, IonScript %p", it.script()->filename, it.script()->lineno, (void *) it.ionScript()); IonSpew(IonSpew_Bailouts, " reading from snapshot offset %u size %u", it.snapshotOffset(), it.ionScript()->snapshotsSize()); #ifdef DEBUG // Use count is reset after invalidation. Log use count on bailouts to // determine if we have a critical sequence of bailout. // @@ -342,17 +336,16 @@ ConvertFrames(JSContext *cx, IonActivati JS_NOT_REACHED("bad bailout kind"); return BAILOUT_RETURN_FATAL_ERROR; } uint32_t ion::Bailout(BailoutStack *sp) { - AutoAssertNoGC nogc; JSContext *cx = GetIonContext()->cx; // We don't have an exit frame. cx->mainThread().ionTop = NULL; IonActivationIterator ionActivations(cx); IonBailoutIterator iter(ionActivations, sp); IonActivation *activation = ionActivations.activation(); // IonCompartment *ioncompartment = cx->compartment->ionCompartment(); @@ -365,17 +358,16 @@ ion::Bailout(BailoutStack *sp) EnsureExitFrame(iter.jsFrame()); return retval; } uint32_t ion::InvalidationBailout(InvalidationBailoutStack *sp, size_t *frameSizeOut) { - AutoAssertNoGC nogc; sp->checkInvariants(); JSContext *cx = GetIonContext()->cx; // We don't have an exit frame. 
cx->mainThread().ionTop = NULL; IonActivationIterator ionActivations(cx); IonBailoutIterator iter(ionActivations, sp); @@ -477,17 +469,16 @@ ion::ReflowTypeInfo(uint32_t bailoutResu types::TypeScript::Monitor(cx, script, pc, result); return true; } uint32_t ion::RecompileForInlining() { - AutoAssertNoGC nogc; JSContext *cx = GetIonContext()->cx; RawScript script = cx->fp()->script(); IonSpew(IonSpew_Inlining, "Recompiling script to inline calls %s:%d", script->filename, script->lineno); // Invalidate the script to force a recompile. if (!Invalidate(cx, script, /* resetUses */ false))
--- a/js/src/ion/Ion.cpp +++ b/js/src/ion/Ion.cpp @@ -1894,17 +1894,16 @@ ion::FastInvoke(JSContext *cx, HandleFun JS_ASSERT_IF(result.isMagic(), result.isMagic(JS_ION_ERROR)); return result.isMagic() ? IonExec_Error : IonExec_Ok; } static void InvalidateActivation(FreeOp *fop, uint8_t *ionTop, bool invalidateAll) { - AutoAssertNoGC nogc; IonSpew(IonSpew_Invalidate, "BEGIN invalidating activation"); size_t frameno = 1; for (IonFrameIterator it(ionTop); !it.done(); ++it, ++frameno) { JS_ASSERT_IF(frameno == 1, it.type() == IonFrame_Exit); #ifdef DEBUG @@ -2036,17 +2035,16 @@ ion::InvalidateAll(FreeOp *fop, JSCompar } } void ion::Invalidate(types::TypeCompartment &types, FreeOp *fop, const Vector<types::RecompileInfo> &invalid, bool resetUses) { - AutoAssertNoGC nogc; IonSpew(IonSpew_Invalidate, "Start invalidation."); AutoFlushCache afc ("Invalidate"); // Add an invalidation reference to all invalidated IonScripts to indicate // to the traversal which frames have been invalidated. bool anyInvalidation = false; for (size_t i = 0; i < invalid.length(); i++) { const types::CompilerOutput &co = *invalid[i].compilerOutput(types); @@ -2112,24 +2110,22 @@ ion::Invalidate(types::TypeCompartment & if (resetUses) script->resetUseCount(); } } void ion::Invalidate(JSContext *cx, const Vector<types::RecompileInfo> &invalid, bool resetUses) { - AutoAssertNoGC nogc; ion::Invalidate(cx->compartment->types, cx->runtime->defaultFreeOp(), invalid, resetUses); } bool ion::Invalidate(JSContext *cx, RawScript script, ExecutionMode mode, bool resetUses) { - AutoAssertNoGC nogc; JS_ASSERT(script->hasIonScript()); Vector<types::RecompileInfo> scripts(cx); switch (mode) { case SequentialExecution: JS_ASSERT(script->hasIonScript()); if (!scripts.append(script->ionScript()->recompileInfo()))
--- a/js/src/ion/IonBuilder.cpp +++ b/js/src/ion/IonBuilder.cpp @@ -137,18 +137,16 @@ IonBuilder::CFGState::TableSwitch(jsbyte state.tableswitch.ins = ins; state.tableswitch.currentBlock = 0; return state; } JSFunction * IonBuilder::getSingleCallTarget(types::StackTypeSet *calleeTypes) { - AutoAssertNoGC nogc; - if (!calleeTypes) return NULL; RawObject obj = calleeTypes->getSingleton(); if (!obj || !obj->isFunction()) return NULL; return obj->toFunction(); @@ -5155,18 +5153,16 @@ TestSingletonPropertyTypes(JSContext *cx // value at the top of the stack. // (4) If a type barrier is in place, and has a single type, an unbox // instruction replaces the top of the stack. // (5) Lastly, a type barrier instruction replaces the top of the stack. bool IonBuilder::pushTypeBarrier(MInstruction *ins, types::StackTypeSet *actual, types::StackTypeSet *observed) { - AutoAssertNoGC nogc; - // If the instruction has no side effects, we'll resume the entire operation. // The actual type barrier will occur in the interpreter. If the // instruction is effectful, even if it has a singleton type, there // must be a resume point capturing the original def, and resuming // to that point will explicitly monitor the new type. if (!actual) { JS_ASSERT(!observed);
--- a/js/src/ion/IonCaches.cpp +++ b/js/src/ion/IonCaches.cpp @@ -1772,17 +1772,16 @@ BindNameIC::attachGlobal(JSContext *cx, return linkAndAttachStub(cx, masm, ion, "global", rejoinOffset, &exitOffset); } static inline void GenerateScopeChainGuard(MacroAssembler &masm, JSObject *scopeObj, Register scopeObjReg, RawShape shape, Label *failures) { - AutoAssertNoGC nogc; if (scopeObj->isCall()) { // We can skip a guard on the call object if the script's bindings are // guaranteed to be immutable (and thus cannot introduce shadowing // variables). CallObject *callObj = &scopeObj->asCall(); if (!callObj->isForEval()) { RawFunction fun = &callObj->callee(); RawScript script = fun->nonLazyScript();
--- a/js/src/ion/IonFrames-inl.h +++ b/js/src/ion/IonFrames-inl.h @@ -79,17 +79,16 @@ IonFrameIterator::frameSize() const JS_ASSERT(type_ != IonFrame_Exit); return frameSize_; } // Returns the JSScript associated with the topmost Ion frame. inline RawScript GetTopIonJSScript(JSContext *cx, const SafepointIndex **safepointIndexOut, void **returnAddrOut) { - AutoAssertNoGC nogc; IonFrameIterator iter(cx->mainThread().ionTop); JS_ASSERT(iter.type() == IonFrame_Exit); ++iter; // If needed, grab the safepoint index. if (safepointIndexOut) *safepointIndexOut = iter.safepoint();
--- a/js/src/ion/IonFrames.cpp +++ b/js/src/ion/IonFrames.cpp @@ -49,17 +49,16 @@ IonFrameIterator::checkInvalidation() co { IonScript *dummy; return checkInvalidation(&dummy); } bool IonFrameIterator::checkInvalidation(IonScript **ionScriptOut) const { - AutoAssertNoGC nogc; uint8_t *returnAddr = returnAddressToFp(); RawScript script = this->script(); // N.B. the current IonScript is not the same as the frame's // IonScript if the frame has since been invalidated. IonScript *currentIonScript = script->ion; bool invalidated = !script->hasIonScript() || !currentIonScript->containsReturnAddress(returnAddr); if (!invalidated) @@ -153,17 +152,16 @@ IonFrameIterator::isEntryJSFrame() const return false; } return true; } RawScript IonFrameIterator::script() const { - AutoAssertNoGC nogc; JS_ASSERT(isScripted()); RawScript script = ScriptFromCalleeToken(calleeToken()); JS_ASSERT(script); return script; } Value * IonFrameIterator::nativeVp() const @@ -318,17 +316,16 @@ ion::HandleException(ResumeFromException // them. InlineFrameIterator frames(cx, &iter); for (;;) { CloseLiveIterators(cx, frames); // When profiling, each frame popped needs a notification that // the function has exited, so invoke the probe that a function // is exiting. - AutoAssertNoGC nogc; RawScript script = frames.script(); Probes::exitScript(cx, script, script->function(), NULL); if (!frames.more()) break; ++frames; } IonScript *ionScript = NULL; @@ -929,17 +926,16 @@ InlineFrameIterator::InlineFrameIterator framesRead_ = iter->framesRead_ - 1; findNextFrame(); } } void InlineFrameIterator::findNextFrame() { - AutoAssertNoGC nogc; JS_ASSERT(more()); si_ = start_; // Read the initial frame. 
callee_ = frame_->maybeCallee(); script_ = frame_->script(); pc_ = script_->code + si_.pcOffset(); @@ -1177,17 +1173,16 @@ struct DumpOp { #endif i_++; } }; void InlineFrameIterator::dump() const { - AutoAssertNoGC nogc; if (more()) fprintf(stderr, " JS frame (inlined)\n"); else fprintf(stderr, " JS frame\n"); bool isFunction = false; if (isFunctionFrame()) { isFunction = true;
--- a/js/src/ion/IonFrames.h +++ b/js/src/ion/IonFrames.h @@ -64,17 +64,16 @@ CalleeTokenToScript(CalleeToken token) { JS_ASSERT(GetCalleeTokenTag(token) == CalleeToken_Script); return (RawScript)(uintptr_t(token) & ~uintptr_t(0x3)); } static inline RawScript ScriptFromCalleeToken(CalleeToken token) { - AutoAssertNoGC nogc; switch (GetCalleeTokenTag(token)) { case CalleeToken_Script: return CalleeTokenToScript(token); case CalleeToken_Function: return CalleeTokenToFunction(token)->nonLazyScript(); } JS_NOT_REACHED("invalid callee token tag"); return NULL;
--- a/js/src/ion/VMFunctions.cpp +++ b/js/src/ion/VMFunctions.cpp @@ -249,17 +249,16 @@ StringsEqual(JSContext *cx, HandleString } template bool StringsEqual<true>(JSContext *cx, HandleString lhs, HandleString rhs, JSBool *res); template bool StringsEqual<false>(JSContext *cx, HandleString lhs, HandleString rhs, JSBool *res); JSBool ObjectEmulatesUndefined(RawObject obj) { - AutoAssertNoGC nogc; return EmulatesUndefined(obj); } bool IteratorMore(JSContext *cx, HandleObject obj, JSBool *res) { RootedValue tmp(cx); if (!js_IteratorMore(cx, obj, &tmp))
--- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -3342,17 +3342,16 @@ JS_NewObject(JSContext *cx, JSClass *jsc Class *clasp = Valueify(jsclasp); if (!clasp) clasp = &ObjectClass; /* default class is Object */ JS_ASSERT(clasp != &FunctionClass); JS_ASSERT(!(clasp->flags & JSCLASS_IS_GLOBAL)); JSObject *obj = NewObjectWithClassProto(cx, clasp, proto, parent); - AutoAssertNoGC nogc; if (obj) { TypeObjectFlags flags = 0; if (clasp->emulatesUndefined()) flags |= OBJECT_FLAG_EMULATES_UNDEFINED; if (flags) MarkTypeObjectFlags(cx, obj, flags); } @@ -3373,17 +3372,16 @@ JS_NewObjectWithGivenProto(JSContext *cx Class *clasp = Valueify(jsclasp); if (!clasp) clasp = &ObjectClass; /* default class is Object */ JS_ASSERT(clasp != &FunctionClass); JS_ASSERT(!(clasp->flags & JSCLASS_IS_GLOBAL)); JSObject *obj = NewObjectWithGivenProto(cx, clasp, proto, parent); - AutoAssertNoGC nogc; if (obj) MarkTypeObjectUnknownProperties(cx, obj->type()); return obj; } JS_PUBLIC_API(JSObject *) JS_NewObjectForConstructor(JSContext *cx, JSClass *clasp, const jsval *vp) { @@ -4591,17 +4589,16 @@ JS_NewPropertyIterator(JSContext *cx, JS iterobj->setSlot(JSSLOT_ITER_INDEX, Int32Value(index)); return iterobj; } JS_PUBLIC_API(JSBool) JS_NextProperty(JSContext *cx, JSObject *iterobjArg, jsid *idp) { RootedObject iterobj(cx, iterobjArg); - AutoAssertNoGC nogc; AssertHeapIsIdle(cx); CHECK_REQUEST(cx); assertSameCompartment(cx, iterobj); int32_t i = iterobj->getSlot(JSSLOT_ITER_INDEX).toInt32(); if (i < 0) { /* Native case: private data is a property tree node pointer. 
*/ JS_ASSERT(iterobj->getParent()->isNative()); @@ -6131,31 +6128,29 @@ JS_DecodeBytes(JSContext *cx, const char AssertHeapIsIdle(cx); CHECK_REQUEST(cx); return InflateStringToBuffer(cx, src, srclen, dst, dstlenp); } JS_PUBLIC_API(char *) JS_EncodeString(JSContext *cx, JSRawString str) { - AutoAssertNoGC nogc; AssertHeapIsIdle(cx); CHECK_REQUEST(cx); JSLinearString *linear = str->ensureLinear(cx); if (!linear) return NULL; return LossyTwoByteCharsToNewLatin1CharsZ(cx, linear->range()).c_str(); } JS_PUBLIC_API(char *) JS_EncodeStringToUTF8(JSContext *cx, JSRawString str) { - AutoAssertNoGC nogc; AssertHeapIsIdle(cx); CHECK_REQUEST(cx); JSLinearString *linear = str->ensureLinear(cx); if (!linear) return NULL; return TwoByteCharsToNewUTF8CharsZ(cx, linear->range()).c_str(); @@ -7034,18 +7029,16 @@ JS_IsIdentifier(JSContext *cx, JSString *isIdentifier = js::frontend::IsIdentifier(linearStr); return true; } JS_PUBLIC_API(JSBool) JS_DescribeScriptedCaller(JSContext *cx, JSScript **script, unsigned *lineno) { - AutoAssertNoGC nogc; - if (script) *script = NULL; if (lineno) *lineno = 0; ScriptFrameIter i(cx); if (i.done()) return JS_FALSE;
--- a/js/src/jsarray.cpp +++ b/js/src/jsarray.cpp @@ -1326,18 +1326,16 @@ enum ComparatorMatchResult { /* * Specialize behavior for comparator functions with particular common bytecode * patterns: namely, |return x - y| and |return y - x|. */ ComparatorMatchResult MatchNumericComparator(const Value &v) { - AutoAssertNoGC nogc; - if (!v.isObject()) return Match_None; JSObject &obj = v.toObject(); if (!obj.isFunction()) return Match_None; JSFunction *fun = obj.toFunction();
--- a/js/src/jsboolinlines.h +++ b/js/src/jsboolinlines.h @@ -29,17 +29,16 @@ BooleanGetPrimitiveValue(JSContext *cx, } return BooleanGetPrimitiveValueSlow(cx, obj, vp); } inline bool EmulatesUndefined(RawObject obj) { - AutoAssertNoGC nogc; RawObject actual = MOZ_LIKELY(!obj->isWrapper()) ? obj : UnwrapObject(obj); bool emulatesUndefined = actual->getClass()->emulatesUndefined(); MOZ_ASSERT_IF(emulatesUndefined, obj->type()->flags & types::OBJECT_FLAG_EMULATES_UNDEFINED); return emulatesUndefined; } } /* namespace js */
--- a/js/src/jscntxt.cpp +++ b/js/src/jscntxt.cpp @@ -492,18 +492,16 @@ ReportError(JSContext *cx, const char *m /* * The given JSErrorReport object have been zeroed and must not outlive * cx->fp() (otherwise report->originPrincipals may become invalid). */ static void PopulateReportBlame(JSContext *cx, JSErrorReport *report) { - AutoAssertNoGC nogc; - /* * Walk stack until we find a frame that is associated with a non-builtin * rather than a builtin frame. */ NonBuiltinScriptFrameIter iter(cx); if (iter.done()) return; @@ -517,18 +515,16 @@ PopulateReportBlame(JSContext *cx, JSErr * complications of pre-allocating an exception object which required * running the Exception class initializer early etc. * Instead we just invoke the errorReporter with an "Out Of Memory" * type message, and then hope the process ends swiftly. */ void js_ReportOutOfMemory(JSContext *cx) { - AutoAssertNoGC nogc; - cx->runtime->hadOutOfMemory = true; JSErrorReport report; JSErrorReporter onError = cx->errorReporter; /* Get the message for this error, but we won't expand any arguments. */ const JSErrorFormatString *efs = js_GetLocalizedErrorMessage(cx, NULL, NULL, JSMSG_OUT_OF_MEMORY);
--- a/js/src/jsdbgapi.cpp +++ b/js/src/jsdbgapi.cpp @@ -842,17 +842,16 @@ GetAtomTotalSize(JSContext *cx, JSAtom * return sizeof(AtomStateEntry) + sizeof(HashNumber) + sizeof(JSString) + (atom->length() + 1) * sizeof(jschar); } JS_PUBLIC_API(size_t) JS_GetFunctionTotalSize(JSContext *cx, JSFunction *fun) { - AutoAssertNoGC nogc; size_t nbytes = sizeof *fun; nbytes += JS_GetObjectTotalSize(cx, fun); if (fun->isInterpreted()) nbytes += JS_GetScriptTotalSize(cx, fun->nonLazyScript()); if (fun->displayAtom()) nbytes += GetAtomTotalSize(cx, fun->displayAtom()); return nbytes; } @@ -1033,17 +1032,16 @@ js_CallContextDebugHandler(JSContext *cx default: return JS_TRUE; } } JS_PUBLIC_API(StackDescription *) JS::DescribeStack(JSContext *cx, unsigned maxFrames) { - AutoAssertNoGC nogc; Vector<FrameDescription> frames(cx); for (ScriptFrameIter i(cx); !i.done(); ++i) { FrameDescription desc; desc.script = i.script(); desc.lineno = PCToLineNumber(i.script(), i.pc()); desc.fun = i.maybeCallee(); if (!frames.append(desc))
--- a/js/src/jsfun.cpp +++ b/js/src/jsfun.cpp @@ -1108,18 +1108,16 @@ js::CallOrConstructBoundFunction(JSConte *vp = args.rval(); return true; } #if JS_HAS_GENERATORS static JSBool fun_isGenerator(JSContext *cx, unsigned argc, Value *vp) { - AutoAssertNoGC nogc; - RawFunction fun; if (!IsFunctionObject(vp[1], &fun)) { JS_SET_RVAL(cx, vp, BooleanValue(false)); return true; } bool result = false; if (fun->hasScript()) {
--- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -4324,17 +4324,17 @@ BudgetIncrementalGC(JSRuntime *rt, int64 * garbage. We disable inlining to ensure that the bottom of the stack with * possible GC roots recorded in MarkRuntime excludes any pointers we use during * the marking implementation. */ static JS_NEVER_INLINE void GCCycle(JSRuntime *rt, bool incremental, int64_t budget, JSGCInvocationKind gckind, gcreason::Reason reason) { /* If we attempt to invoke the GC while we are running in the GC, assert. */ - AutoAssertNoGC nogc; + JS_ASSERT(!rt->isHeapBusy()); #ifdef DEBUG for (ZonesIter zone(rt); !zone.done(); zone.next()) JS_ASSERT_IF(rt->gcMode == JSGC_MODE_GLOBAL, zone->isGCScheduled()); #endif AutoGCSession gcsession(rt);
--- a/js/src/jsgcinlines.h +++ b/js/src/jsgcinlines.h @@ -48,41 +48,38 @@ struct AutoMarkInDeadZone bool scheduled; }; namespace gc { inline JSGCTraceKind GetGCThingTraceKind(const void *thing) { - AutoAssertNoGC nogc; JS_ASSERT(thing); const Cell *cell = reinterpret_cast<const Cell *>(thing); return MapAllocToTraceKind(cell->getAllocKind()); } /* Capacity for slotsToThingKind */ const size_t SLOTS_TO_THING_KIND_LIMIT = 17; extern AllocKind slotsToThingKind[]; /* Get the best kind to use when making an object with the given slot count. */ static inline AllocKind GetGCObjectKind(size_t numSlots) { - AutoAssertNoGC nogc; if (numSlots >= SLOTS_TO_THING_KIND_LIMIT) return FINALIZE_OBJECT16; return slotsToThingKind[numSlots]; } static inline AllocKind GetGCObjectKind(Class *clasp) { - AutoAssertNoGC nogc; if (clasp == &FunctionClass) return JSFunction::FinalizeKind; uint32_t nslots = JSCLASS_RESERVED_SLOTS(clasp); if (clasp->flags & JSCLASS_HAS_PRIVATE) nslots++; return GetGCObjectKind(nslots); } @@ -93,62 +90,57 @@ GetGCArrayKind(size_t numSlots) extern AllocKind slotsToThingKind[]; /* * Dense arrays can use their fixed slots to hold their elements array * (less two Values worth of ObjectElements header), but if more than the * maximum number of fixed slots is needed then the fixed slots will be * unused. 
*/ - AutoAssertNoGC nogc; JS_STATIC_ASSERT(ObjectElements::VALUES_PER_HEADER == 2); if (numSlots > JSObject::NELEMENTS_LIMIT || numSlots + 2 >= SLOTS_TO_THING_KIND_LIMIT) return FINALIZE_OBJECT2; return slotsToThingKind[numSlots + 2]; } static inline AllocKind GetGCObjectFixedSlotsKind(size_t numFixedSlots) { extern AllocKind slotsToThingKind[]; - AutoAssertNoGC nogc; JS_ASSERT(numFixedSlots < SLOTS_TO_THING_KIND_LIMIT); return slotsToThingKind[numFixedSlots]; } static inline AllocKind GetBackgroundAllocKind(AllocKind kind) { - AutoAssertNoGC nogc; JS_ASSERT(!IsBackgroundFinalized(kind)); JS_ASSERT(kind <= FINALIZE_OBJECT_LAST); return (AllocKind) (kind + 1); } /* * Try to get the next larger size for an object, keeping BACKGROUND * consistent. */ static inline bool TryIncrementAllocKind(AllocKind *kindp) { - AutoAssertNoGC nogc; size_t next = size_t(*kindp) + 2; if (next >= size_t(FINALIZE_OBJECT_LIMIT)) return false; *kindp = AllocKind(next); return true; } /* Get the number of fixed slots and initial capacity associated with a kind. */ static inline size_t GetGCKindSlots(AllocKind thingKind) { - AutoAssertNoGC nogc; /* Using a switch in hopes that thingKind will usually be a compile-time constant. */ switch (thingKind) { case FINALIZE_OBJECT0: case FINALIZE_OBJECT0_BACKGROUND: return 0; case FINALIZE_OBJECT2: case FINALIZE_OBJECT2_BACKGROUND: return 2; @@ -168,17 +160,16 @@ GetGCKindSlots(AllocKind thingKind) JS_NOT_REACHED("Bad object finalize kind"); return 0; } } static inline size_t GetGCKindSlots(AllocKind thingKind, Class *clasp) { - AutoAssertNoGC nogc; size_t nslots = GetGCKindSlots(thingKind); /* An object's private data uses the space taken by its last fixed slot. 
*/ if (clasp->flags & JSCLASS_HAS_PRIVATE) { JS_ASSERT(nslots > 0); nslots--; } @@ -190,18 +181,16 @@ GetGCKindSlots(AllocKind thingKind, Clas nslots = 0; return nslots; } static inline void GCPoke(JSRuntime *rt) { - AutoAssertNoGC nogc; - rt->gcPoke = true; #ifdef JS_GC_ZEAL /* Schedule a GC to happen "soon" after a GC poke. */ if (rt->gcZeal() == js::gc::ZealPokeValue) rt->gcNextScheduled = 1; #endif }
--- a/js/src/jsinfer.cpp +++ b/js/src/jsinfer.cpp @@ -6020,17 +6020,16 @@ JSObject::makeLazyType(JSContext *cx, Ha /* De-lazification of functions can GC, so we need to do it up here. */ if (obj->isFunction() && obj->toFunction()->isInterpretedLazy()) { RootedFunction fun(cx, obj->toFunction()); if (!fun->getOrCreateScript(cx)) return NULL; } Rooted<TaggedProto> proto(cx, obj->getTaggedProto()); TypeObject *type = cx->compartment->types.newTypeObject(cx, obj->getClass(), proto); - AutoAssertNoGC nogc; if (!type) { if (cx->typeInferenceEnabled()) cx->compartment->types.setPendingNukeTypes(cx); return obj->type_; } if (!cx->typeInferenceEnabled()) { /* This can only happen if types were previously nuked. */
--- a/js/src/jsinferinlines.h +++ b/js/src/jsinferinlines.h @@ -270,17 +270,16 @@ TypeFlagPrimitive(TypeFlags flags) /* * Get the canonical representation of an id to use when doing inference. This * maintains the constraint that if two different jsids map to the same property * in JS (e.g. 3 and "3"), they have the same type representation. */ inline RawId IdToTypeId(RawId id) { - AutoAssertNoGC nogc; JS_ASSERT(!JSID_IS_EMPTY(id)); /* * All integers must map to the aggregate property for index types, including * negative integers. */ if (JSID_IS_INT(id)) return JSID_VOID; @@ -587,18 +586,16 @@ TypeMonitorCall(JSContext *cx, const js: } return true; } inline bool TrackPropertyTypes(JSContext *cx, RawObject obj, RawId id) { - AutoAssertNoGC nogc; - if (!cx->typeInferenceEnabled() || obj->hasLazyType() || obj->type()->unknownProperties()) return false; if (obj->hasSingletonType() && !obj->type()->maybeGetProperty(id, cx)) return false; return true; } @@ -678,17 +675,16 @@ MarkTypePropertyConfigured(JSContext *cx if (TrackPropertyTypes(cx, obj, id)) obj->type()->markPropertyConfigured(cx, id); } /* Mark a state change on a particular object. */ inline void MarkObjectStateChange(JSContext *cx, RawObject obj) { - AutoAssertNoGC nogc; if (cx->typeInferenceEnabled() && !obj->hasLazyType() && !obj->type()->unknownProperties()) obj->type()->markStateChange(cx); } /* * For an array or object which has not yet escaped and been referenced elsewhere, * pick a new type based on the object's current contents. 
*/ @@ -721,18 +717,16 @@ UseNewTypeAtEntry(JSContext *cx, StackFr JSScript *prevScript = fp->prev()->script(); return UseNewType(cx, prevScript, fp->prevpc()); } inline bool UseNewTypeForClone(JSFunction *fun) { - AutoAssertNoGC nogc; - if (!fun->isInterpreted()) return false; if (fun->nonLazyScript()->shouldCloneAtCallsite) return true; if (fun->hasSingletonType()) return false; @@ -793,57 +787,52 @@ UseNewTypeForClone(JSFunction *fun) TypeScript::NumTypeSets(RawScript script) { return script->nTypeSets + analyze::TotalSlots(script); } /* static */ inline HeapTypeSet * TypeScript::ReturnTypes(RawScript script) { - AutoAssertNoGC nogc; TypeSet *types = script->types->typeArray() + script->nTypeSets + js::analyze::CalleeSlot(); return types->toHeapTypeSet(); } /* static */ inline StackTypeSet * TypeScript::ThisTypes(RawScript script) { - AutoAssertNoGC nogc; TypeSet *types = script->types->typeArray() + script->nTypeSets + js::analyze::ThisSlot(); return types->toStackTypeSet(); } /* * Note: for non-escaping arguments and locals, argTypes/localTypes reflect * only the initial type of the variable (e.g. passed values for argTypes, * or undefined for localTypes) and not types from subsequent assignments. 
*/ /* static */ inline StackTypeSet * TypeScript::ArgTypes(RawScript script, unsigned i) { - AutoAssertNoGC nogc; JS_ASSERT(i < script->function()->nargs); TypeSet *types = script->types->typeArray() + script->nTypeSets + js::analyze::ArgSlot(i); return types->toStackTypeSet(); } /* static */ inline StackTypeSet * TypeScript::LocalTypes(RawScript script, unsigned i) { - AutoAssertNoGC nogc; JS_ASSERT(i < script->nfixed); TypeSet *types = script->types->typeArray() + script->nTypeSets + js::analyze::LocalSlot(script, i); return types->toStackTypeSet(); } /* static */ inline StackTypeSet * TypeScript::SlotTypes(RawScript script, unsigned slot) { - AutoAssertNoGC nogc; JS_ASSERT(slot < js::analyze::TotalSlots(script)); TypeSet *types = script->types->typeArray() + script->nTypeSets + slot; return types->toStackTypeSet(); } /* static */ inline TypeObject * TypeScript::StandardType(JSContext *cx, JSProtoKey key) { @@ -963,17 +952,16 @@ TypeScript::MonitorUnknown(JSContext *cx { if (cx->typeInferenceEnabled()) TypeDynamicResult(cx, script, pc, Type::UnknownType()); } /* static */ inline void TypeScript::GetPcScript(JSContext *cx, JSScript **script, jsbytecode **pc) { - AutoAssertNoGC nogc; #ifdef JS_ION if (cx->fp()->beginsIonActivation()) { ion::GetPcScript(cx, script, pc); return; } #endif *script = cx->fp()->script(); *pc = cx->regs().pc; @@ -1605,17 +1593,16 @@ TypeObject::getProperty(JSContext *cx, R if (!*pprop) { setBasePropertyCount(propertyCount); if (!addProperty(cx, id, pprop)) { setBasePropertyCount(0); propertySet = NULL; return NULL; } - AutoAssertNoGC nogc; if (propertyCount == OBJECT_FLAG_PROPERTY_COUNT_LIMIT) { markUnknown(cx); /* * Return an arbitrary property in the object, as all have unknown * type and are treated as configured. 
*/ unsigned count = getPropertyCount(); @@ -1634,17 +1621,16 @@ TypeObject::getProperty(JSContext *cx, R types->setOwnProperty(cx, false); return types; } inline HeapTypeSet * TypeObject::maybeGetProperty(RawId id, JSContext *cx) { - AutoAssertNoGC nogc; JS_ASSERT(JSID_IS_VOID(id) || JSID_IS_EMPTY(id) || JSID_IS_STRING(id)); JS_ASSERT_IF(!JSID_IS_EMPTY(id), id == IdToTypeId(id)); JS_ASSERT(!unknownProperties()); Property *prop = HashSetLookup<RawId,Property,Property> (propertySet, basePropertyCount(), id); return prop ? &prop->types : NULL;
--- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -789,17 +789,16 @@ js::UnwindScope(JSContext *cx, AbstractF } } } void js::UnwindForUncatchableException(JSContext *cx, const FrameRegs &regs) { /* c.f. the regular (catchable) TryNoteIter loop in Interpret. */ - AutoAssertNoGC nogc; for (TryNoteIter tni(cx, regs); !tni.done(); ++tni) { JSTryNote *tn = *tni; if (tn->kind == JSTRY_ITER) { Value *sp = regs.spForStackDepth(tn->stackDepth); UnwindIteratorForUncatchableException(cx, &sp[-1].toObject()); } } }
--- a/js/src/jsinterpinlines.h +++ b/js/src/jsinterpinlines.h @@ -119,17 +119,16 @@ ComputeThis(JSContext *cx, AbstractFrame * temporary, the arguments object has been created a some other failed guard * that called JSScript::argumentsOptimizationFailed. In this case, it is * always valid (and necessary) to replace JS_OPTIMIZED_ARGUMENTS with the real * arguments object. */ static inline bool IsOptimizedArguments(AbstractFramePtr frame, Value *vp) { - AutoAssertNoGC nogc; if (vp->isMagic(JS_OPTIMIZED_ARGUMENTS) && frame.script()->needsArgsObj()) *vp = ObjectValue(frame.argsObj()); return vp->isMagic(JS_OPTIMIZED_ARGUMENTS); } /* * One optimized consumer of MagicValue(JS_OPTIMIZED_ARGUMENTS) is f.apply. * However, this speculation must be guarded before calling 'apply' in case it
--- a/js/src/jsiter.cpp +++ b/js/src/jsiter.cpp @@ -642,17 +642,16 @@ js::GetIterator(JSContext *cx, HandleObj /* * The iterator object for JSITER_ENUMERATE never escapes, so we * don't care for the proper parent/proto to be set. This also * allows us to re-use a previous iterator object that is not * currently active. */ { - AutoAssertNoGC nogc; RawObject pobj = obj; do { if (!pobj->isNative() || !pobj->hasEmptyElements() || pobj->hasUncacheableProto() || obj->getOps()->enumerate || pobj->getClass()->enumerate != JS_EnumerateStub) { shapes.clear();
--- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -1881,17 +1881,16 @@ JSObject::ReserveForTradeGuts(JSContext } return true; } void JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved &reserved) { - AutoAssertNoGC nogc; JS_ASSERT(a->compartment() == b->compartment()); JS_ASSERT(a->isFunction() == b->isFunction()); /* * Swap the object's types, to restore their initial type information. * The prototypes and classes of the objects were swapped in ReserveForTradeGuts. */ TypeObject *tmp = a->type_; @@ -4354,18 +4353,16 @@ baseops::DeleteSpecial(JSContext *cx, Ha { Rooted<jsid> id(cx, SPECIALID_TO_JSID(sid)); return baseops::DeleteGeneric(cx, obj, id, rval, strict); } bool js::HasDataProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp) { - AutoAssertNoGC nogc; - if (JSID_IS_INT(id) && obj->containsDenseElement(JSID_TO_INT(id))) { *vp = obj->getDenseElement(JSID_TO_INT(id)); return true; } if (RawShape shape = obj->nativeLookup(cx, id)) { if (shape->hasDefaultGetter() && shape->hasSlot()) { *vp = obj->nativeGetSlot(shape->slot()); @@ -4778,17 +4775,16 @@ js_GetterOnlyPropertyStub(JSContext *cx, * Routines to print out values during debugging. These are FRIEND_API to help * the debugger find them and to support temporarily hacking js_Dump* calls * into other code. 
*/ void dumpValue(const Value &v) { - AutoAssertNoGC nogc; if (v.isNull()) fprintf(stderr, "null"); else if (v.isUndefined()) fprintf(stderr, "undefined"); else if (v.isInt32()) fprintf(stderr, "%d", v.toInt32()); else if (v.isDouble()) fprintf(stderr, "%g", v.toDouble()); @@ -5059,17 +5055,16 @@ js_DumpStackFrame(JSContext *cx, StackFr } } #endif /* DEBUG */ JS_FRIEND_API(void) js_DumpBacktrace(JSContext *cx) { - AutoAssertNoGC nogc; Sprinter sprinter(cx); sprinter.init(); size_t depth = 0; for (StackIter i(cx); !i.done(); ++i, ++depth) { if (i.isScript()) { const char *filename = JS_GetScriptFilename(cx, i.script()); unsigned line = JS_PCToLineNumber(cx, i.script(), i.pc()); RawScript script = i.script();
--- a/js/src/jspropertycacheinlines.h +++ b/js/src/jspropertycacheinlines.h @@ -27,18 +27,16 @@ * We must lock pobj on a hit in order to close races with threads that might * be deleting a property from its scope, or otherwise invalidating property * caches (on all threads) by re-generating JSObject::shape(). */ JS_ALWAYS_INLINE void js::PropertyCache::test(JSContext *cx, jsbytecode *pc, JSObject **obj, JSObject **pobj, PropertyCacheEntry **entry, PropertyName **name) { - AutoAssertNoGC nogc; - JS_ASSERT(this == &cx->propertyCache()); RawShape kshape = (*obj)->lastProperty(); *entry = &table[hash(pc, kshape)]; PCMETER(pctestentry = *entry); PCMETER(tests++); JS_ASSERT(obj != pobj); if ((*entry)->kpc == pc && (*entry)->kshape == kshape) { @@ -60,18 +58,16 @@ js::PropertyCache::test(JSContext *cx, j if (!*name) PCMETER(misses++); } JS_ALWAYS_INLINE bool js::PropertyCache::testForSet(JSContext *cx, jsbytecode *pc, JSObject *obj, PropertyCacheEntry **entryp, JSObject **obj2p, PropertyName **namep) { - AutoAssertNoGC nogc; - JS_ASSERT(this == &cx->propertyCache()); RawShape kshape = obj->lastProperty(); PropertyCacheEntry *entry = &table[hash(pc, kshape)]; *entryp = entry; PCMETER(pctestentry = entry); PCMETER(tests++); PCMETER(settests++);
--- a/js/src/jsscript.cpp +++ b/js/src/jsscript.cpp @@ -2255,25 +2255,23 @@ js_GetScriptLineExtent(RawScript script) lineno = maxLineNo; return 1 + lineno - script->lineno; } unsigned js::CurrentLine(JSContext *cx) { - AutoAssertNoGC nogc; return PCToLineNumber(cx->fp()->script(), cx->regs().pc); } void js::CurrentScriptFileLineOriginSlow(JSContext *cx, const char **file, unsigned *linenop, JSPrincipals **origin) { - AutoAssertNoGC nogc; NonBuiltinScriptFrameIter iter(cx); if (iter.done()) { *file = NULL; *linenop = 0; *origin = NULL; return; } @@ -2385,17 +2383,16 @@ js::CloneScript(JSContext *cx, HandleObj .setUserBit(src->userBit); RootedScript dst(cx, JSScript::Create(cx, enclosingScope, src->savedCallerFun, options, src->staticLevel, src->scriptSource(), src->sourceStart, src->sourceEnd)); if (!dst) { js_free(data); return NULL; } - AutoAssertNoGC nogc; dst->bindings = bindings; /* This assignment must occur before all the Rebase calls. */ dst->data = data; dst->dataSize = size; memcpy(data, src->data, size);
--- a/js/src/jsscriptinlines.h +++ b/js/src/jsscriptinlines.h @@ -40,17 +40,16 @@ AliasedFormalIter::AliasedFormalIter(js: extern void CurrentScriptFileLineOriginSlow(JSContext *cx, const char **file, unsigned *linenop, JSPrincipals **origin); inline void CurrentScriptFileLineOrigin(JSContext *cx, const char **file, unsigned *linenop, JSPrincipals **origin, LineOption opt = NOT_CALLED_FROM_JSOP_EVAL) { if (opt == CALLED_FROM_JSOP_EVAL) { - AutoAssertNoGC nogc; JSScript *script = NULL; jsbytecode *pc = NULL; types::TypeScript::GetPcScript(cx, &script, &pc); JS_ASSERT(JSOp(*pc) == JSOP_EVAL); JS_ASSERT(*(pc + JSOP_EVAL_LENGTH) == JSOP_LINENO); *file = script->filename; *linenop = GET_UINT16(pc + JSOP_EVAL_LENGTH); *origin = script->originPrincipals;
--- a/js/src/jsstr.cpp +++ b/js/src/jsstr.cpp @@ -1176,18 +1176,16 @@ str_contains(JSContext *cx, unsigned arg } else { double d; if (!ToInteger(cx, args[1], &d)) return false; pos = uint32_t(Min(Max(d, 0.0), double(UINT32_MAX))); } } - AutoAssertNoGC nogc; - // Step 8 uint32_t textLen = str->length(); const jschar *textChars = str->getChars(cx); if (!textChars) return false; // Step 9 uint32_t start = Min(Max(pos, 0U), textLen); @@ -1230,19 +1228,17 @@ str_indexOf(JSContext *cx, unsigned argc } else { double d; if (!ToInteger(cx, args[1], &d)) return false; pos = uint32_t(Min(Max(d, 0.0), double(UINT32_MAX))); } } - AutoAssertNoGC nogc; - - // Step 8 + // Step 8 uint32_t textLen = str->length(); const jschar *textChars = str->getChars(cx); if (!textChars) return false; // Step 9 uint32_t start = Min(Max(pos, 0U), textLen); @@ -1361,18 +1357,16 @@ str_startsWith(JSContext *cx, unsigned a } else { double d; if (!ToInteger(cx, args[1], &d)) return false; pos = uint32_t(Min(Max(d, 0.0), double(UINT32_MAX))); } } - AutoAssertNoGC nogc; - // Step 8 uint32_t textLen = str->length(); const jschar *textChars = str->getChars(cx); if (!textChars) return false; // Step 9 uint32_t start = Min(Max(pos, 0U), textLen); @@ -1421,18 +1415,16 @@ str_endsWith(JSContext *cx, unsigned arg } else { double d; if (!ToInteger(cx, args[1], &d)) return false; pos = uint32_t(Min(Max(d, 0.0), double(UINT32_MAX))); } } - AutoAssertNoGC nogc; - // Step 6 const jschar *textChars = str->getChars(cx); if (!textChars) return false; // Step 9 uint32_t end = Min(Max(pos, 0U), textLen); @@ -2611,18 +2603,16 @@ static const uint32_t ReplaceOptArg = 2; * object, e.g. 'function(a) { return b[a]; }'. Avoid calling the script in * such cases, which are used by javascript packers (particularly the popular * Dean Edwards packer) to efficiently encode large scripts. We only handle the * code patterns generated by such packers here. 
*/ static JSObject * LambdaIsGetElem(JSObject &lambda) { - AutoAssertNoGC nogc; - if (!lambda.isFunction()) return NULL; JSFunction *fun = lambda.toFunction(); if (!fun->hasScript()) return NULL; RawScript script = fun->nonLazyScript(); @@ -2946,17 +2936,16 @@ class SplitStringMatcher SplitStringMatcher(JSContext *cx, HandleLinearString sep) : sep(cx, sep) {} static const bool returnsCaptures = false; bool operator()(JSContext *cx, JSLinearString *str, size_t index, SplitMatchResult *res) const { - AutoAssertNoGC nogc; JS_ASSERT(index == 0 || index < str->length()); const jschar *chars = str->chars(); int match = StringMatch(chars + index, str->length() - index, sep->chars(), sep->length()); if (match == -1) res->setFailure(); else res->setResult(sep->length(), index + match + sep->length());
--- a/js/src/jsworkers.cpp +++ b/js/src/jsworkers.cpp @@ -67,18 +67,16 @@ CompiledScriptMatches(JSCompartment *com if (script) return target == script; return target->compartment() == compartment; } void js::CancelOffThreadIonCompile(JSCompartment *compartment, JSScript *script) { - AutoAssertNoGC nogc; - if (!compartment->rt->workerThreadState) return; WorkerThreadState &state = *compartment->rt->workerThreadState; ion::IonCompartment *ion = compartment->ionCompartment(); if (!ion) return;
--- a/js/src/methodjit/BaseAssembler.h +++ b/js/src/methodjit/BaseAssembler.h @@ -143,17 +143,16 @@ class Assembler : public ValueAssembler stackAdjust(0), #ifdef DEBUG callIsAligned(false), #endif sps(sps), vmframe(vmframe), pc(NULL) { - AutoAssertNoGC nogc; startLabel = label(); if (vmframe) sps->setPushed(vmframe->script()); } Assembler(MJITInstrumentation *sps, jsbytecode **pc) : callPatches(SystemAllocPolicy()), availInCall(0), @@ -1340,17 +1339,16 @@ static const JSC::MacroAssembler::Regist /* * Get a free object for the specified GC kind in compartment, writing it * to result and filling it in according to templateObject. Returns a jump * taken if a free thing was not retrieved. Note: don't call this directly, * use Compiler::getNewObject instead. */ Jump getNewObject(JSContext *cx, RegisterID result, JSObject *templateObject) { - AutoAssertNoGC nogc; gc::AllocKind allocKind = templateObject->getAllocKind(); JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST); int thingSize = (int)gc::Arena::thingSize(allocKind); JS_ASSERT(cx->typeInferenceEnabled()); JS_ASSERT(!templateObject->hasDynamicSlots()); JS_ASSERT(!templateObject->hasDynamicElements());
--- a/js/src/methodjit/BaseCompiler.h +++ b/js/src/methodjit/BaseCompiler.h @@ -130,17 +130,16 @@ class LinkerHelper : public JSC::LinkBuf js_ReportOutOfMemory(cx); return NULL; } m_size = masm.size(); // must come after call to executableAllocAndCopy()! return pool; } JSC::CodeLocationLabel finalize(VMFrame &f) { - AutoAssertNoGC nogc; masm.finalize(*this); JSC::CodeLocationLabel label = finalizeCodeAddendum(); Probes::registerICCode(f.cx, f.chunk(), f.script(), f.pc(), label.executableAddress(), masm.size()); return label; } void maybeLink(MaybeJump jump, JSC::CodeLocationLabel label) {
--- a/js/src/methodjit/InvokeHelpers.cpp +++ b/js/src/methodjit/InvokeHelpers.cpp @@ -718,24 +718,22 @@ stubs::ScriptDebugEpilogue(VMFrame &f) { if (!js::ScriptDebugEpilogue(f.cx, f.fp(), JS_TRUE)) THROW(); } void JS_FASTCALL stubs::ScriptProbeOnlyPrologue(VMFrame &f) { - AutoAssertNoGC nogc; Probes::enterScript(f.cx, f.script(), f.script()->function(), f.fp()); } void JS_FASTCALL stubs::ScriptProbeOnlyEpilogue(VMFrame &f) { - AutoAssertNoGC nogc; Probes::exitScript(f.cx, f.script(), f.script()->function(), f.fp()); } void JS_FASTCALL stubs::CrossChunkShim(VMFrame &f, void *edge_) { AssertCanGC(); DebugOnly<CrossChunkEdge*> edge = (CrossChunkEdge *) edge_;
--- a/js/src/methodjit/MethodJIT.h +++ b/js/src/methodjit/MethodJIT.h @@ -1070,17 +1070,16 @@ VMFrame::script() if (regs.inlined()) return chunk()->inlineFrames()[regs.inlined()->inlineIndex].fun->nonLazyScript(); return fp()->script(); } inline jsbytecode * VMFrame::pc() { - AutoAssertNoGC nogc; if (regs.inlined()) return script()->code + regs.inlined()->pcOffset; return regs.pc; } } /* namespace js */ inline void *
--- a/js/src/methodjit/MonoIC.cpp +++ b/js/src/methodjit/MonoIC.cpp @@ -423,18 +423,16 @@ NativeStubLinker::init(JSContext *cx) * observed types for the opcode or loads the result into a register pair * (it will go through a type barrier afterwards). */ bool mjit::NativeStubEpilogue(VMFrame &f, Assembler &masm, NativeStubLinker::FinalJump *result, int32_t initialFrameDepth, int32_t vpOffset, MaybeRegisterID typeReg, MaybeRegisterID dataReg) { - AutoAssertNoGC nogc; - /* Reload fp, which may have been clobbered by restoreStackBase(). */ masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg); Jump hasException = masm.branchTest32(Assembler::Zero, Registers::ReturnReg, Registers::ReturnReg); Address resultAddress(JSFrameReg, vpOffset); @@ -836,18 +834,16 @@ class CallCompiler : public BaseCompiler repatch.relink(ic.oolJump(), cs); return true; } #endif bool generateFullCallStub(JSScript *script, uint32_t flags) { - AutoAssertNoGC nogc; - /* * Create a stub that works with arity mismatches. Like the fast-path, * this allocates a frame on the caller side, but also performs extra * checks for compilability. Perhaps this should be a separate, shared * trampoline, but for now we generate it dynamically. */ Assembler masm; InlineFrameAssembler inlFrame(masm, ic, flags); @@ -983,17 +979,16 @@ class CallCompiler : public BaseCompiler ic.funGuard.executableAddress(), static_cast<void*>(ic.fastGuardedObject)); return true; } bool generateStubForClosures(JSObject *obj) { - AutoAssertNoGC nogc; JS_ASSERT(ic.frameSize.isStatic()); /* Slightly less fast path - guard on fun->script() instead. 
*/ Assembler masm; Registers tempRegs(Registers::AvailRegs); tempRegs.takeReg(ic.funObjReg); @@ -1211,18 +1206,16 @@ class CallCompiler : public BaseCompiler Repatcher repatch(f.chunk()); repatch.relink(ic.funJump, ic.funJumpTarget); return true; } bool generateCallsiteCloneStub(HandleFunction original, HandleFunction fun) { - AutoAssertNoGC nogc; - Assembler masm; // If we have a callsite clone, we do the folowing hack: // // 1) Patch funJump to a stub which guards on the identity of the // original function. If this guard fails, we jump to the original // funJump target. // 2) Load the clone into the callee register. @@ -1297,17 +1290,16 @@ class CallCompiler : public BaseCompiler JSFunction *fun = ucr.fun; if (!ucr.codeAddr) { // No JM code is available for this script yet. if (ucr.unjittable) disable(); #ifdef JS_ION - AutoAssertNoGC nogc; // If the following conditions pass, try to inline a call into // an IonMonkey JIT'd function. if (!callingNew && fun && !ic.hasJMStub() && !ic.hasIonStub() && ic.frameSize.isStatic() && @@ -1492,17 +1484,16 @@ ic::SplatApplyArgs(VMFrame &f) #if defined(_MSC_VER) # pragma optimize("", on) #endif void ic::GenerateArgumentCheckStub(VMFrame &f) { - AutoAssertNoGC nogc; JS_ASSERT(f.cx->typeInferenceEnabled()); JITScript *jit = f.jit(); StackFrame *fp = f.fp(); JSFunction *fun = fp->fun(); RawScript script = fun->nonLazyScript(); if (jit->argsCheckPool)
--- a/js/src/methodjit/PolyIC.cpp +++ b/js/src/methodjit/PolyIC.cpp @@ -110,17 +110,16 @@ class PICStubCompiler : public BaseCompi bool hadGC() { return gcNumber != f.cx->runtime->gcNumber; } protected: void spew(const char *event, const char *op) { #ifdef JS_METHODJIT_SPEW - AutoAssertNoGC nogc; JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n", type, event, op, f.script()->filename, CurrentLine(cx)); #endif } }; static bool GeneratePrototypeGuards(JSContext *cx, Vector<JSC::MacroAssembler::Jump,8> &mismatches, Assembler &masm, @@ -724,17 +723,16 @@ struct GetPropHelper { } if (!IsCacheableProtoChain(obj, holder)) return ic.disable(f, "non-native holder"); shape = prop; return Lookup_Cacheable; } LookupStatus testForGet() { - AutoAssertNoGC nogc; if (!shape->hasDefaultGetter()) { if (shape->hasGetterValue()) { JSObject *getterObj = shape->getterObject(); if (!getterObj->isFunction() || !getterObj->toFunction()->isNative()) return ic.disable(f, "getter object not a native function"); } if (shape->hasSlot() && holder != obj) return ic.disable(f, "slotful getter hook through prototype"); @@ -1047,18 +1045,16 @@ class GetPropCompiler : public PICStubCo return Lookup_Cacheable; } /* For JSPropertyOp getters. */ void generateGetterStub(Assembler &masm, RawShape shape, jsid userid, Label start, Vector<Jump, 8> &shapeMismatches) { - AutoAssertNoGC nogc; - /* * Getter hook needs to be called from the stub. The state is fully * synced and no registers are live except the result registers. */ JS_ASSERT(pic.canCallHook); PropertyOp getter = shape->getterOp(); masm.storePtr(ImmPtr((void *) REJOIN_NATIVE_GETTER), @@ -1159,19 +1155,17 @@ class GetPropCompiler : public PICStubCo linkerEpilogue(linker, start, shapeMismatches); } /* For getters backed by a JSNative. */ void generateNativeGetterStub(Assembler &masm, RawShape shape, Label start, Vector<Jump, 8> &shapeMismatches) { - AutoAssertNoGC nogc; - - /* + /* * Getter hook needs to be called from the stub. 
The state is fully * synced and no registers are live except the result registers. */ JS_ASSERT(pic.canCallHook); JSFunction *fun = shape->getterObject()->toFunction(); Native native = fun->native(); @@ -2166,29 +2160,26 @@ ic::BindName(VMFrame &f, ic::PICInfo *pi f.regs.sp[0].setObject(*obj); } void BaseIC::spew(VMFrame &f, const char *event, const char *message) { #ifdef JS_METHODJIT_SPEW - AutoAssertNoGC nogc; JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n", js_CodeName[JSOp(*f.pc())], event, message, f.cx->fp()->script()->filename, CurrentLine(f.cx)); #endif } /* Total length of scripts preceding a frame. */ inline uint32_t frameCountersOffset(VMFrame &f) { - AutoAssertNoGC nogc; - JSContext *cx = f.cx; uint32_t offset = 0; if (cx->regs().inlined()) { offset += cx->fp()->script()->length; uint32_t index = cx->regs().inlined()->inlineIndex; InlineFrame *frames = f.chunk()->inlineFrames(); for (unsigned i = 0; i < index; i++)
--- a/js/src/methodjit/Retcon.cpp +++ b/js/src/methodjit/Retcon.cpp @@ -135,18 +135,16 @@ Recompiler::patchNative(JSCompartment *c } JS_ASSERT(found); } void Recompiler::patchFrame(JSCompartment *compartment, VMFrame *f, JSScript *script) { - AutoAssertNoGC nogc; - /* * Check if the VMFrame returns directly into the script's jitcode. This * depends on the invariant that f->fp() reflects the frame at the point * where the call occurred, irregardless of any frames which were pushed * inside the call. */ StackFrame *fp = f->fp(); void **addr = f->returnAddressLocation(); @@ -180,18 +178,16 @@ Recompiler::patchFrame(JSCompartment *co } } } } StackFrame * Recompiler::expandInlineFrameChain(StackFrame *outer, InlineFrame *inner) { - AutoAssertNoGC nogc; - StackFrame *parent; if (inner->parent) parent = expandInlineFrameChain(outer, inner->parent); else parent = outer; JaegerSpew(JSpew_Recompile, "Expanding inline frame\n"); @@ -226,17 +222,16 @@ JITCodeReturnAddress(void *data) * Expand all inlined frames within fp per 'inlined' and update next and regs * to refer to the new innermost frame. */ void Recompiler::expandInlineFrames(JSCompartment *compartment, StackFrame *fp, mjit::CallSite *inlined, StackFrame *next, VMFrame *f) { - AutoAssertNoGC nogc; JS_ASSERT_IF(next, next->prev() == fp && next->prevInline() == inlined); /* * Treat any frame expansion as a recompilation event, so that f.jit() is * stable if no recompilations have occurred. 
*/ compartment->types.frameExpansions++; @@ -332,17 +327,16 @@ ExpandInlineFrames(JSCompartment *compar fp->setDownFramesExpanded(); } } } void ClearAllFrames(JSCompartment *compartment) { - AutoAssertNoGC nogc; if (!compartment || !compartment->rt->hasJaegerRuntime()) return; ExpandInlineFrames(compartment); compartment->types.recompilations++; for (VMFrame *f = compartment->rt->jaegerRuntime().activeFrame(); @@ -399,17 +393,16 @@ ClearAllFrames(JSCompartment *compartmen * * - For VMFrames whose entryncode address (the value of entryfp->ncode before * being clobbered with JaegerTrampolineReturn) is in the original script, * redirect that entryncode to the interpoline. */ void Recompiler::clearStackReferences(FreeOp *fop, JSScript *script) { - AutoAssertNoGC nogc; JS_ASSERT(script->hasMJITInfo()); JaegerSpew(JSpew_Recompile, "recompiling script (file \"%s\") (line \"%d\") (length \"%d\") (usecount=\"%d\")\n", script->filename, script->lineno, script->length, (int) script->getUseCount()); JSCompartment *comp = script->compartment(); types::AutoEnterAnalysis enter(fop, comp);
--- a/js/src/methodjit/StubCalls.cpp +++ b/js/src/methodjit/StubCalls.cpp @@ -836,17 +836,16 @@ stubs::TriggerIonCompile(VMFrame &f) f.jit()->destroyChunk(f.cx->runtime->defaultFreeOp(), f.chunkIndex(), /* resetUses = */ false); } #endif void JS_FASTCALL stubs::RecompileForInline(VMFrame &f) { - AutoAssertNoGC nogc; ExpandInlineFrames(f.cx->compartment); Recompiler::clearStackReferences(f.cx->runtime->defaultFreeOp(), f.script()); f.jit()->destroyChunk(f.cx->runtime->defaultFreeOp(), f.chunkIndex(), /* resetUses = */ false); } void JS_FASTCALL stubs::Trap(VMFrame &f, uint32_t trapTypes) { @@ -1551,17 +1550,16 @@ stubs::CheckArgumentTypes(VMFrame &f) ic::GenerateArgumentCheckStub(f); #endif } #ifdef DEBUG void JS_FASTCALL stubs::AssertArgumentTypes(VMFrame &f) { - AutoAssertNoGC nogc; StackFrame *fp = f.fp(); JSFunction *fun = fp->fun(); RawScript script = fun->nonLazyScript(); /* * Don't check the type of 'this' for constructor frames, the 'this' value * has not been constructed yet. */ @@ -1584,17 +1582,16 @@ stubs::AssertArgumentTypes(VMFrame &f) * there is an invariant failure when initially entering a loop. */ void JS_FASTCALL stubs::MissedBoundsCheckEntry(VMFrame &f) {} void JS_FASTCALL stubs::MissedBoundsCheckHead(VMFrame &f) {} void * JS_FASTCALL stubs::InvariantFailure(VMFrame &f, void *rval) { - AutoAssertNoGC nogc; /* * Patch this call to the return site of the call triggering the invariant * failure (or a MissedBoundsCheck* function if the failure occurred on * initial loop entry), and trigger a recompilation which will then * redirect to the rejoin point for that call. We want to make things look * to the recompiler like we are still inside that call, and that after * recompilation we will return to the call's rejoin point. */
--- a/js/src/vm/ArgumentsObject-inl.h +++ b/js/src/vm/ArgumentsObject-inl.h @@ -64,33 +64,31 @@ ArgumentsObject::setArg(unsigned i, cons HeapValue &lhs = data()->args[i]; JS_ASSERT(!lhs.isMagic(JS_FORWARD_TO_CALL_OBJECT)); lhs = v; } inline const Value & ArgumentsObject::element(uint32_t i) const { - AutoAssertNoGC nogc; JS_ASSERT(!isElementDeleted(i)); const Value &v = data()->args[i]; if (v.isMagic(JS_FORWARD_TO_CALL_OBJECT)) { CallObject &callobj = getFixedSlot(MAYBE_CALL_SLOT).toObject().asCall(); for (AliasedFormalIter fi(callobj.callee().nonLazyScript()); ; fi++) { if (fi.frameIndex() == i) return callobj.aliasedVar(fi); } } return v; } inline void ArgumentsObject::setElement(uint32_t i, const Value &v) { - AutoAssertNoGC nogc; JS_ASSERT(!isElementDeleted(i)); HeapValue &lhs = data()->args[i]; if (lhs.isMagic(JS_FORWARD_TO_CALL_OBJECT)) { CallObject &callobj = getFixedSlot(MAYBE_CALL_SLOT).toObject().asCall(); for (AliasedFormalIter fi(callobj.callee().nonLazyScript()); ; fi++) { if (fi.frameIndex() == i) { callobj.setAliasedVar(fi, v); return;
--- a/js/src/vm/CharacterEncoding.cpp +++ b/js/src/vm/CharacterEncoding.cpp @@ -12,17 +12,16 @@ #include "jscntxtinlines.h" #include "jsscriptinlines.h" using namespace JS; Latin1CharsZ JS::LossyTwoByteCharsToNewLatin1CharsZ(JSContext *cx, TwoByteChars tbchars) { - AutoAssertNoGC nogc; JS_ASSERT(cx); size_t len = tbchars.length(); unsigned char *latin1 = cx->pod_malloc<unsigned char>(len + 1); if (!latin1) return Latin1CharsZ(); for (size_t i = 0; i < len; ++i) latin1[i] = static_cast<unsigned char>(tbchars[i]); latin1[len] = '\0'; @@ -140,17 +139,16 @@ bufferTooSmall: JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BUFFER_TOO_SMALL); return false; } UTF8CharsZ JS::TwoByteCharsToNewUTF8CharsZ(JSContext *cx, TwoByteChars tbchars) { - AutoAssertNoGC nogc; JS_ASSERT(cx); /* Get required buffer size. */ jschar *str = tbchars.start().get(); size_t len = GetDeflatedUTF8StringLength(cx, str, tbchars.length()); /* Allocate buffer. */ unsigned char *utf8 = cx->pod_malloc<unsigned char>(len + 1);
--- a/js/src/vm/Debugger.cpp +++ b/js/src/vm/Debugger.cpp @@ -1232,17 +1232,16 @@ Debugger::onSingleStep(JSContext *cx, Mu * we're not receiving traps we didn't ask for. Even when frames is * non-empty (and thus we know this trap was requested), do the check * anyway, to make sure the count has the correct non-zero value. * * The converse --- ensuring that we do receive traps when we should --- can * be done with unit tests. */ { - AutoAssertNoGC nogc; uint32_t stepperCount = 0; RawScript trappingScript = iter.script(); GlobalObject *global = cx->global(); if (GlobalObject::DebuggerVector *debuggers = global->getDebuggers()) { for (Debugger **p = debuggers->begin(); p != debuggers->end(); p++) { Debugger *dbg = *p; for (FrameMap::Range r = dbg->frames.all(); !r.empty(); r.popFront()) { AbstractFramePtr frame = r.front().key; @@ -3733,17 +3732,16 @@ DebuggerFrame_getScript(JSContext *cx, u args.rval().setObjectOrNull(scriptObject); return true; } static JSBool DebuggerFrame_getOffset(JSContext *cx, unsigned argc, Value *vp) { THIS_FRAME(cx, argc, vp, "get offset", args, thisobj, iter); - AutoAssertNoGC nogc; RawScript script = iter.script(); iter.updatePcQuadratic(); jsbytecode *pc = iter.pc(); JS_ASSERT(script->code <= pc); JS_ASSERT(pc < script->code + script->length); size_t offset = pc - script->code; args.rval().setNumber(double(offset)); return true;
--- a/js/src/vm/ObjectImpl.cpp +++ b/js/src/vm/ObjectImpl.cpp @@ -288,17 +288,16 @@ js::ObjectImpl::slotInRange(uint32_t slo * causes stack pointer offsets to go awry and spp to refer to something higher * up the stack. */ MOZ_NEVER_INLINE #endif RawShape js::ObjectImpl::nativeLookup(JSContext *cx, jsid id) { - AutoAssertNoGC nogc; MOZ_ASSERT(isNative()); Shape **spp; return Shape::search(cx, lastProperty(), id, &spp); } void js::ObjectImpl::markChildren(JSTracer *trc) {
--- a/js/src/vm/SPSProfiler.cpp +++ b/js/src/vm/SPSProfiler.cpp @@ -371,17 +371,16 @@ SPSProfiler::registerICCode(mjit::JITChu JS_ASSERT(ptr); return ptr->value->ics.append(ICInfo(base, size, pc)); } void SPSProfiler::discardMJITCode(mjit::JITScript *jscr, mjit::JITChunk *chunk, void* address) { - AutoAssertNoGC nogc; if (!jminfo.initialized()) return; unregisterScript(jscr->script, chunk); for (unsigned i = 0; i < chunk->nInlineFrames; i++) unregisterScript(chunk->inlineFrames()[i].fun->nonLazyScript(), chunk); }
--- a/js/src/vm/ScopeObject.cpp +++ b/js/src/vm/ScopeObject.cpp @@ -654,17 +654,16 @@ ClonedBlockObject::create(JSContext *cx, JS_ASSERT(obj->isDelegate()); return &obj->asClonedBlock(); } void ClonedBlockObject::copyUnaliasedValues(AbstractFramePtr frame) { - AutoAssertNoGC nogc; StaticBlockObject &block = staticBlock(); unsigned base = frame.script()->nfixed + block.stackDepth(); for (unsigned i = 0; i < slotCount(); ++i) { if (!block.isAliased(i)) setVar(i, frame.unaliasedLocal(base + i), DONT_CHECK_ALIASING); } } @@ -1009,17 +1008,16 @@ ScopeIter::operator++() break; } return *this; } void ScopeIter::settle() { - AutoAssertNoGC nogc; /* * Given an iterator state (cur_, block_), figure out which (potentially * optimized) scope the iterator should report. Thus, the result is a pair * (type_, hasScopeObject_) where hasScopeObject_ indicates whether the * scope object has been optimized away and does not exist on the scope * chain. Beware: while ScopeIter iterates over the scopes of a single * frame, the scope chain (pointed to by cur_) continues into the scopes of * enclosing frames. Thus, it is important not to look at cur_ until it is @@ -1229,17 +1227,16 @@ class DebugScopeProxy : public BaseProxy /* Handle unaliased let and catch bindings at block scope. */ if (scope->isClonedBlock()) { Rooted<ClonedBlockObject *> block(cx, &scope->asClonedBlock()); RawShape shape = block->lastProperty()->search(cx, id); if (!shape) return false; - AutoAssertNoGC nogc; unsigned i = shape->shortid(); if (block->staticBlock().isAliased(i)) return false; if (maybeframe) { RawScript script = maybeframe.script(); unsigned local = block->slotToLocalIndex(script->bindings, shape->slot()); if (action == GET)
--- a/js/src/vm/Shape.cpp +++ b/js/src/vm/Shape.cpp @@ -92,17 +92,16 @@ Shape::makeOwnBaseShape(JSContext *cx) this->base_ = nbase; return true; } void Shape::handoffTableTo(RawShape shape) { - AutoAssertNoGC nogc; JS_ASSERT(inDictionary() && shape->inDictionary()); if (this == shape) return; JS_ASSERT(base()->isOwned() && !shape->base()->isOwned()); RawBaseShape nbase = base(); @@ -113,17 +112,16 @@ Shape::handoffTableTo(RawShape shape) nbase->adoptUnowned(shape->base()->toUnowned()); shape->base_ = nbase; } /* static */ bool Shape::hashify(JSContext *cx, Shape *shape) { - AutoAssertNoGC nogc; JS_ASSERT(!shape->hasTable()); if (!shape->ensureOwnBaseShape(cx)) return false; JSRuntime *rt = cx->runtime; ShapeTable *table = rt->new_<ShapeTable>(shape->entryCount()); if (!table)
--- a/js/src/vm/Shape.h +++ b/js/src/vm/Shape.h @@ -1049,18 +1049,16 @@ struct StackShape #define SHAPE_STORE_PRESERVING_COLLISION(spp, shape) \ (*(spp) = (RawShape) (uintptr_t(shape) | SHAPE_HAD_COLLISION(*(spp)))) namespace js { inline RawShape Shape::search(JSContext *cx, Shape *start, jsid id, Shape ***pspp, bool adding) { - AutoAssertNoGC nogc; - if (start->inDictionary()) { *pspp = start->table().search(id, adding); return SHAPE_FETCH(*pspp); } *pspp = NULL; if (start->hasTable()) {
--- a/js/src/vm/Stack-inl.h +++ b/js/src/vm/Stack-inl.h @@ -70,17 +70,16 @@ StackFrame::compartment() const JS_ASSERT(scopeChain()->compartment() == script()->compartment()); return scopeChain()->compartment(); } #ifdef JS_METHODJIT inline mjit::JITScript * StackFrame::jit() { - AutoAssertNoGC nogc; return script()->getJIT(isConstructing(), script()->zone()->compileBarriers()); } #endif inline void StackFrame::initPrev(JSContext *cx) { JS_ASSERT(flags_ & HAS_PREVPC); @@ -187,43 +186,40 @@ StackFrame::jitHeavyweightFunctionProlog flags_ |= HAS_CALL_OBJ; return true; } inline void StackFrame::initVarsToUndefined() { - AutoAssertNoGC nogc; SetValueRangeToUndefined(slots(), script()->nfixed); } inline JSObject * StackFrame::createRestParameter(JSContext *cx) { JS_ASSERT(fun()->hasRest()); unsigned nformal = fun()->nargs - 1, nactual = numActualArgs(); unsigned nrest = (nactual > nformal) ? nactual - nformal : 0; return NewDenseCopiedArray(cx, nrest, actuals() + nformal, NULL); } inline Value & StackFrame::unaliasedVar(unsigned i, MaybeCheckAliasing checkAliasing) { - AutoAssertNoGC nogc; JS_ASSERT_IF(checkAliasing, !script()->varIsAliased(i)); JS_ASSERT(i < script()->nfixed); return slots()[i]; } inline Value & StackFrame::unaliasedLocal(unsigned i, MaybeCheckAliasing checkAliasing) { #ifdef DEBUG - AutoAssertNoGC nogc; if (checkAliasing) { JS_ASSERT(i < script()->nslots); if (i < script()->nfixed) { JS_ASSERT(!script()->varIsAliased(i)); } else { unsigned depth = i - script()->nfixed; for (StaticBlockObject *b = maybeBlockChain(); b; b = b->enclosingBlock()) { if (b->containsVarAtDepth(depth)) { @@ -374,21 +370,18 @@ StackFrame::callObj() const } /*****************************************************************************/ STATIC_POSTCONDITION(!return || ubound(from) >= nvals) JS_ALWAYS_INLINE bool StackSpace::ensureSpace(JSContext *cx, MaybeReportError report, Value *from, ptrdiff_t nvals) const { - mozilla::Maybe<AutoAssertNoGC> maybeNoGC; if (report) 
AssertCanGC(); - else - maybeNoGC.construct(); assertInvariants(); JS_ASSERT(from >= firstUnused()); #ifdef XP_WIN JS_ASSERT(from <= commitEnd_); #endif if (JS_UNLIKELY(conservativeEnd_ - from < nvals)) return ensureSpaceSlow(cx, report, from, nvals); @@ -407,21 +400,18 @@ StackSpace::getStackLimit(JSContext *cx, } /*****************************************************************************/ JS_ALWAYS_INLINE StackFrame * ContextStack::getCallFrame(JSContext *cx, MaybeReportError report, const CallArgs &args, JSFunction *fun, HandleScript script, StackFrame::Flags *flags) const { - mozilla::Maybe<AutoAssertNoGC> maybeNoGC; if (report) AssertCanGC(); - else - maybeNoGC.construct(); JS_ASSERT(fun->nonLazyScript() == script); unsigned nformal = fun->nargs; Value *firstUnused = args.end(); JS_ASSERT(firstUnused == space().firstUnused()); /* Include extra space to satisfy the method-jit stackLimit invariant. */ @@ -454,21 +444,18 @@ ContextStack::getCallFrame(JSContext *cx return reinterpret_cast<StackFrame *>(firstUnused + ncopy); } JS_ALWAYS_INLINE bool ContextStack::pushInlineFrame(JSContext *cx, FrameRegs ®s, const CallArgs &args, HandleFunction callee, HandleScript script, InitialFrameFlags initial, MaybeReportError report) { - mozilla::Maybe<AutoAssertNoGC> maybeNoGC; if (report) AssertCanGC(); - else - maybeNoGC.construct(); JS_ASSERT(onTop()); JS_ASSERT(regs.sp == args.end()); /* Cannot assert callee == args.callee() since this is called from LeaveTree. 
*/ JS_ASSERT(callee->nonLazyScript() == script); StackFrame::Flags flags = ToFrameFlags(initial); StackFrame *fp = getCallFrame(cx, report, args, callee, script, &flags); @@ -542,18 +529,16 @@ ContextStack::popFrameAfterOverflow() StackFrame *fp = regs.fp(); regs.popFrame(fp->actuals() + fp->numActualArgs()); } inline RawScript ContextStack::currentScript(jsbytecode **ppc, MaybeAllowCrossCompartment allowCrossCompartment) const { - AutoAssertNoGC nogc; - if (ppc) *ppc = NULL; if (!hasfp()) return NULL; FrameRegs ®s = this->regs(); StackFrame *fp = regs.fp();
--- a/js/src/vm/Stack.cpp +++ b/js/src/vm/Stack.cpp @@ -716,21 +716,18 @@ StackSpace::markActiveCompartments() for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) MarkCompartmentActive(fp); } } JS_FRIEND_API(bool) StackSpace::ensureSpaceSlow(JSContext *cx, MaybeReportError report, Value *from, ptrdiff_t nvals) const { - mozilla::Maybe<AutoAssertNoGC> maybeNoGC; if (report) AssertCanGC(); - else - maybeNoGC.construct(); assertInvariants(); JSCompartment *dest = cx->compartment; bool trusted = dest->principals == cx->runtime->trustedPrincipals(); Value *end = trusted ? trustedEnd_ : defaultEnd_; /* @@ -889,21 +886,18 @@ ContextStack::onTop() const * * Additionally, to minimize calls to ensureSpace, ensureOnTop ensures that * there is space for nvars slots on top of the stack. */ Value * ContextStack::ensureOnTop(JSContext *cx, MaybeReportError report, unsigned nvars, MaybeExtend extend, bool *pushedSeg) { - mozilla::Maybe<AutoAssertNoGC> maybeNoGC; if (report) AssertCanGC(); - else - maybeNoGC.construct(); Value *firstUnused = space().firstUnused(); FrameRegs *regs = cx->maybeRegs(); #ifdef JS_METHODJIT /* * The only calls made by inlined methodjit frames can be to other JIT * frames associated with the same VMFrame. 
If we try to Invoke(), @@ -973,21 +967,18 @@ ContextStack::popSegment() if (!seg_) cx_->maybeMigrateVersionOverride(); } bool ContextStack::pushInvokeArgs(JSContext *cx, unsigned argc, InvokeArgsGuard *iag, MaybeReportError report) { - mozilla::Maybe<AutoAssertNoGC> maybeNoGC; if (report) AssertCanGC(); - else - maybeNoGC.construct(); JS_ASSERT(argc <= StackSpace::ARGS_LENGTH_MAX); unsigned nvars = 2 + argc; Value *firstUnused = ensureOnTop(cx, report, nvars, CAN_EXTEND, &iag->pushedSeg_); if (!firstUnused) return false; @@ -1017,21 +1008,18 @@ ContextStack::popInvokeArgs(const Invoke Debug_SetValueRangeToCrashOnTouch(space().firstUnused(), oldend); } StackFrame * ContextStack::pushInvokeFrame(JSContext *cx, MaybeReportError report, const CallArgs &args, JSFunction *funArg, InitialFrameFlags initial, FrameGuard *fg) { - mozilla::Maybe<AutoAssertNoGC> maybeNoGC; if (report) AssertCanGC(); - else - maybeNoGC.construct(); JS_ASSERT(onTop()); JS_ASSERT(space().firstUnused() == args.end()); RootedFunction fun(cx, funArg); RootedScript script(cx, fun->nonLazyScript()); StackFrame::Flags flags = ToFrameFlags(initial); @@ -1277,17 +1265,16 @@ void StackIter::poisonRegs() { data_.pc_ = (jsbytecode *)0xbad; } void StackIter::popFrame() { - AutoAssertNoGC nogc; StackFrame *oldfp = data_.fp_; JS_ASSERT(data_.seg_->contains(oldfp)); data_.fp_ = data_.fp_->prev(); if (data_.seg_->contains(data_.fp_)) { InlinedSite *inline_; data_.pc_ = oldfp->prevpc(&inline_); JS_ASSERT(!inline_); @@ -1304,17 +1291,16 @@ StackIter::popCall() data_.calls_ = data_.calls_->prev(); if (!data_.seg_->contains(data_.fp_)) poisonRegs(); } void StackIter::settleOnNewSegment() { - AutoAssertNoGC nogc; if (FrameRegs *regs = data_.seg_->maybeRegs()) data_.pc_ = regs->pc; else poisonRegs(); } void StackIter::startOnSegment(StackSegment *seg) @@ -1342,18 +1328,16 @@ StackIter::startOnSegment(StackSegment * * the innermost invocation so implicit memory ordering is used since both * push values on the stack. 
* - a native call's 'callee' argument is clobbered on return while the * CallArgsList element is still visible. */ void StackIter::settleOnNewState() { - AutoAssertNoGC nogc; - /* Reset whether or we popped a call last time we settled. */ data_.poppedCallDuringSettle_ = false; /* * There are elements of the calls_ and fp_ chains that we want to skip * over so iterate until we settle on one or until there are no more. */ while (true) { @@ -1548,17 +1532,16 @@ StackIter::StackIter(const Data &data) { JS_ASSERT(data.cx_); } #ifdef JS_ION void StackIter::popIonFrame() { - AutoAssertNoGC nogc; // Keep fp which describes all ion frames. poisonRegs(); if (data_.ionFrames_.isScripted() && ionInlineFrames_.more()) { ++ionInlineFrames_; data_.pc_ = ionInlineFrames_.pc(); } else { ++data_.ionFrames_; while (!data_.ionFrames_.done() && !data_.ionFrames_.isScripted()) @@ -2023,17 +2006,16 @@ StackIter::setReturnValue(const Value &v break; } JS_NOT_REACHED("Unexpected state"); } size_t StackIter::numFrameSlots() const { - AutoAssertNoGC nogc; switch (data_.state_) { case DONE: case NATIVE: break; case ION: #ifdef JS_ION return ionInlineFrames_.snapshotIterator().slots() - ionInlineFrames_.script()->nfixed; #else @@ -2046,17 +2028,16 @@ StackIter::numFrameSlots() const } JS_NOT_REACHED("Unexpected state"); return 0; } Value StackIter::frameSlotValue(size_t index) const { - AutoAssertNoGC nogc; switch (data_.state_) { case DONE: case NATIVE: break; case ION: #ifdef JS_ION { ion::SnapshotIterator si(ionInlineFrames_.snapshotIterator());
--- a/js/src/vm/String.h +++ b/js/src/vm/String.h @@ -872,55 +872,50 @@ class AutoNameVector : public AutoVector } /* namespace js */ /* Avoid requiring vm/String-inl.h just to call getChars. */ JS_ALWAYS_INLINE const jschar * JSString::getChars(JSContext *cx) { - JS::AutoAssertNoGC nogc; if (JSLinearString *str = ensureLinear(cx)) return str->chars(); return NULL; } JS_ALWAYS_INLINE const jschar * JSString::getCharsZ(JSContext *cx) { - JS::AutoAssertNoGC nogc; if (JSFlatString *str = ensureFlat(cx)) return str->chars(); return NULL; } JS_ALWAYS_INLINE JSLinearString * JSString::ensureLinear(JSContext *cx) { - JS::AutoAssertNoGC nogc; return isLinear() ? &asLinear() : asRope().flatten(cx); } JS_ALWAYS_INLINE JSFlatString * JSString::ensureFlat(JSContext *cx) { - JS::AutoAssertNoGC nogc; return isFlat() ? &asFlat() : isDependent() ? asDependent().undepend(cx) : asRope().flatten(cx); } JS_ALWAYS_INLINE JSStableString * JSString::ensureStable(JSContext *maybecx) { - JS::AutoAssertNoGC nogc; if (isRope()) { JSFlatString *flat = asRope().flatten(maybecx); if (!flat) return NULL; JS_ASSERT(!flat->isInline()); return &flat->asStable(); }