author | Terrence Cole <terrence@mozilla.com>, Tue, 04 Dec 2012 11:10:53 -0800 |
changeset 115149 | 5acd87d0cf3398132377949d4b701f2c4d4d9316 |
parent 115148 | c20d8d5480e156d749cc21d829d15b801dbfd96f |
child 115150 | d1fba48c7253db66bdca75e049a239f49d168303 |
push id | 23973 |
push user | emorley@mozilla.com |
push date | Thu, 06 Dec 2012 10:04:18 +0000 |
reviewers | billm |
bugs | 817091, 811168 |
milestone | 20.0a1 |
first release with | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64 |
last release without | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64 |
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -607,17 +607,18 @@ class HeapId : public EncapsulatedId
 template<class T>
 class ReadBarriered
 {
     T *value;

   public:
     ReadBarriered() : value(NULL) {}
     ReadBarriered(T *value) : value(value) {}
-    ReadBarriered(Unrooted<T*> unrooted) : value(unrooted) {}
+    ReadBarriered(const Unrooted<T*> &unrooted) : value(unrooted) {}
+    ReadBarriered(const Rooted<T*> &rooted) : value(rooted) {}

     T *get() const {
         if (!value)
             return NULL;
         T::readBarrier(value);
         return value;
     }
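The Barrier.h hunk above adds a ReadBarriered constructor taking const Rooted<T*>& next to the existing const Unrooted<T*>& overload, so a rooted pointer can initialize a read-barriered slot directly. The standalone sketch below shows the overload pattern only; Rooted, Unrooted and BarrieredSlot here are simplified stand-ins, not the real SpiderMonkey classes.

    // Illustrative only: a slot type that accepts both handle-like wrappers
    // by const reference, mirroring the new ReadBarriered constructors.
    #include <cstdio>

    template <typename T> struct Rooted   { T ptr; T get() const { return ptr; } operator T() const { return ptr; } };
    template <typename T> struct Unrooted { T ptr; operator T() const { return ptr; } };

    template <typename T>
    struct BarrieredSlot {
        T value;
        BarrieredSlot(const Unrooted<T> &u) : value(u) {}  // pre-existing path
        BarrieredSlot(const Rooted<T> &r)   : value(r) {}  // path added by this patch
    };

    int main() {
        int dummy = 42;
        Rooted<int*>   rooted{&dummy};
        Unrooted<int*> unrooted{&dummy};
        BarrieredSlot<int*> a(rooted);    // now accepted directly
        BarrieredSlot<int*> b(unrooted);  // still accepted
        std::printf("%d %d\n", *a.value, *b.value);
    }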
--- a/js/src/gc/Root.h +++ b/js/src/gc/Root.h @@ -71,21 +71,16 @@ * generated, which is quicker and easier to fix than when relying on a * separate rooting analysis. * * - MutableHandle<T> is a non-const reference to Rooted<T>. It is used in the * same way as Handle<T> and includes a |set(const T &v)| method to allow * updating the value of the referenced Rooted<T>. A MutableHandle<T> can be * created from a Rooted<T> by using |Rooted<T>::operator&()|. * - * - Return<T> is the type of a value returned from a function. Return<T> is - * opaque and cannot be accessed unless correctly rooted. It is invalid to - * create a named Return<T>, so the return value must be assigned to - * Rooted<T> immediately, or discarded and not referenced again. - * * In some cases the small performance overhead of exact rooting is too much. * In these cases, try the following: * * - Move all Rooted<T> above inner loops: this allows you to re-use the root * on each iteration of the loop. * * - Pass Handle<T> through your hot call stack to avoid re-rooting costs at * every invocation. @@ -112,16 +107,34 @@ * scope. This can be used to force |v| out of scope before its C++ scope * would end naturally. The usage of braces C++ syntactical scopes |{...}| * is strongly perferred to this, but sometimes will not work because of * awkwardly overlapping lifetimes. * * There also exists a set of RawT typedefs for modules without rooting * concerns, such as the GC. Do not use these as they provide no rooting * protection whatsoever. + * + * The following diagram explains the list of supported, implicit type + * conversions between classes of this family: + * + * RawT ----> UnrootedT + * | ^ + * | | + * | v + * +--------> Rooted<T> <---> Handle<T> + * ^ ^ + * | | + * | | + * +---> MutableHandle<T> + * (via &) + * + * Currently all of these types implicit conversion to RawT. These are present + * only for the purpose of bootstrapping exact rooting and will be removed in + * the future (Bug 817164). */ namespace js { template <typename T> class Rooted; template <typename T> class Unrooted; template <typename T> @@ -305,31 +318,31 @@ class MutableHandle : public js::Mutable typedef MutableHandle<JSObject*> MutableHandleObject; typedef MutableHandle<JSFunction*> MutableHandleFunction; typedef MutableHandle<JSScript*> MutableHandleScript; typedef MutableHandle<JSString*> MutableHandleString; typedef MutableHandle<jsid> MutableHandleId; typedef MutableHandle<Value> MutableHandleValue; +} /* namespace JS */ + +namespace js { + /* * Raw pointer used as documentation that a parameter does not need to be * rooted. */ typedef JSObject * RawObject; typedef JSFunction * RawFunction; typedef JSScript * RawScript; typedef JSString * RawString; typedef jsid RawId; typedef Value RawValue; -} /* namespace JS */ - -namespace js { - /* * InternalHandle is a handle to an internal pointer into a gcthing. Use * InternalHandle when you have a pointer to a direct field of a gcthing, or * when you need a parameter type for something that *may* be a pointer to a * direct field of a gcthing. */ template <typename T> class InternalHandle {}; @@ -378,233 +391,50 @@ class InternalHandle<T*> * fromMarkedLocation(). */ InternalHandle(T *field) : holder(reinterpret_cast<void * const *>(&NullPtr::constNullValue)), offset(uintptr_t(field)) {} }; -/* - * Return<T> wraps GC things that are returned from accessor methods. The - * wrapper helps to ensure correct rooting of the returned pointer and safe - * access while unrooted. 
- * - * Example usage in a method declaration: - * - * class Foo { - * HeapPtrScript script_; - * ... - * public: - * Return<JSScript*> script() { return script_; } - * }; - * - * Example usage of method (1): - * - * Foo foo(...); - * RootedScript script(cx, foo->script()); - * - * Example usage of method (2): - * - * Foo foo(...); - * foo->script()->needsArgsObj(); - * - * The purpose of this class is to assert eagerly on incorrect use of GC thing - * pointers. For example: - * - * RootedShape shape(cx, ...); - * shape->parent.init(js_NewGCThing<Shape*>(cx, ...)); - * - * In this expression, C++ is allowed to order these calls as follows: - * - * Call Effect - * ---- ------ - * 1) RootedShape::operator-> Stores shape::ptr_ to stack. - * 2) js_NewGCThing<Shape*> Triggers GC and compaction of shapes. This - * moves shape::ptr_ to a new location. - * 3) HeapPtrObject::init This call takes the relocated shape::ptr_ - * as |this|, crashing or, worse, corrupting - * the program's state on the first access - * to a member variable. - * - * If Shape::parent were an accessor function returning a Return<Shape*>, this - * could not happen: Return ensures either immediate rooting or no GC within - * the same expression. - */ -template <typename T> -class Return -{ - typedef void (Return<T>::* ConvertibleToBool)(); - void nonNull() {} - - public: - template <typename S> - inline Return(const Unrooted<S> &unrooted, - typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy = 0); - - template <typename S> - Return(const S &ptr, - typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy = 0) - : ptr_(ptr) - { - EnterAssertNoGCScope(); - } - - Return(NullPtr) : ptr_(NULL) { - EnterAssertNoGCScope(); - } - - Return(const Return &ret) : ptr_(ret.ptr_) { - EnterAssertNoGCScope(); - } - - ~Return() { - LeaveAssertNoGCScope(); - } - -#ifndef DEBUG - /* - * In DEBUG builds, |Unrooted<T>| has a constructor that accepts - * |Return<T>|, which allows direct assignment into a |Unrooted<T>|. This - * is safe because |Unrooted<T>| implies a NoGCScope. In optimized builds, - * however, |Unrooted<T>| does not exist, only the UnrootedT typedef to a - * raw T. Thus, this unsafe unpack is protected by a different mechanism - * in debug builds. - */ - operator const T &() { return ptr_; } -#endif /* DEBUG */ - - /* - * |get(AutoAssertNoGC &)| is the safest way to access a Return<T> without - * rooting it first: it is impossible to call this method without an - * AutoAssertNoGC in scope, so the compiler will automatically catch any - * incorrect usage. - * - * Example: - * AutoAssertNoGC nogc; - * UnrootedScript script = fun->script().get(nogc); - */ - const T &get(AutoAssertNoGC &) const { - return ptr_; - } - - /* - * |operator->|'s result cannot be stored in a local variable, so it is - * safe to use in a CanGC context iff no GC can occur anywhere within the - * same expression (generally from one |;| to the next). |operator->| is - * protected at runtime by the fact that |Return<T>| is an AutoAssertNoGC. - * Still, care must be taken to avoid having the |Return<T>| on the stack - * during a GC, which would result in a runtime assertion. - * - * INCORRECT: - * fun->script()->bindings = myBindings->clone(cx, ...); - * - * The compiler is allowed to reorder |fun->script()::operator->()| above - * the call to |clone(cx, ...)|. In this case, the raw js::Script* C++ - * stores on the stack may be corrupted by a GC under |clone|. 
The - * subsequent dereference of this pointer to get |bindings| will result in - * an invalid access. |Return<T>| ensures that such usage asserts in DEBUG - * builds when it encounters this situation. Without this assertion, it is - * possible for such access to corrupt program state instead of crashing - * immediately. - * - * CORRECT: - * RootedScript clone(cx, myBindings->clone(cx, ...)); - * fun->script()->bindings = clone; - */ - const T &operator->() const { - return ptr_; - } - - /* - * |unsafeGet()| is unsafe for most uses. Usage of this method should be - * restricted to GC internals, assertions, or include a comment explaining - * how its usage is protected. - */ - const T &unsafeGet() const { - return ptr_; - } - - /* - * |operator==| is safe to use in any context. It is present to allow: - * JS_ASSERT(myScript == fun->script().unsafeGet()); - * - * To be rewritten as: - * JS_ASSERT(fun->script() == myScript); - * - * Note: the new order tells C++ to use |Return<JSScript*>::operator=| - * instead of direct pointer comparison. - */ - operator ConvertibleToBool() const { return ptr_ ? &Return<T>::nonNull : 0; } - bool operator==(const T &other) { return ptr_ == other; } - bool operator!=(const T &other) { return ptr_ != other; } - bool operator==(const Return<T> &other) { return ptr_ == other.ptr_; } - bool operator==(const JS::Handle<T> &other) { return ptr_ == other.get(); } - inline bool operator==(const Rooted<T> &other); - - private: - const T ptr_; -}; - +#ifdef DEBUG /* * |Unrooted<T>| acts as an AutoAssertNoGC after it is initialized. It otherwise * acts like as a normal pointer of type T. */ -#ifdef DEBUG template <typename T> class Unrooted { public: Unrooted() : ptr_(UninitializedTag()) {} /* * |Unrooted<T>| can be initialized from a convertible |Rooted<S>| or * |Handle<S>|. This is so that we can call AutoAssertNoGC methods that * take |Unrooted<T>| parameters with a convertible rooted argument * without explicit unpacking. * * Note: Even though this allows implicit conversion to |Unrooted<T>| * type, this is safe because Unrooted<T> acts as an AutoAssertNoGC scope. */ template <typename S> - inline Unrooted(Rooted<S> &root, - typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy = 0); + inline Unrooted(const Rooted<S> &root, + typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy = 0); template <typename S> - Unrooted(JS::Handle<S> &root, - typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy = 0) + Unrooted(const JS::Handle<S> &root, + typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy = 0) : ptr_(root.get()) { JS_ASSERT(ptr_ != UninitializedTag()); EnterAssertNoGCScope(); } /* - * |Unrooted<T>| can accept |Return<T>| without any casts. This is safe - * because |Unrooted<T>| acts as an |AutoAssertNoGC| scope. This is to - * enable usage such as: - * - * Return<Foo*> - * CreateFoo(JSContext *cx, ...) - * { - * Unrooted<Foo*> foo = js_NewFoo(cx); - * foo.initialize(...); - * return foo; - * } - */ - template <typename S> - Unrooted(const Return<S> &ret, - typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy = 0) - : ptr_(ret.unsafeGet()) - { - JS_ASSERT(ptr_ != UninitializedTag()); - EnterAssertNoGCScope(); - } - - /* * |Unrooted<T>| can initialize by copying from a convertible type * |Unrooted<S>|. 
This enables usage such as: * * Unrooted<BaseShape*> base = js_NewBaseShape(cx); * Unrooted<UnownedBaseShape*> ubase = static_cast<UnrootedUnownedBaseShape>(ubase); */ template <typename S> Unrooted(const Unrooted<S> &other) @@ -620,37 +450,31 @@ class Unrooted EnterAssertNoGCScope(); } Unrooted(const T &p) : ptr_(p) { JS_ASSERT(ptr_ != UninitializedTag()); EnterAssertNoGCScope(); } + Unrooted(const JS::NullPtr &) : ptr_(NULL) { + EnterAssertNoGCScope(); + } + ~Unrooted() { if (ptr_ != UninitializedTag()) LeaveAssertNoGCScope(); } void drop() { if (ptr_ != UninitializedTag()) LeaveAssertNoGCScope(); ptr_ = UninitializedTag(); } - /* See notes for Unrooted::Unrooted(const Return<S> &) */ - template <typename S> - Unrooted &operator=(const Return<S> &other) { - JS_ASSERT(other.unsafeGet() != UninitializedTag()); - if (ptr_ == UninitializedTag()) - EnterAssertNoGCScope(); - ptr_ = other.unsafeGet(); - return *this; - } - /* See notes for Unrooted::Unrooted(const T &) */ Unrooted &operator=(T other) { JS_ASSERT(other != UninitializedTag()); if (ptr_ == UninitializedTag()) EnterAssertNoGCScope(); ptr_ = other; return *this; } @@ -677,21 +501,29 @@ class Unrooted T ptr_; }; /* * This macro simplifies declaration of the required matching raw-pointer for * optimized builds and Unrooted<T> template for debug builds. */ -# define ForwardDeclare(type) \ - class type; \ - typedef Unrooted<type*> Unrooted##type; \ +# define ForwardDeclare(type) \ + class type; \ + typedef Unrooted<type*> Unrooted##type; \ typedef type * Raw##type +# define ForwardDeclareJS(type) \ + struct JS##type; \ + namespace js { \ + typedef Unrooted<JS##type*> Unrooted##type; \ + typedef JS##type * Raw##type; \ + } \ + struct JS##type + template <typename T> T DropUnrooted(Unrooted<T> &unrooted) { T rv = unrooted; unrooted.drop(); return rv; } @@ -704,50 +536,43 @@ T DropUnrooted(T &unrooted) } template <> inline RawId DropUnrooted(RawId &id) { return id; } #else /* NDEBUG */ /* In opt builds |UnrootedFoo| is a real |Foo*|. */ -# define ForwardDeclare(type) \ - class type; \ - typedef type * Unrooted##type; \ +# define ForwardDeclare(type) \ + class type; \ + typedef type * Unrooted##type; \ typedef type * Raw##type -/* - * Note: we still define Unrooted<T> in optimized builds so that we do not need - * #ifdef DEBUG around every debug specialization. We just ensure that the - * class is never initialized by deleting its constructors. - */ +# define ForwardDeclareJS(type) \ + struct JS##type; \ + namespace js { \ + typedef JS##type * Unrooted##type; \ + typedef JS##type * Raw##type; \ + } \ + struct JS##type + template <typename T> class Unrooted { private: Unrooted() MOZ_DELETE; Unrooted(const Unrooted &) MOZ_DELETE; ~Unrooted() MOZ_DELETE; }; template <typename T> T DropUnrooted(T &unrooted) { return unrooted; } #endif /* DEBUG */ -template <typename T> template <typename S> -inline -Return<T>::Return(const Unrooted<S> &unrooted, - typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy) - /* Note: |static_cast| acquires raw.ptr_ in DEBUG builds. */ - : ptr_(static_cast<S>(unrooted)) -{ - EnterAssertNoGCScope(); -} - /* * By default, pointers should use the inheritance hierarchy to find their * ThingRootKind. Some pointer types are explicitly set in jspubtd.h so that * Rooted<T> may be used without the class definition being available. 
*/ template <typename T> struct RootKind<T *> { @@ -851,45 +676,27 @@ class Rooted : public RootedBase<T> MOZ_GUARD_OBJECT_NOTIFIER_PARAM) : ptr(initial) { MOZ_GUARD_OBJECT_NOTIFIER_INIT; init(pt); } template <typename S> - Rooted(JSContext *cx, const Return<S> &initial - MOZ_GUARD_OBJECT_NOTIFIER_PARAM) - : ptr(initial.unsafeGet()) - { - MOZ_GUARD_OBJECT_NOTIFIER_INIT; - init(cx); - } - - template <typename S> Rooted(JSContext *cx, const Unrooted<S> &initial MOZ_GUARD_OBJECT_NOTIFIER_PARAM) - : ptr(initial.ptr_) + : ptr(static_cast<S>(initial)) #if defined(JSGC_ROOT_ANALYSIS) , scanned(false) #endif { MOZ_GUARD_OBJECT_NOTIFIER_INIT; init(cx); } - template <typename S> - Rooted(js::PerThreadData *pt, const Return<S> &initial - MOZ_GUARD_OBJECT_NOTIFIER_PARAM) - : ptr(initial.ptr_) - { - MOZ_GUARD_OBJECT_NOTIFIER_INIT; - init(pt); - } - ~Rooted() { #if defined(JSGC_ROOT_ANALYSIS) || defined(JSGC_USE_EXACT_ROOTING) JS_ASSERT(*stack == this); *stack = prev; #endif } #if defined(JSGC_ROOT_ANALYSIS) || defined(JSGC_USE_EXACT_ROOTING) @@ -909,22 +716,16 @@ class Rooted : public RootedBase<T> return ptr; } T &operator=(const Rooted &value) { ptr = value; return ptr; } - template <typename S> - T &operator=(const Return<S> &value) { - ptr = value.unsafeGet(); - return ptr; - } - private: void commonInit(Rooted<void*> **thingGCRooters) { #if defined(JSGC_ROOT_ANALYSIS) || defined(JSGC_USE_EXACT_ROOTING) ThingRootKind kind = RootMethods<T>::kind(); this->stack = reinterpret_cast<Rooted<T>**>(&thingGCRooters[kind]); this->prev = *stack; *stack = this; @@ -948,28 +749,21 @@ class Rooted : public RootedBase<T> }; #if !(defined(JSGC_ROOT_ANALYSIS) || defined(JSGC_USE_EXACT_ROOTING)) // Defined in vm/String.h. template <> class Rooted<JSStableString *>; #endif -template <typename T> -bool -Return<T>::operator==(const Rooted<T> &other) -{ - return ptr_ == other.get(); -} - #ifdef DEBUG template <typename T> template <typename S> inline -Unrooted<T>::Unrooted(Rooted<S> &root, - typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy) +Unrooted<T>::Unrooted(const Rooted<S> &root, + typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy) : ptr_(root.get()) { JS_ASSERT(ptr_ != UninitializedTag()); EnterAssertNoGCScope(); } #endif /* DEBUG */ typedef Rooted<JSObject*> RootedObject;
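The Root.h comments above describe Unrooted<T> as acting like an AutoAssertNoGC once initialized: while an unrooted pointer is live, any attempt to collect must assert rather than silently move the referent. The sketch below is a minimal, hypothetical model of that debug-only mechanism; EnterNoGC, LeaveNoGC, MaybeGC and DebugUnrooted are stand-ins for EnterAssertNoGCScope, LeaveAssertNoGCScope, the collector, and Unrooted<T>, not the real implementations.

    #include <cassert>
    #include <cstdio>

    static int gNoGCDepth = 0;
    static void EnterNoGC() { ++gNoGCDepth; }
    static void LeaveNoGC() { --gNoGCDepth; }
    static void MaybeGC()   { assert(gNoGCDepth == 0 && "no GC while an unrooted value is live"); }

    template <typename T>
    class DebugUnrooted {
        T ptr_;
      public:
        explicit DebugUnrooted(T p) : ptr_(p) { EnterNoGC(); }   // opens the no-GC scope
        ~DebugUnrooted() { LeaveNoGC(); }                        // closes it at end of scope
        operator T() const { return ptr_; }
    };

    int main() {
        int thing = 7;
        {
            DebugUnrooted<int*> script(&thing);   // raises the no-GC counter
            int *raw = script;                    // reads through the wrapper
            std::printf("%d\n", *raw);
            // MaybeGC();  // would assert here: a raw pointer is still in use
        }
        MaybeGC();  // fine: the wrapper went out of scope
    }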
--- a/js/src/ion/Bailouts.cpp +++ b/js/src/ion/Bailouts.cpp @@ -69,28 +69,28 @@ IonBailoutIterator::dump() const break; ++frames; } } else { IonFrameIterator::dump(); } } -static JSScript* +static UnrootedScript GetBailedJSScript(JSContext *cx) { AutoAssertNoGC nogc; // Just after the frame conversion, we can safely interpret the ionTop as JS // frame because it targets the bailed JS frame converted to an exit frame. IonJSFrameLayout *frame = reinterpret_cast<IonJSFrameLayout*>(cx->runtime->ionTop); switch (GetCalleeTokenTag(frame->calleeToken())) { case CalleeToken_Function: { JSFunction *fun = CalleeTokenToFunction(frame->calleeToken()); - return fun->nonLazyScript().get(nogc); + return fun->nonLazyScript(); } case CalleeToken_Script: return CalleeTokenToScript(frame->calleeToken()); default: JS_NOT_REACHED("unexpected callee token kind"); return NULL; } } @@ -173,18 +173,18 @@ StackFrame::initFromBailout(JSContext *c } unsigned pcOff = iter.pcOffset(); regs.pc = script()->code + pcOff; if (iter.resumeAfter()) regs.pc = GetNextPc(regs.pc); IonSpew(IonSpew_Bailouts, " new PC is offset %u within script %p (line %d)", - pcOff, (void *)script().get(nogc), PCToLineNumber(script().get(nogc), regs.pc)); - JS_ASSERT(exprStackSlots == js_ReconstructStackDepth(cx, script().get(nogc), regs.pc)); + pcOff, (void *)script(), PCToLineNumber(script(), regs.pc)); + JS_ASSERT(exprStackSlots == js_ReconstructStackDepth(cx, script(), regs.pc)); } static StackFrame * PushInlinedFrame(JSContext *cx, StackFrame *callerFrame) { AutoAssertNoGC nogc; // Grab the callee object out of the caller's frame, which has already been restored. @@ -319,17 +319,17 @@ ConvertFrames(JSContext *cx, IonActivati return BAILOUT_RETURN_CACHED_SHAPE_GUARD; // When bailing out from an argument check, none of the code of the // function has run yet. When profiling, this means that the function // hasn't flagged its entry just yet. It has been "entered," however, so // we flag it here manually that the entry has happened. case Bailout_ArgumentCheck: fp->unsetPushedSPSFrame(); - Probes::enterScript(cx, fp->script().unsafeGet(), fp->script()->function(), fp); + Probes::enterScript(cx, fp->script(), fp->script()->function(), fp); return BAILOUT_RETURN_ARGUMENT_CHECK; } JS_NOT_REACHED("bad bailout kind"); return BAILOUT_RETURN_FATAL_ERROR; } static inline void @@ -497,18 +497,19 @@ ion::ReflowTypeInfo(uint32_t bailoutResu types::TypeScript::Monitor(cx, script, pc, result); return true; } uint32_t ion::RecompileForInlining() { + AutoAssertNoGC nogc; JSContext *cx = GetIonContext()->cx; - RawScript script = cx->fp()->script().unsafeGet(); + UnrootedScript script = cx->fp()->script(); IonSpew(IonSpew_Inlining, "Recompiling script to inline calls %s:%d", script->filename, script->lineno); // Invalidate the script to force a recompile. if (!Invalidate(cx, script, /* resetUses */ false)) return BAILOUT_RETURN_FATAL_ERROR; @@ -658,17 +659,17 @@ ion::ThunkToInterpreter(Value *vp) if (status == Interpret_OSR) { // The interpreter currently does not ask to perform inline OSR, so // this path is unreachable. JS_NOT_REACHED("invalid"); IonSpew(IonSpew_Bailouts, "Performing inline OSR %s:%d", cx->fp()->script()->filename, - PCToLineNumber(cx->fp()->script().unsafeGet(), cx->regs().pc)); + PCToLineNumber(cx->fp()->script(), cx->regs().pc)); // We want to OSR again. We need to avoid the problem where frequent // bailouts cause recursive nestings of Interpret and EnterIon. 
The // interpreter therefore shortcuts out, and now we're responsible for // completing the OSR inline. // // Note that we set runningInIon so that if we re-enter C++ from within // the inlined OSR, StackIter will know to traverse these frames.
--- a/js/src/ion/CodeGenerator.cpp +++ b/js/src/ion/CodeGenerator.cpp @@ -306,17 +306,17 @@ CodeGenerator::visitLambda(LLambda *lir) } s; uint32_t word; } u; u.s.nargs = fun->nargs; u.s.flags = fun->flags & ~JSFunction::EXTENDED; JS_STATIC_ASSERT(offsetof(JSFunction, flags) == offsetof(JSFunction, nargs) + 2); masm.store32(Imm32(u.word), Address(output, offsetof(JSFunction, nargs))); - masm.storePtr(ImmGCPtr(fun->nonLazyScript().unsafeGet()), + masm.storePtr(ImmGCPtr(fun->nonLazyScript()), Address(output, JSFunction::offsetOfNativeOrScript())); masm.storePtr(scopeChain, Address(output, JSFunction::offsetOfEnvironment())); masm.storePtr(ImmGCPtr(fun->displayAtom()), Address(output, JSFunction::offsetOfAtom())); masm.bind(ool->rejoin()); return true; }
--- a/js/src/ion/Ion.cpp +++ b/js/src/ion/Ion.cpp @@ -1165,17 +1165,17 @@ IonCompile(JSContext *cx, JSScript *scri bool SequentialCompileContext::compile(IonBuilder *builder, MIRGraph *graph, AutoDestroyAllocator &autoDestroy) { JS_ASSERT(!builder->script()->ion); JSContext *cx = GetIonContext()->cx; - IonSpewNewFunction(graph, builder->script().unsafeGet()); + IonSpewNewFunction(graph, builder->script()); if (!builder->build()) { IonSpew(IonSpew_Abort, "Builder failed to build."); return false; } builder->clearForBackEnd(); // Try to compile the script off thread, if possible. Compilation cannot be @@ -1505,17 +1505,17 @@ EnterIon(JSContext *cx, StackFrame *fp, // The beginning of the actual args is not updated, so we just copy // the formal args into the actual args to get a linear vector which // can be copied by generateEnterJit. memcpy(maxArgv, formalArgv, formalArgc * sizeof(Value)); } calleeToken = CalleeToToken(&fp->callee()); } else { - calleeToken = CalleeToToken(fp->script().unsafeGet()); + calleeToken = CalleeToToken(fp->script()); } // Caller must construct |this| before invoking the Ion function. JS_ASSERT_IF(fp->isConstructing(), fp->functionThis().isObject()); Value result = Int32Value(numActualArgs); { AssertCompartmentUnchanged pcc(cx); IonContext ictx(cx, cx->compartment, NULL); @@ -1812,16 +1812,17 @@ ion::InvalidateAll(FreeOp *fop, JSCompar } } void ion::Invalidate(types::TypeCompartment &types, FreeOp *fop, const Vector<types::RecompileInfo> &invalid, bool resetUses) { + AutoAssertNoGC nogc; IonSpew(IonSpew_Invalidate, "Start invalidation."); AutoFlushCache afc ("Invalidate"); // Add an invalidation reference to all invalidated IonScripts to indicate // to the traversal which frames have been invalidated. bool anyInvalidation = false; for (size_t i = 0; i < invalid.length(); i++) { const types::CompilerOutput &co = *invalid[i].compilerOutput(types);
--- a/js/src/ion/IonBuilder.cpp +++ b/js/src/ion/IonBuilder.cpp @@ -147,17 +147,17 @@ IonBuilder::CFGState::LookupSwitch(jsbyt return state; } JSFunction * IonBuilder::getSingleCallTarget(uint32_t argc, jsbytecode *pc) { AutoAssertNoGC nogc; - types::StackTypeSet *calleeTypes = oracle->getCallTarget(script().get(nogc), argc, pc); + types::StackTypeSet *calleeTypes = oracle->getCallTarget(script(), argc, pc); if (!calleeTypes) return NULL; RawObject obj = calleeTypes->getSingleton(); if (!obj || !obj->isFunction()) return NULL; return obj->toFunction(); @@ -3844,17 +3844,17 @@ IonBuilder::getSingletonPrototype(JSFunc MDefinition * IonBuilder::createThisScriptedSingleton(HandleFunction target, HandleObject proto, MDefinition *callee) { // Generate an inline path to create a new |this| object with // the given singleton prototype. types::TypeObject *type = proto->getNewType(cx, target); if (!type) return NULL; - if (!types::TypeScript::ThisTypes(target->nonLazyScript().unsafeGet())->hasType(types::Type::ObjectType(type))) + if (!types::TypeScript::ThisTypes(target->nonLazyScript())->hasType(types::Type::ObjectType(type))) return NULL; RootedObject templateObject(cx, js_CreateThisForFunctionWithProto(cx, target, proto)); if (!templateObject) return NULL; // Trigger recompilation if the templateObject changes. if (templateObject->type()->newScript)
--- a/js/src/ion/IonBuilder.h
+++ b/js/src/ion/IonBuilder.h
@@ -474,17 +474,17 @@ class IonBuilder : public MIRGenerator
     CodeGenerator *backgroundCodegen_;

   public:
     // Compilation index for this attempt.
     types::RecompileInfo const recompileInfo;

     void clearForBackEnd();

-    Return<JSScript*> script() const { return script_; }
+    UnrootedScript script() const { return script_.get(); }

     CodeGenerator *backgroundCodegen() const { return backgroundCodegen_; }
     void setBackgroundCodegen(CodeGenerator *codegen) { backgroundCodegen_ = codegen; }

   private:
     JSContext *cx;

     jsbytecode *pc;
--- a/js/src/ion/IonCaches.cpp +++ b/js/src/ion/IonCaches.cpp @@ -1621,17 +1621,17 @@ GenerateScopeChainGuard(MacroAssembler & AutoAssertNoGC nogc; if (scopeObj->isCall()) { // We can skip a guard on the call object if the script's bindings are // guaranteed to be immutable (and thus cannot introduce shadowing // variables). CallObject *callObj = &scopeObj->asCall(); if (!callObj->isForEval()) { RawFunction fun = &callObj->callee(); - RawScript script = fun->nonLazyScript().get(nogc); + UnrootedScript script = fun->nonLazyScript(); if (!script->funHasExtensibleScope) return; } } else if (scopeObj->isGlobal()) { // If this is the last object on the scope walk, and the property we've // found is not configurable, then we don't need a shape guard because // the shape cannot be removed. if (shape && !shape->configurable())
--- a/js/src/ion/IonFrames-inl.h
+++ b/js/src/ion/IonFrames-inl.h
@@ -76,17 +76,17 @@ IonFrameIterator::prevType() const
 size_t
 IonFrameIterator::frameSize() const
 {
     JS_ASSERT(type_ != IonFrame_Exit);
     return frameSize_;
 }

 // Returns the JSScript associated with the topmost Ion frame.
-inline JSScript *
+inline UnrootedScript
 GetTopIonJSScript(JSContext *cx, const SafepointIndex **safepointIndexOut, void **returnAddrOut)
 {
     AutoAssertNoGC nogc;
     IonFrameIterator iter(cx->runtime->ionTop);
     JS_ASSERT(iter.type() == IonFrame_Exit);
     ++iter;

     // If needed, grab the safepoint index.
--- a/js/src/ion/IonFrames.cpp +++ b/js/src/ion/IonFrames.cpp @@ -921,17 +921,17 @@ InlineFrameIterator::findNextFrame() // Skip extra slots. while (si_.moreSlots()) si_.skip(); si_.nextFrame(); callee_ = funval.toObject().toFunction(); - script_ = callee_->nonLazyScript().get(nogc); + script_ = callee_->nonLazyScript(); pc_ = script_->code + si_.pcOffset(); } framesRead_++; } InlineFrameIterator InlineFrameIterator::operator++()
--- a/js/src/ion/IonFrames.h +++ b/js/src/ion/IonFrames.h @@ -37,50 +37,50 @@ GetCalleeTokenTag(CalleeToken token) return tag; } static inline CalleeToken CalleeToToken(JSFunction *fun) { return CalleeToken(uintptr_t(fun) | uintptr_t(CalleeToken_Function)); } static inline CalleeToken -CalleeToToken(JSScript *script) +CalleeToToken(RawScript script) { return CalleeToken(uintptr_t(script) | uintptr_t(CalleeToken_Script)); } static inline bool CalleeTokenIsFunction(CalleeToken token) { return GetCalleeTokenTag(token) == CalleeToken_Function; } static inline JSFunction * CalleeTokenToFunction(CalleeToken token) { JS_ASSERT(CalleeTokenIsFunction(token)); return (JSFunction *)token; } -static inline JSScript * +static inline UnrootedScript CalleeTokenToScript(CalleeToken token) { JS_ASSERT(GetCalleeTokenTag(token) == CalleeToken_Script); - return (JSScript *)(uintptr_t(token) & ~uintptr_t(0x3)); + return (RawScript)(uintptr_t(token) & ~uintptr_t(0x3)); } -static inline JSScript * +static inline UnrootedScript ScriptFromCalleeToken(CalleeToken token) { AutoAssertNoGC nogc; switch (GetCalleeTokenTag(token)) { case CalleeToken_Script: return CalleeTokenToScript(token); case CalleeToken_Function: - return CalleeTokenToFunction(token)->nonLazyScript().get(nogc); + return CalleeTokenToFunction(token)->nonLazyScript(); } JS_NOT_REACHED("invalid callee token tag"); - return NULL; + return UnrootedScript(NULL); } // In between every two frames lies a small header describing both frames. This // header, minimally, contains a returnAddress word and a descriptor word. The // descriptor describes the size and type of the previous frame, whereas the // returnAddress describes the address the newer frame (the callee) will return // to. The exact mechanism in which frames are laid out is architecture // dependent. @@ -264,17 +264,17 @@ MakeFrameDescriptor(uint32_t frameSize, # include "ion/arm/IonFrames-arm.h" #else # error "unsupported architecture" #endif namespace js { namespace ion { -JSScript * +UnrootedScript GetTopIonJSScript(JSContext *cx, const SafepointIndex **safepointIndexOut = NULL, void **returnAddrOut = NULL); void GetPcScript(JSContext *cx, MutableHandleScript scriptRes, jsbytecode **pcRes); // Given a slot index, returns the offset, in bytes, of that slot from an
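The IonFrames.h hunk relies on the CalleeToken encoding: a JSFunction* or RawScript is stored in a single word with its kind packed into the low two bits, and CalleeTokenToScript recovers the pointer by masking with ~0x3. The standalone demo below reproduces that tagging scheme with made-up names (MakeToken, GetTag, Untag); it is a sketch of the technique, not the ion:: API.

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    enum TokenTag { Tag_Function = 0x0, Tag_Script = 0x1 };
    typedef void *Token;

    static Token MakeToken(void *p, TokenTag tag) {
        // Aligned pointers always have zero low bits, so the tag fits there.
        assert((uintptr_t(p) & 0x3) == 0 && "pointer must be 4-byte aligned");
        return Token(uintptr_t(p) | uintptr_t(tag));
    }
    static TokenTag GetTag(Token t) { return TokenTag(uintptr_t(t) & 0x3); }
    static void *Untag(Token t)     { return (void *)(uintptr_t(t) & ~uintptr_t(0x3)); }

    int main() {
        alignas(4) static int script;            // stand-in for a JSScript
        Token t = MakeToken(&script, Tag_Script);
        assert(GetTag(t) == Tag_Script);
        assert(Untag(t) == &script);
        std::printf("tag=%d ptr=%p\n", int(GetTag(t)), Untag(t));
    }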
--- a/js/src/ion/VMFunctions.cpp +++ b/js/src/ion/VMFunctions.cpp @@ -48,17 +48,17 @@ ShouldMonitorReturnType(JSFunction *fun) bool InvokeFunction(JSContext *cx, JSFunction *fun, uint32_t argc, Value *argv, Value *rval) { Value fval = ObjectValue(*fun); // In order to prevent massive bouncing between Ion and JM, see if we keep // hitting functions that are uncompilable. if (fun->isInterpreted()) { - if (fun->isInterpretedLazy() && !fun->getOrCreateScript(cx).unsafeGet()) + if (fun->isInterpretedLazy() && !fun->getOrCreateScript(cx)) return false; if (!fun->nonLazyScript()->canIonCompile()) { JSScript *script = GetTopIonJSScript(cx); if (script->hasIonScript() && ++script->ion->slowCallCount >= js_IonOptions.slowCallLimit) { AutoFlushCache afc("InvokeFunction"); @@ -98,17 +98,17 @@ InvokeConstructor(JSContext *cx, JSObjec { Value fval = ObjectValue(*obj); // See the comment in InvokeFunction. bool needsMonitor; if (obj->isFunction()) { if (obj->toFunction()->isInterpretedLazy() && - !obj->toFunction()->getOrCreateScript(cx).unsafeGet()) + !obj->toFunction()->getOrCreateScript(cx)) { return false; } needsMonitor = ShouldMonitorReturnType(obj->toFunction()); } else { needsMonitor = true; }
--- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -7091,19 +7091,19 @@ JS_DescribeScriptedCaller(JSContext *cx, if (lineno) *lineno = 0; ScriptFrameIter i(cx); if (i.done()) return JS_FALSE; if (script) - *script = i.script().get(nogc); + *script = i.script(); if (lineno) - *lineno = js::PCToLineNumber(i.script().get(nogc), i.pc()); + *lineno = js::PCToLineNumber(i.script(), i.pc()); return JS_TRUE; } #ifdef JS_THREADSAFE static PRStatus CallOnce(void *func) { JSInitCallback init = JS_DATA_TO_FUNC_PTR(JSInitCallback, func);
--- a/js/src/jsapi.h +++ b/js/src/jsapi.h @@ -1616,22 +1616,22 @@ typedef JS::Handle<jsid> JSHandleId; typedef JS::MutableHandle<JSObject*> JSMutableHandleObject; typedef JS::MutableHandle<JSFunction*> JSMutableHandleFunction; typedef JS::MutableHandle<JSScript*> JSMutableHandleScript; typedef JS::MutableHandle<JSString*> JSMutableHandleString; typedef JS::MutableHandle<JS::Value> JSMutableHandleValue; typedef JS::MutableHandle<jsid> JSMutableHandleId; -typedef JS::RawObject JSRawObject; -typedef JS::RawFunction JSRawFunction; -typedef JS::RawScript JSRawScript; -typedef JS::RawString JSRawString; -typedef JS::RawId JSRawId; -typedef JS::RawValue JSRawValue; +typedef js::RawObject JSRawObject; +typedef js::RawFunction JSRawFunction; +typedef js::RawScript JSRawScript; +typedef js::RawString JSRawString; +typedef js::RawId JSRawId; +typedef js::RawValue JSRawValue; /* JSClass operation signatures. */ /* * Add, delete, or get a property named by id in obj. Note the jsid id * type -- id may be a string (Unicode property identifier) or an int (element * index). The *vp out parameter, on success, is the new property value after * an add or get. After a successful delete, *vp is JSVAL_FALSE iff
--- a/js/src/jscntxt.cpp +++ b/js/src/jscntxt.cpp @@ -658,17 +658,17 @@ PopulateReportBlame(JSContext *cx, JSErr * Walk stack until we find a frame that is associated with a non-builtin * rather than a builtin frame. */ NonBuiltinScriptFrameIter iter(cx); if (iter.done()) return; report->filename = iter.script()->filename; - report->lineno = PCToLineNumber(iter.script().get(nogc), iter.pc(), &report->column); + report->lineno = PCToLineNumber(iter.script(), iter.pc(), &report->column); report->originPrincipals = iter.script()->originPrincipals; } /* * We don't post an exception in this case, since doing so runs into * complications of pre-allocating an exception object which required * running the Exception class initializer early etc. * Instead we just invoke the errorReporter with an "Out Of Memory"
--- a/js/src/jsdbgapi.cpp +++ b/js/src/jsdbgapi.cpp @@ -443,18 +443,17 @@ extern JS_PUBLIC_API(void) JS_ReleaseFunctionLocalNameArray(JSContext *cx, void *mark) { cx->tempLifoAlloc().release(mark); } JS_PUBLIC_API(JSScript *) JS_GetFunctionScript(JSContext *cx, JSFunction *fun) { - AutoAssertNoGC nogc; - return fun->maybeNonLazyScript().get(nogc); + return fun->maybeNonLazyScript(); } JS_PUBLIC_API(JSNative) JS_GetFunctionNative(JSContext *cx, JSFunction *fun) { return fun->maybeNative(); } @@ -492,18 +491,17 @@ JS_BrokenFrameIterator(JSContext *cx, JS *iteratorp = Jsvalify(fp); return *iteratorp; } JS_PUBLIC_API(JSScript *) JS_GetFrameScript(JSContext *cx, JSStackFrame *fpArg) { - AutoAssertNoGC nogc; - return Valueify(fpArg)->script().get(nogc); + return Valueify(fpArg)->script(); } JS_PUBLIC_API(jsbytecode *) JS_GetFramePC(JSContext *cx, JSStackFrame *fpArg) { /* * This API is used to compute the line number for jsd and XPConnect * exception handling backtraces. Once the stack gets really deep, the @@ -538,17 +536,17 @@ JS_SetTopFrameAnnotation(JSContext *cx, // Note that if this frame is running in Ion, the actual calling frame // could be inlined or a callee and thus we won't have a correct |fp|. // To account for this, ion::InvalidationBailout will transfer an // annotation from the old cx->fp() to the new top frame. This works // because we will never EnterIon on a frame with an annotation. fp->setAnnotation(annotation); - RawScript script = fp->script().get(nogc); + UnrootedScript script = fp->script(); ReleaseAllJITCode(cx->runtime->defaultFreeOp()); // Ensure that we'll never try to compile this again. JS_ASSERT(!script->hasAnyIonScript()); script->ion = ION_DISABLED_SCRIPT; script->parallelIon = ION_DISABLED_SCRIPT; } @@ -1008,17 +1006,17 @@ GetAtomTotalSize(JSContext *cx, JSAtom * JS_PUBLIC_API(size_t) JS_GetFunctionTotalSize(JSContext *cx, JSFunction *fun) { AutoAssertNoGC nogc; size_t nbytes = sizeof *fun; nbytes += JS_GetObjectTotalSize(cx, fun); if (fun->isInterpreted()) - nbytes += JS_GetScriptTotalSize(cx, fun->nonLazyScript().get(nogc)); + nbytes += JS_GetScriptTotalSize(cx, fun->nonLazyScript()); if (fun->displayAtom()) nbytes += GetAtomTotalSize(cx, fun->displayAtom()); return nbytes; } JS_PUBLIC_API(size_t) JS_GetScriptTotalSize(JSContext *cx, JSScript *script) { @@ -1200,18 +1198,18 @@ js_CallContextDebugHandler(JSContext *cx JS_PUBLIC_API(StackDescription *) JS::DescribeStack(JSContext *cx, unsigned maxFrames) { AutoAssertNoGC nogc; Vector<FrameDescription> frames(cx); for (ScriptFrameIter i(cx); !i.done(); ++i) { FrameDescription desc; - desc.script = i.script().get(nogc); - desc.lineno = PCToLineNumber(i.script().get(nogc), i.pc()); + desc.script = i.script(); + desc.lineno = PCToLineNumber(i.script(), i.pc()); desc.fun = i.maybeCallee(); if (!frames.append(desc)) return NULL; if (frames.length() == maxFrames) break; } StackDescription *desc = js_new<StackDescription>();
--- a/js/src/jsfun.cpp +++ b/js/src/jsfun.cpp @@ -120,23 +120,21 @@ fun_getProperty(JSContext *cx, HandleObj return false; } ArgumentsObject *argsobj = ArgumentsObject::createUnexpected(cx, iter); if (!argsobj) return false; #ifdef JS_ION - AutoAssertNoGC nogc; - // If this script hasn't been compiled yet, make sure it will never // be compiled. IonMonkey does not guarantee |f.arguments| can be // fully recovered, so we try to mitigate observing this behavior by // detecting its use early. - RawScript script = iter.script().get(nogc); + UnrootedScript script = iter.script(); if (!script->hasAnyIonScript()) ion::ForbidCompilation(cx, script); #endif vp.setObject(*argsobj); return true; } @@ -339,17 +337,17 @@ fun_resolve(JSContext *cx, HandleObject const uint16_t offset = poisonPillProps[i]; if (JSID_IS_ATOM(id, OFFSET_TO_NAME(cx->runtime, offset))) { JS_ASSERT(!IsInternalFunctionObject(fun)); PropertyOp getter; StrictPropertyOp setter; unsigned attrs = JSPROP_PERMANENT; - if (fun->isInterpretedLazy() && !fun->getOrCreateScript(cx).unsafeGet()) + if (fun->isInterpretedLazy() && !fun->getOrCreateScript(cx)) return false; if (fun->isInterpreted() ? fun->inStrictMode() : fun->isBoundFunction()) { JSObject *throwTypeError = fun->global().getThrowTypeError(); getter = CastAsPropertyOp(throwTypeError); setter = CastAsStrictPropertyOp(throwTypeError); attrs |= JSPROP_GETTER | JSPROP_SETTER; } else { @@ -1107,17 +1105,17 @@ fun_isGenerator(JSContext *cx, unsigned RawFunction fun; if (!IsFunctionObject(vp[1], &fun)) { JS_SET_RVAL(cx, vp, BooleanValue(false)); return true; } bool result = false; if (fun->hasScript()) { - RawScript script = fun->nonLazyScript().get(nogc); + UnrootedScript script = fun->nonLazyScript(); JS_ASSERT(script->length != 0); result = script->isGenerator; } JS_SET_RVAL(cx, vp, BooleanValue(result)); return true; } #endif @@ -1475,17 +1473,17 @@ js_CloneFunctionObject(JSContext *cx, Ha NewObjectWithClassProto(cx, &FunctionClass, NULL, SkipScopeParent(parent), kind); if (!cloneobj) return NULL; RootedFunction clone(cx, cloneobj->toFunction()); clone->nargs = fun->nargs; clone->flags = fun->flags & ~JSFunction::EXTENDED; if (fun->isInterpreted()) { - clone->initScript(fun->nonLazyScript().unsafeGet()); + clone->initScript(fun->nonLazyScript()); clone->initEnvironment(parent); } else { clone->initNative(fun->native(), fun->jitInfo()); } clone->initAtom(fun->displayAtom()); if (kind == JSFunction::ExtendedFinalizeKind) { clone->flags |= JSFunction::EXTENDED;
--- a/js/src/jsfun.h +++ b/js/src/jsfun.h @@ -8,21 +8,22 @@ #define jsfun_h___ /* * JS function definitions. */ #include "jsprvtd.h" #include "jspubtd.h" #include "jsobj.h" #include "jsatom.h" -#include "jsscript.h" #include "jsstr.h" #include "gc/Barrier.h" +ForwardDeclareJS(Script); + namespace js { class FunctionExtended; } struct JSFunction : public JSObject { enum Flags { INTERPRETED = 0x0001, /* function has a JSScript and environment. */ NATIVE_CTOR = 0x0002, /* native that can be called as a constructor */ EXTENDED = 0x0004, /* structure is FunctionExtended */ @@ -173,44 +174,44 @@ struct JSFunction : public JSObject */ inline JSObject *environment() const; inline void setEnvironment(JSObject *obj); inline void initEnvironment(JSObject *obj); static inline size_t offsetOfEnvironment() { return offsetof(JSFunction, u.i.env_); } static inline size_t offsetOfAtom() { return offsetof(JSFunction, atom_); } - js::Return<JSScript*> getOrCreateScript(JSContext *cx) { + js::UnrootedScript getOrCreateScript(JSContext *cx) { JS_ASSERT(isInterpreted()); if (isInterpretedLazy()) { js::RootedFunction self(cx, this); js::MaybeCheckStackRoots(cx); if (!initializeLazyScript(cx)) - return js::NullPtr(); + return js::UnrootedScript(NULL); } JS_ASSERT(hasScript()); return JS::HandleScript::fromMarkedLocation(&u.i.script_); } bool maybeGetOrCreateScript(JSContext *cx, js::MutableHandle<JSScript*> script) { if (isNative()) { script.set(NULL); return true; } - script.set(getOrCreateScript(cx).unsafeGet()); + script.set(getOrCreateScript(cx)); return hasScript(); } - js::Return<JSScript*> nonLazyScript() const { + js::UnrootedScript nonLazyScript() const { JS_ASSERT(hasScript()); return JS::HandleScript::fromMarkedLocation(&u.i.script_); } - js::Return<JSScript*> maybeNonLazyScript() const { - return isInterpreted() ? nonLazyScript() : JS::NullPtr(); + js::UnrootedScript maybeNonLazyScript() const { + return isInterpreted() ? nonLazyScript() : js::UnrootedScript(NULL); } js::HeapPtrScript &mutableScript() { JS_ASSERT(isInterpreted()); return *(js::HeapPtrScript *)&u.i.script_; } inline void setScript(JSScript *script_);
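jsfun.h now forward-declares JSScript via ForwardDeclareJS(Script) instead of including jsscript.h, picking up the UnrootedScript/RawScript typedefs that gc/Root.h defines for the current build mode. The snippet below is a condensed, hypothetical version of that macro idea (the real ForwardDeclareJS also handles the JS-prefixed struct name and namespace wrapping); FORWARD_DECLARE and the Script class here are illustrative only.

    #include <cstdio>

    template <typename T> class Unrooted;   // debug-only checked wrapper (see gc/Root.h)

    #ifdef DEBUG
    # define FORWARD_DECLARE(type)              \
        class type;                             \
        typedef Unrooted<type*> Unrooted##type; \
        typedef type *Raw##type
    #else
    # define FORWARD_DECLARE(type)              \
        class type;                             \
        typedef type *Unrooted##type;           \
        typedef type *Raw##type
    #endif

    FORWARD_DECLARE(Script);   // declares Script, UnrootedScript and RawScript

    class Script {
      public:
        int lineno;
        Script() : lineno(10) {}
    };

    int main() {
        Script s;
        RawScript raw = &s;        // a plain Script* in every build mode
        std::printf("line %d\n", raw->lineno);
    }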
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -596,17 +596,17 @@ js_NewGCScript(JSContext *cx)
 }

 inline js::Shape *
 js_NewGCShape(JSContext *cx)
 {
     return js::gc::NewGCThing<js::Shape>(cx, js::gc::FINALIZE_SHAPE, sizeof(js::Shape));
 }

-inline js::Return<js::BaseShape*>
+inline js::UnrootedBaseShape
 js_NewGCBaseShape(JSContext *cx)
 {
     return js::gc::NewGCThing<js::BaseShape>(cx, js::gc::FINALIZE_BASE_SHAPE, sizeof(js::BaseShape));
 }

 #if JS_HAS_XML_SUPPORT
 extern JSXML *
 js_NewGCXML(JSContext *cx);
--- a/js/src/jsinfer.cpp +++ b/js/src/jsinfer.cpp @@ -1418,20 +1418,20 @@ TypeConstraintPropagateThis::newType(JSC if (!object->interpretedFunction) return; callee = object->interpretedFunction; } else { /* Ignore calls to primitives, these will go through a stub. */ return; } - if (!(callee->getOrCreateScript(cx).unsafeGet() && callee->nonLazyScript()->ensureHasTypes(cx))) + if (!(callee->getOrCreateScript(cx) && callee->nonLazyScript()->ensureHasTypes(cx))) return; - TypeSet *thisTypes = TypeScript::ThisTypes(callee->nonLazyScript().unsafeGet()); + TypeSet *thisTypes = TypeScript::ThisTypes(callee->nonLazyScript()); if (this->types) this->types->addSubset(cx, thisTypes); else thisTypes->addType(cx, this->type); } void TypeConstraintArith::newType(JSContext *cx, TypeSet *source, Type type) @@ -5556,17 +5556,17 @@ JSScript::makeAnalysis(JSContext *cx) } return true; } /* static */ bool JSFunction::setTypeForScriptedFunction(JSContext *cx, HandleFunction fun, bool singleton) { - JS_ASSERT(fun->nonLazyScript().unsafeGet()); + JS_ASSERT(fun->nonLazyScript()); JS_ASSERT(fun->nonLazyScript()->function() == fun); if (!cx->typeInferenceEnabled()) return true; if (singleton) { if (!setSingletonType(cx, fun)) return false; @@ -5712,17 +5712,17 @@ TypeObject * JSObject::makeLazyType(JSContext *cx) { JS_ASSERT(hasLazyType()); JS_ASSERT(cx->compartment == compartment()); RootedObject self(cx, this); /* De-lazification of functions can GC, so we need to do it up here. */ if (self->isFunction() && self->toFunction()->isInterpretedLazy()) { - if (!self->toFunction()->getOrCreateScript(cx).unsafeGet()) + if (!self->toFunction()->getOrCreateScript(cx)) return NULL; } JSProtoKey key = JSCLASS_CACHED_PROTO_KEY(getClass()); Rooted<TaggedProto> proto(cx, getTaggedProto()); TypeObject *type = cx->compartment->types.newTypeObject(cx, key, proto); AutoAssertNoGC nogc; if (!type) { if (cx->typeInferenceEnabled())
--- a/js/src/jsinferinlines.h +++ b/js/src/jsinferinlines.h @@ -698,17 +698,17 @@ UseNewTypeForClone(JSFunction *fun) * * Each instance of the innermost function will have a different wrapped * initialize method. We capture this, along with similar cases, by looking * for short scripts which use both .apply and arguments. For such scripts, * whenever creating a new instance of the function we both give that * instance a singleton type and clone the underlying script. */ - RawScript script = fun->nonLazyScript().get(nogc); + UnrootedScript script = fun->nonLazyScript(); if (script->length >= 50) return false; if (script->hasConsts() || script->hasObjects() || script->hasRegexps() || fun->isHeavyweight()) return false; bool hasArguments = false; @@ -900,17 +900,17 @@ TypeScript::GetPcScript(JSContext *cx, M { AutoAssertNoGC nogc; #ifdef JS_ION if (cx->fp()->beginsIonActivation()) { ion::GetPcScript(cx, script, pc); return; } #endif - script.set(cx->fp()->script().get(nogc)); + script.set(cx->fp()->script()); *pc = cx->regs().pc; } /* static */ inline void TypeScript::MonitorOverflow(JSContext *cx) { RootedScript script(cx); jsbytecode *pc;
--- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -2328,17 +2328,17 @@ BEGIN_CASE(JSOP_FUNCALL) DO_NEXT_OP(len); } if (!TypeMonitorCall(cx, args, construct)) goto error; InitialFrameFlags initial = construct ? INITIAL_CONSTRUCT : INITIAL_NONE; bool newType = cx->typeInferenceEnabled() && UseNewType(cx, script, regs.pc); - RawScript funScript = fun->getOrCreateScript(cx).unsafeGet(); + RawScript funScript = fun->getOrCreateScript(cx); if (!funScript) goto error; if (!cx->stack.pushInlineFrame(cx, regs, args, *fun, funScript, initial)) goto error; SET_SCRIPT(regs.fp()->script()); #ifdef JS_METHODJIT script->resetLoopCount();
--- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -5235,17 +5235,17 @@ dumpValue(const Value &v) JSFunction *fun = v.toObject().toFunction(); if (fun->displayAtom()) { fputs("<function ", stderr); FileEscapedString(stderr, fun->displayAtom(), 0); } else { fputs("<unnamed function", stderr); } if (fun->hasScript()) { - JSScript *script = fun->nonLazyScript().get(nogc); + UnrootedScript script = fun->nonLazyScript(); fprintf(stderr, " (%s:%u)", script->filename ? script->filename : "", script->lineno); } fprintf(stderr, " at %p>", (void *) fun); } else if (v.isObject()) { JSObject *obj = &v.toObject(); Class *clasp = obj->getClass(); fprintf(stderr, "<%s%s at %p>", @@ -5501,19 +5501,19 @@ JS_FRIEND_API(void) js_DumpBacktrace(JSContext *cx) { AutoAssertNoGC nogc; Sprinter sprinter(cx); sprinter.init(); size_t depth = 0; for (StackIter i(cx); !i.done(); ++i, ++depth) { if (i.isScript()) { - const char *filename = JS_GetScriptFilename(cx, i.script().get(nogc)); - unsigned line = JS_PCToLineNumber(cx, i.script().get(nogc), i.pc()); - RawScript script = i.script().get(nogc); + const char *filename = JS_GetScriptFilename(cx, i.script()); + unsigned line = JS_PCToLineNumber(cx, i.script(), i.pc()); + RawScript script = i.script(); sprinter.printf("#%d %14p %s:%d (%p @ %d)\n", depth, (i.isIon() ? 0 : i.interpFrame()), filename, line, script, i.pc() - script->code); } else { sprinter.printf("#%d ???\n", depth); } } fprintf(stdout, "%s", sprinter.string());
--- a/js/src/jsobjinlines.h +++ b/js/src/jsobjinlines.h @@ -810,17 +810,17 @@ inline bool JSObject::isStrictArguments( inline bool JSObject::isString() const { return hasClass(&js::StringClass); } inline bool JSObject::isTypedArray() const { return IsTypedArrayClass(getClass()); } inline bool JSObject::isWeakMap() const { return hasClass(&js::WeakMapClass); } inline bool JSObject::isWith() const { return hasClass(&js::WithClass); } inline bool JSObject::isDebugScope() const { - extern bool js_IsDebugScopeSlow(JS::RawObject obj); + extern bool js_IsDebugScopeSlow(js::RawObject obj); return getClass() == &js::ObjectProxyClass && js_IsDebugScopeSlow(const_cast<JSObject*>(this)); } #if JS_HAS_XML_SUPPORT inline bool JSObject::isNamespace() const { return hasClass(&js::NamespaceClass); } inline bool JSObject::isXML() const { return hasClass(&js::XMLClass); } inline bool
--- a/js/src/jsopcode.cpp +++ b/js/src/jsopcode.cpp @@ -1187,17 +1187,17 @@ js_NewPrinter(JSContext *cx, const char jp->strict = !!strict; jp->script = NULL; jp->dvgfence = NULL; jp->pcstack = NULL; jp->fun = fun; jp->localNames = NULL; jp->decompiledOpcodes = NULL; if (fun && fun->hasScript()) { - if (!SetPrinterLocalNames(cx, fun->nonLazyScript().unsafeGet(), jp)) { + if (!SetPrinterLocalNames(cx, fun->nonLazyScript(), jp)) { js_DestroyPrinter(jp); return NULL; } } return jp; } void @@ -1848,17 +1848,17 @@ DecompileSwitch(SprintStack *ss, TableEn #define LOCAL_ASSERT_RV(expr, rv) \ LOCAL_ASSERT_CUSTOM(expr, return (rv)) static JSAtom * GetArgOrVarAtom(JSPrinter *jp, unsigned slot) { LOCAL_ASSERT_RV(jp->fun, NULL); LOCAL_ASSERT_RV(slot < jp->script->bindings.count(), NULL); - LOCAL_ASSERT_RV(jp->script == jp->fun->nonLazyScript().unsafeGet(), NULL); + LOCAL_ASSERT_RV(jp->script == jp->fun->nonLazyScript(), NULL); JSAtom *name = (*jp->localNames)[slot].name(); #if !JS_HAS_DESTRUCTURING LOCAL_ASSERT_RV(name, NULL); #endif return name; } #define LOCAL_ASSERT(expr) LOCAL_ASSERT_RV(expr, "") @@ -4807,20 +4807,20 @@ Decompile(SprintStack *ss, jsbytecode *p /* * All allocation when decompiling is LIFO, using malloc or, * more commonly, arena-allocating from cx->tempLifoAlloc * Therefore after InitSprintStack succeeds, we must release * to mark before returning. */ LifoAllocScope las(&cx->tempLifoAlloc()); outerLocalNames = jp->localNames; - if (!SetPrinterLocalNames(cx, fun->nonLazyScript().unsafeGet(), jp)) + if (!SetPrinterLocalNames(cx, fun->nonLazyScript(), jp)) return NULL; - inner = fun->nonLazyScript().unsafeGet(); + inner = fun->nonLazyScript(); if (!InitSprintStack(cx, &ss2, jp, StackDepth(inner))) { js_delete(jp->localNames); jp->localNames = outerLocalNames; return NULL; } ss2.inGenExp = JS_TRUE; /* @@ -6219,17 +6219,17 @@ FindStartPC(JSContext *cx, ScriptFrameIt jsbytecode *current = *valuepc; if (spindex == JSDVG_IGNORE_STACK) return true; *valuepc = NULL; PCStack pcstack; - if (!pcstack.init(cx, iter.script().unsafeGet(), current)) + if (!pcstack.init(cx, iter.script(), current)) return false; if (spindex == JSDVG_SEARCH_STACK) { size_t index = iter.numFrameSlots(); Value s; // We search from fp->sp to base to find the most recently calculated // value matching v under assumption that it is it that caused
--- a/js/src/jsscript.cpp +++ b/js/src/jsscript.cpp @@ -2128,36 +2128,36 @@ js_GetScriptLineExtent(RawScript script) return 1 + lineno - script->lineno; } unsigned js::CurrentLine(JSContext *cx) { AutoAssertNoGC nogc; - return PCToLineNumber(cx->fp()->script().get(nogc), cx->regs().pc); + return PCToLineNumber(cx->fp()->script(), cx->regs().pc); } void js::CurrentScriptFileLineOriginSlow(JSContext *cx, const char **file, unsigned *linenop, JSPrincipals **origin) { AutoAssertNoGC nogc; NonBuiltinScriptFrameIter iter(cx); if (iter.done()) { *file = NULL; *linenop = 0; *origin = NULL; return; } - RawScript script = iter.script().get(nogc); + UnrootedScript script = iter.script(); *file = script->filename; - *linenop = PCToLineNumber(iter.script().get(nogc), iter.pc()); + *linenop = PCToLineNumber(iter.script(), iter.pc()); *origin = script->originPrincipals; } template <class T> static inline T * Rebase(RawScript dst, RawScript src, T *srcp) { size_t off = reinterpret_cast<uint8_t *>(srcp) - src->data;
--- a/js/src/jsscriptinlines.h +++ b/js/src/jsscriptinlines.h @@ -43,17 +43,17 @@ CurrentScriptFileLineOriginSlow(JSContex inline void CurrentScriptFileLineOrigin(JSContext *cx, const char **file, unsigned *linenop, JSPrincipals **origin, LineOption opt = NOT_CALLED_FROM_JSOP_EVAL) { if (opt == CALLED_FROM_JSOP_EVAL) { AutoAssertNoGC nogc; JS_ASSERT(JSOp(*cx->regs().pc) == JSOP_EVAL); JS_ASSERT(*(cx->regs().pc + JSOP_EVAL_LENGTH) == JSOP_LINENO); - RawScript script = cx->fp()->script().get(nogc); + UnrootedScript script = cx->fp()->script(); *file = script->filename; *linenop = GET_UINT16(cx->regs().pc + JSOP_EVAL_LENGTH); *origin = script->originPrincipals; return; } CurrentScriptFileLineOriginSlow(cx, file, linenop, origin); }
--- a/js/src/jsstr.cpp +++ b/js/src/jsstr.cpp @@ -2364,17 +2364,17 @@ LambdaIsGetElem(JSObject &lambda) if (!lambda.isFunction()) return NULL; JSFunction *fun = lambda.toFunction(); if (!fun->hasScript()) return NULL; - RawScript script = fun->nonLazyScript().get(nogc); + UnrootedScript script = fun->nonLazyScript(); jsbytecode *pc = script->code; /* * JSOP_GETALIASEDVAR tells us exactly where to find the base object 'b'. * Rule out the (unlikely) possibility of a heavyweight function since it * would make our scope walk off by 1. */ if (JSOp(*pc) != JSOP_GETALIASEDVAR || fun->isHeavyweight())
--- a/js/src/jsworkers.cpp +++ b/js/src/jsworkers.cpp @@ -87,40 +87,40 @@ js::CancelOffThreadIonCompile(JSCompartm if (!ion) return; AutoLockWorkerThreadState lock(compartment->rt); /* Cancel any pending entries for which processing hasn't started. */ for (size_t i = 0; i < state.ionWorklist.length(); i++) { ion::IonBuilder *builder = state.ionWorklist[i]; - if (CompiledScriptMatches(compartment, script, builder->script().unsafeGet())) { + if (CompiledScriptMatches(compartment, script, builder->script())) { FinishOffThreadIonCompile(builder); state.ionWorklist[i--] = state.ionWorklist.back(); state.ionWorklist.popBack(); } } /* Wait for in progress entries to finish up. */ for (size_t i = 0; i < state.numThreads; i++) { const WorkerThread &helper = state.threads[i]; while (helper.ionBuilder && - CompiledScriptMatches(compartment, script, helper.ionBuilder->script().unsafeGet())) + CompiledScriptMatches(compartment, script, helper.ionBuilder->script())) { helper.ionBuilder->cancel(); state.wait(WorkerThreadState::MAIN); } } ion::OffThreadCompilationVector &compilations = ion->finishedOffThreadCompilations(); /* Cancel code generation for any completed entries. */ for (size_t i = 0; i < compilations.length(); i++) { ion::IonBuilder *builder = compilations[i]; - if (CompiledScriptMatches(compartment, script, builder->script().unsafeGet())) { + if (CompiledScriptMatches(compartment, script, builder->script())) { ion::FinishOffThreadBuilder(builder); compilations[i--] = compilations.back(); compilations.popBack(); } } } bool @@ -311,17 +311,17 @@ WorkerThread::threadLoop() return; } state.wait(WorkerThreadState::WORKER); } ionBuilder = state.ionWorklist.popCopy(); DebugOnly<ion::ExecutionMode> executionMode = ionBuilder->info().executionMode(); - JS_ASSERT(GetIonScript(ionBuilder->script().unsafeGet(), executionMode) == ION_COMPILING_SCRIPT); + JS_ASSERT(GetIonScript(ionBuilder->script(), executionMode) == ION_COMPILING_SCRIPT); state.unlock(); { ion::IonContext ictx(NULL, ionBuilder->script()->compartment(), &ionBuilder->temp()); ionBuilder->setBackgroundCodegen(ion::CompileBackEnd(ionBuilder)); }
--- a/js/src/methodjit/BaseAssembler.h +++ b/js/src/methodjit/BaseAssembler.h @@ -143,17 +143,17 @@ class Assembler : public ValueAssembler #endif sps(sps), vmframe(vmframe), pc(NULL) { AutoAssertNoGC nogc; startLabel = label(); if (vmframe) - sps->setPushed(vmframe->script().get(nogc)); + sps->setPushed(vmframe->script()); } Assembler(MJITInstrumentation *sps, jsbytecode **pc) : callPatches(SystemAllocPolicy()), availInCall(0), extraStackSpace(0), stackAdjust(0), #ifdef DEBUG
--- a/js/src/methodjit/BaseCompiler.h +++ b/js/src/methodjit/BaseCompiler.h @@ -133,17 +133,17 @@ class LinkerHelper : public JSC::LinkBuf m_size = masm.size(); // must come after call to executableAllocAndCopy()! return pool; } JSC::CodeLocationLabel finalize(VMFrame &f) { AutoAssertNoGC nogc; masm.finalize(*this); JSC::CodeLocationLabel label = finalizeCodeAddendum(); - Probes::registerICCode(f.cx, f.chunk(), f.script().get(nogc), f.pc(), + Probes::registerICCode(f.cx, f.chunk(), f.script(), f.pc(), label.executableAddress(), masm.size()); return label; } void maybeLink(MaybeJump jump, JSC::CodeLocationLabel label) { if (!jump.isSet()) return; link(jump.get(), label);
--- a/js/src/methodjit/InvokeHelpers.cpp
+++ b/js/src/methodjit/InvokeHelpers.cpp
@@ -294,17 +294,17 @@ UncachedInlineCall(VMFrame &f, InitialFr
     RootedScript fscript(cx, f.script());
     bool newType = construct && cx->typeInferenceEnabled() &&
         types::UseNewType(cx, fscript, f.pc());

     if (!types::TypeMonitorCall(cx, args, construct))
         return false;

     /* Try to compile if not already compiled. */
-    if (ShouldJaegerCompileCallee(cx, f.script().unsafeGet(), newscript, f.jit())) {
+    if (ShouldJaegerCompileCallee(cx, f.script(), newscript, f.jit())) {
         CompileStatus status = CanMethodJIT(cx, newscript, newscript->code, construct,
                                             CompileRequest_JIT, f.fp());
         if (status == Compile_Error) {
             /* A runtime exception was thrown, get out. */
             return false;
         }
         if (status == Compile_Abort)
             *unjittable = true;
@@ -658,17 +658,17 @@ stubs::CreateThis(VMFrame &f, JSObject *
         THROW();
     fp->thisValue() = ObjectValue(*obj);
 }

 void JS_FASTCALL
 stubs::ScriptDebugPrologue(VMFrame &f)
 {
     AssertCanGC();
-    Probes::enterScript(f.cx, f.script().unsafeGet(), f.script()->function(), f.fp());
+    Probes::enterScript(f.cx, f.script(), f.script()->function(), f.fp());
     JSTrapStatus status = js::ScriptDebugPrologue(f.cx, f.fp());
     switch (status) {
       case JSTRAP_CONTINUE:
         break;
       case JSTRAP_RETURN:
         *f.returnAddressLocation() = f.cx->jaegerRuntime().forceReturnFromFastCall();
         return;
       case JSTRAP_ERROR:
@@ -685,41 +685,41 @@ stubs::ScriptDebugEpilogue(VMFrame &f)
     if (!js::ScriptDebugEpilogue(f.cx, f.fp(), JS_TRUE))
         THROW();
 }

 void JS_FASTCALL
 stubs::ScriptProbeOnlyPrologue(VMFrame &f)
 {
     AutoAssertNoGC nogc;
-    Probes::enterScript(f.cx, f.script().get(nogc), f.script()->function(), f.fp());
+    Probes::enterScript(f.cx, f.script(), f.script()->function(), f.fp());
 }

 void JS_FASTCALL
 stubs::ScriptProbeOnlyEpilogue(VMFrame &f)
 {
     AutoAssertNoGC nogc;
-    Probes::exitScript(f.cx, f.script().get(nogc), f.script()->function(), f.fp());
+    Probes::exitScript(f.cx, f.script(), f.script()->function(), f.fp());
 }

 void JS_FASTCALL
 stubs::CrossChunkShim(VMFrame &f, void *edge_)
 {
     AssertCanGC();
     DebugOnly<CrossChunkEdge*> edge = (CrossChunkEdge *) edge_;

     mjit::ExpandInlineFrames(f.cx->compartment);

-    RawScript script = f.script().unsafeGet();
+    UnrootedScript script = f.script();
     JS_ASSERT(edge->target < script->length);
     JS_ASSERT(script->code + edge->target == f.pc());

-    CompileStatus status = CanMethodJIT(f.cx, script, f.pc(), f.fp()->isConstructing(),
+    CompileStatus status = CanMethodJIT(f.cx, DropUnrooted(script), f.pc(),
+                                        f.fp()->isConstructing(),
                                         CompileRequest_Interpreter, f.fp());
-    script = NULL;
     if (status == Compile_Error)
         THROW();

     void **addr = f.returnAddressLocation();
     *addr = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpoline);

     f.fp()->setRejoin(StubRejoin(REJOIN_RESUME));
 }
@@ -865,17 +865,17 @@ js_InternalInterpret(void *returnData, v
         JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto);
         if (!obj)
             return js_InternalThrow(f);
         fp->thisValue() = ObjectValue(*obj);
         /* FALLTHROUGH */
       }

       case REJOIN_THIS_CREATED: {
-        Probes::enterScript(f.cx, f.script().unsafeGet(), f.script()->function(), fp);
+        Probes::enterScript(f.cx, f.script(), f.script()->function(), fp);

        if (script->debugMode) {
            JSTrapStatus status = js::ScriptDebugPrologue(f.cx, f.fp());
            switch (status) {
              case JSTRAP_CONTINUE:
                break;
              case JSTRAP_RETURN: {
                /* Advance to the JSOP_STOP at the end of the script.
                 */
@@ -925,17 +925,17 @@ js_InternalInterpret(void *returnData, v
         RootedObject callee(cx, &fp->callee());
         JSObject *obj = js_CreateThisForFunction(cx, callee, types::UseNewTypeAtEntry(cx, fp));
         if (!obj)
             return js_InternalThrow(f);
         fp->functionThis() = ObjectValue(*obj);
       }
         /* FALLTHROUGH */

       case REJOIN_EVAL_PROLOGUE:
-        Probes::enterScript(cx, f.script().unsafeGet(), f.script()->function(), fp);
+        Probes::enterScript(cx, f.script(), f.script()->function(), fp);
         if (cx->compartment->debugMode()) {
             JSTrapStatus status = ScriptDebugPrologue(cx, fp);
             switch (status) {
               case JSTRAP_CONTINUE:
                 break;
               case JSTRAP_RETURN:
                 return f.cx->jaegerRuntime().forceReturnFromFastCall();
               case JSTRAP_ERROR:
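A note on the DropUnrooted pattern used in CrossChunkShim above: the old code passed the raw script into a call that can GC and then manually wrote |script = NULL;| so it could not be used afterwards. A minimal sketch of the replacement pattern (illustrative only, not part of this changeset; CompileSomething is a hypothetical callee that can trigger GC):

    // Illustrative sketch, assuming the UnrootedScript/DropUnrooted helpers
    // declared in gc/Root.h in this tree.
    void
    ExampleCrossChunkPattern(VMFrame &f)
    {
        UnrootedScript script = f.script();   // safe to use while no GC can occur
        JS_ASSERT(script->code <= f.pc());

        // Hand the pointer to a call that may GC. As I understand it,
        // DropUnrooted returns the value and invalidates the local (in debug
        // builds), replacing the manual |script = NULL;| of the old code.
        CompileSomething(f.cx, DropUnrooted(script), f.pc());
    }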
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -213,17 +213,17 @@ struct VMFrame
      */
     StackFrame *fp() { return regs.fp(); }
     mjit::JITScript *jit() { return fp()->jit(); }

     inline mjit::JITChunk *chunk();
     inline unsigned chunkIndex();

     /* Get the inner script/PC in case of inlining. */
-    inline Return<JSScript*> script();
+    inline UnrootedScript script();
     inline jsbytecode *pc();

 #if defined(JS_CPU_SPARC)
     static const size_t offsetOfFp = 30 * sizeof(void *) + FrameRegs::offsetOfFp;
     static const size_t offsetOfInlined = 30 * sizeof(void *) + FrameRegs::offsetOfInlined;
 #elif defined(JS_CPU_MIPS)
     static const size_t offsetOfFp = 8 * sizeof(void *) + FrameRegs::offsetOfFp;
     static const size_t offsetOfInlined = 8 * sizeof(void *) + FrameRegs::offsetOfInlined;
@@ -1059,17 +1059,17 @@ VMFrame::chunk()
 }

 inline unsigned
 VMFrame::chunkIndex()
 {
     return jit()->chunkIndex(regs.pc);
 }

-inline Return<JSScript*>
+inline UnrootedScript
 VMFrame::script()
 {
     AutoAssertNoGC nogc;
     if (regs.inlined())
         return chunk()->inlineFrames()[regs.inlined()->inlineIndex].fun->nonLazyScript();
     return fp()->script();
 }
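With VMFrame::script() now returning UnrootedScript instead of Return<JSScript*>, callers inside a no-GC region no longer need .get(nogc)/.unsafeGet(). A caller-side sketch (illustrative only, not part of this changeset; UseScript is a hypothetical helper taking a raw JSScript *):

    void
    ExampleCaller(VMFrame &f)
    {
        AutoAssertNoGC nogc;                 // promise: no GC in this scope
        // Before this patch: JSScript *script = f.script().get(nogc);
        // After it, the UnrootedScript return is used directly:
        UnrootedScript script = f.script();
        UseScript(script, f.pc());
    }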
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -879,17 +879,17 @@ class CallCompiler : public BaseCompiler
         /*
          * Write the rejoin state to indicate this is a compilation call made
          * from an IC (the recompiler cannot detect calls made from ICs
          * automatically).
          */
         masm.storePtr(ImmPtr((void *) ic.frameSize.rejoinState(f.pc(), false)),
                       FrameAddress(offsetof(VMFrame, stubRejoin)));

-        masm.bumpStubCount(f.script().get(nogc), f.pc(), Registers::tempCallReg());
+        masm.bumpStubCount(f.script(), f.pc(), Registers::tempCallReg());

         /* Try and compile. On success we get back the nmap pointer. */
         void *compilePtr = JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction);
         DataLabelPtr inlined;
         if (ic.frameSize.isStatic()) {
             masm.move(Imm32(ic.frameSize.staticArgc()), Registers::ArgReg1);
             masm.fallibleVMCall(cx->typeInferenceEnabled(), compilePtr, f.regs.pc, &inlined,
                                 ic.frameSize.staticLocalSlots());
@@ -996,17 +996,17 @@ class CallCompiler : public BaseCompiler
         RegisterID t0 = tempRegs.takeAnyReg().reg();

         /* Guard that it's actually a function object. */
         Jump claspGuard = masm.testObjClass(Assembler::NotEqual, ic.funObjReg, t0, &FunctionClass);

         /* Guard that it's the same script. */
         Address scriptAddr(ic.funObjReg, JSFunction::offsetOfNativeOrScript());
         Jump funGuard = masm.branchPtr(Assembler::NotEqual, scriptAddr,
-                                       ImmPtr(obj->toFunction()->nonLazyScript().get(nogc)));
+                                       ImmPtr(obj->toFunction()->nonLazyScript()));
         Jump done = masm.jump();

         LinkerHelper linker(masm, JSC::JAEGER_CODE);
         JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_ClosureStub);
         if (!ep)
             return false;

         ic.hasJsFunCheck = true;
@@ -1254,19 +1254,18 @@ class CallCompiler : public BaseCompiler
             {
                 if (!generateIonStub())
                     THROWV(NULL);
             }
 #endif
             return NULL;
         }

-        AutoAssertNoGC nogc;
         JS_ASSERT(fun);
-        JSScript *script = fun->nonLazyScript().get(nogc);
+        UnrootedScript script = fun->nonLazyScript();
         JS_ASSERT(script);

         uint32_t flags = callingNew ? StackFrame::CONSTRUCTING : 0;

         if (!ic.hit) {
             ic.hit = true;
             return ucr.codeAddr;
         }
@@ -1428,17 +1427,17 @@ void
 ic::GenerateArgumentCheckStub(VMFrame &f)
 {
     AutoAssertNoGC nogc;
     JS_ASSERT(f.cx->typeInferenceEnabled());

     JITScript *jit = f.jit();
     StackFrame *fp = f.fp();
     JSFunction *fun = fp->fun();
-    JSScript *script = fun->nonLazyScript().get(nogc);
+    UnrootedScript script = fun->nonLazyScript();

     if (jit->argsCheckPool)
         jit->resetArgsCheck();

     Assembler masm;
     Vector<Jump> mismatches(f.cx);

     if (!f.fp()->isConstructing()) {
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -765,17 +765,17 @@ struct GetPropHelper {
         return testForGet();
     }
 };

 namespace js {
 namespace mjit {

 inline void
-MarkNotIdempotent(JSScript *script, jsbytecode *pc)
+MarkNotIdempotent(UnrootedScript script, jsbytecode *pc)
 {
     if (!script->hasAnalysis())
         return;
     analyze::Bytecode *code = script->analysis()->maybeCode(pc);
     if (!code)
         return;
     code->notIdempotent = true;
 }
@@ -1075,17 +1075,17 @@ class GetPropCompiler : public PICStubCo
         if (tempRegs.hasReg(pic.objReg)) {
             tempRegs.takeReg(pic.objReg);
         } else {
             holdObjReg = tempRegs.takeAnyReg().reg();
             masm.move(pic.objReg, holdObjReg);
         }

         RegisterID t0 = tempRegs.takeAnyReg().reg();
-        masm.bumpStubCount(f.script().get(nogc), f.pc(), t0);
+        masm.bumpStubCount(f.script(), f.pc(), t0);

         /*
          * Use three values above sp on the stack for use by the call to store
          * the object and id being passed into the call as handles and to store
          * the resulting value. Temporary slots are used by GETPROP for this,
          * plus there is extra room on the stack reserved for a callee frame.
          */
         int32_t initialFrameDepth = f.regs.sp - f.fp()->slots() + 3;
@@ -1189,17 +1189,17 @@ class GetPropCompiler : public PICStubCo
         if (tempRegs.hasReg(pic.objReg)) {
             tempRegs.takeReg(pic.objReg);
         } else {
             holdObjReg = tempRegs.takeAnyReg().reg();
             masm.move(pic.objReg, holdObjReg);
         }

         RegisterID t0 = tempRegs.takeAnyReg().reg();
-        masm.bumpStubCount(f.script().get(nogc), f.pc(), t0);
+        masm.bumpStubCount(f.script(), f.pc(), t0);

         /*
          * A JSNative has the following signature:
          *
          *   JSBool native(JSContext *cx, unsigned argc, Value *vp);
          *
          * Since we are calling a getter, argc is always 0. vp must point to two
          * values, the callee and the holder. We use vp == sp to avoid clobbering
@@ -1268,20 +1268,17 @@ class GetPropCompiler : public PICStubCo
         SkipRoot skip(cx, &masm);

         Label start;
         Jump shapeGuardJump;
         Jump argsLenGuard;

         bool setStubShapeOffset = true;
         if (obj->isDenseArray()) {
-            {
-                RawScript script = f.script().unsafeGet();
-                MarkNotIdempotent(script, f.pc());
-            }
+            MarkNotIdempotent(f.script(), f.pc());

             start = masm.label();
             shapeGuardJump = masm.branchPtr(Assembler::NotEqual,
                                             Address(pic.objReg, JSObject::offsetOfShape()),
                                             ImmPtr(obj->lastProperty()));

             /*
              * No need to assert validity of GETPROP_STUB_SHAPE_JUMP in this case:
@@ -1387,20 +1384,17 @@ class GetPropCompiler : public PICStubCo
             }

             pic.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
         } else {
             pic.secondShapeGuard = 0;
         }

         if (shape && !shape->hasDefaultGetter()) {
-            {
-                RawScript script = f.script().unsafeGet();
-                MarkNotIdempotent(script, f.pc());
-            }
+            MarkNotIdempotent(f.script(), f.pc());

             if (shape->hasGetterValue()) {
                 generateNativeGetterStub(masm, shape, start, shapeMismatches);
             } else {
                 jsid userid;
                 if (!shape->getUserId(cx, &userid))
                     return error();
                 generateGetterStub(masm, shape, userid, start, shapeMismatches);
@@ -1495,34 +1489,29 @@ class GetPropCompiler : public PICStubCo
         GetPropHelper<GetPropCompiler> getprop(cx, obj, name, *this, f);

         RecompilationMonitor monitor(cx);

         LookupStatus status = getprop.lookupAndTest();
         if (status != Lookup_Cacheable && status != Lookup_NoProperty) {
             /* Don't touch the IC if it may have been destroyed. */
             if (!monitor.recompiled())
                 pic.hadUncacheable = true;
-            RawScript script = f.script().unsafeGet();
-            MarkNotIdempotent(script, f.pc());
+            MarkNotIdempotent(f.script(), f.pc());
             return status;
         }

         // Mark as not idempotent to avoid recompilation in Ion Monkey
         // GetPropertyCache.
-        if (!obj->hasIdempotentProtoChain()) {
-            RawScript script = f.script().unsafeGet();
-            MarkNotIdempotent(script, f.pc());
-        }
+        if (!obj->hasIdempotentProtoChain())
+            MarkNotIdempotent(f.script(), f.pc());

         // The property is missing, Mark as not idempotent to avoid
         // recompilation in Ion Monkey GetPropertyCache.
-        if (!getprop.holder) {
-            RawScript script = f.script().unsafeGet();
-            MarkNotIdempotent(script, f.pc());
-        }
+        if (!getprop.holder)
+            MarkNotIdempotent(f.script(), f.pc());

         if (hadGC())
             return Lookup_Uncacheable;

         if (obj == getprop.holder &&
             getprop.shape->hasDefaultGetter() &&
             !pic.inlinePathPatched) {
--- a/js/src/methodjit/Retcon.cpp
+++ b/js/src/methodjit/Retcon.cpp
@@ -345,17 +345,17 @@ ClearAllFrames(JSCompartment *compartmen
     for (VMFrame *f = compartment->rt->jaegerRuntime().activeFrame();
          f != NULL;
          f = f->previous)
     {
         if (f->entryfp->compartment() != compartment)
             continue;

-        Recompiler::patchFrame(compartment, f, f->fp()->script().get(nogc));
+        Recompiler::patchFrame(compartment, f, f->fp()->script());

        // Clear ncode values from all frames associated with the VMFrame.
        // Patching the VMFrame's return address will cause all its frames to
        // finish in the interpreter, unless the interpreter enters one of the
        // intermediate frames at a loop boundary (where EnterMethodJIT will
        // overwrite ncode). However, leaving stale values for ncode in stack
        // frames can confuse the recompiler, which may see the VMFrame before
        // it has resumed execution.
--- a/js/src/methodjit/StubCalls.cpp
+++ b/js/src/methodjit/StubCalls.cpp
@@ -861,17 +861,17 @@ stubs::TriggerIonCompile(VMFrame &f)
 }
 #endif

 void JS_FASTCALL
 stubs::RecompileForInline(VMFrame &f)
 {
     AutoAssertNoGC nogc;
     ExpandInlineFrames(f.cx->compartment);
-    Recompiler::clearStackReferences(f.cx->runtime->defaultFreeOp(), f.script().get(nogc));
+    Recompiler::clearStackReferences(f.cx->runtime->defaultFreeOp(), f.script());
     f.jit()->destroyChunk(f.cx->runtime->defaultFreeOp(), f.chunkIndex(),
                           /* resetUses = */ false);
 }

 void JS_FASTCALL
 stubs::Trap(VMFrame &f, uint32_t trapTypes)
 {
     Value rval;
@@ -1329,17 +1329,17 @@ FindNativeCode(VMFrame &f, jsbytecode *t
     return NULL;
 }

 void * JS_FASTCALL
 stubs::LookupSwitch(VMFrame &f, jsbytecode *pc)
 {
     AutoAssertNoGC nogc;
     jsbytecode *jpc = pc;
-    JSScript *script = f.fp()->script().get(nogc);
+    UnrootedScript script = f.fp()->script();

     /* This is correct because the compiler adjusts the stack beforehand. */
     Value lval = f.regs.sp[-1];

     if (!lval.isPrimitive())
         return FindNativeCode(f, pc + GET_JUMP_OFFSET(pc));

     JS_ASSERT(pc[0] == JSOP_LOOKUPSWITCH);
@@ -1655,17 +1655,17 @@ stubs::CheckArgumentTypes(VMFrame &f)

 #ifdef DEBUG
 void JS_FASTCALL
 stubs::AssertArgumentTypes(VMFrame &f)
 {
     AutoAssertNoGC nogc;
     StackFrame *fp = f.fp();
     JSFunction *fun = fp->fun();
-    RawScript script = fun->nonLazyScript().get(nogc);
+    UnrootedScript script = fun->nonLazyScript();

     /*
      * Don't check the type of 'this' for constructor frames, the 'this' value
      * has not been constructed yet.
      */
     if (!fp->isConstructing()) {
         Type type = GetValueType(f.cx, fp->thisValue());
         if (!TypeScript::ThisTypes(script)->hasType(type))
@@ -1700,17 +1700,17 @@ stubs::InvariantFailure(VMFrame &f, void
      * recompilation we will return to the call's rejoin point.
      */
     void *repatchCode = f.scratch;
     JS_ASSERT(repatchCode);
     void **frameAddr = f.returnAddressLocation();
     *frameAddr = repatchCode;

     /* Recompile the outermost script, and don't hoist any bounds checks. */
-    RawScript script = f.fp()->script().get(nogc);
+    UnrootedScript script = f.fp()->script();
     JS_ASSERT(!script->failedBoundsCheck);
     script->failedBoundsCheck = true;

     ExpandInlineFrames(f.cx->compartment);
     mjit::Recompiler::clearStackReferences(f.cx->runtime->defaultFreeOp(), script);
     mjit::ReleaseScriptCode(f.cx->runtime->defaultFreeOp(), script);
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -1536,17 +1536,17 @@ TrapHandler(JSContext *cx, JSScript *, j
 {
     JSString *str = JSVAL_TO_STRING(closure);

     ScriptFrameIter iter(cx);
     JS_ASSERT(!iter.done());

     /* Debug-mode currently disables Ion compilation. */
     JSStackFrame *caller = Jsvalify(iter.interpFrame());
-    RawScript script = iter.script().unsafeGet();
+    RootedScript script(cx, iter.script());

     size_t length;
     const jschar *chars = JS_GetStringCharsAndLength(cx, str, &length);
     if (!chars)
         return JSTRAP_ERROR;

     if (!JS_EvaluateUCInStackFrame(cx, caller, chars, length, script->filename,
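The shell hunk above shows the other half of the rule: when the script must survive a call that can GC (JS_EvaluateUCInStackFrame here), an unrooted pointer is not enough and the handler now uses RootedScript. A sketch of that rule, not shell code itself (EvaluateSomething is a hypothetical stand-in for any call that can trigger a GC):

    JSBool
    ExampleTrapPattern(JSContext *cx, ScriptFrameIter &iter)
    {
        RootedScript script(cx, iter.script());  // rooted: survives GC during the call
        if (!EvaluateSomething(cx, script->filename, script->lineno))
            return JS_FALSE;
        return JS_TRUE;
    }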
--- a/js/src/vm/ArgumentsObject-inl.h
+++ b/js/src/vm/ArgumentsObject-inl.h
@@ -69,33 +69,33 @@ ArgumentsObject::setArg(unsigned i, cons
 inline const Value &
 ArgumentsObject::element(uint32_t i) const
 {
     AutoAssertNoGC nogc;
     JS_ASSERT(!isElementDeleted(i));
     const Value &v = data()->args[i];
     if (v.isMagic(JS_FORWARD_TO_CALL_OBJECT)) {
         CallObject &callobj = getFixedSlot(MAYBE_CALL_SLOT).toObject().asCall();
-        for (AliasedFormalIter fi(callobj.callee().nonLazyScript().get(nogc)); ; fi++) {
+        for (AliasedFormalIter fi(callobj.callee().nonLazyScript()); ; fi++) {
             if (fi.frameIndex() == i)
                 return callobj.aliasedVar(fi);
         }
     }
     return v;
 }

 inline void
 ArgumentsObject::setElement(uint32_t i, const Value &v)
 {
     AutoAssertNoGC nogc;
     JS_ASSERT(!isElementDeleted(i));
     HeapValue &lhs = data()->args[i];
     if (lhs.isMagic(JS_FORWARD_TO_CALL_OBJECT)) {
         CallObject &callobj = getFixedSlot(MAYBE_CALL_SLOT).toObject().asCall();
-        for (AliasedFormalIter fi(callobj.callee().nonLazyScript().get(nogc)); ; fi++) {
+        for (AliasedFormalIter fi(callobj.callee().nonLazyScript()); ; fi++) {
             if (fi.frameIndex() == i) {
                 callobj.setAliasedVar(fi, v);
                 return;
             }
         }
     }
     lhs = v;
 }
--- a/js/src/vm/ArgumentsObject.cpp
+++ b/js/src/vm/ArgumentsObject.cpp
@@ -43,18 +43,17 @@ CopyStackFrameArguments(const StackFrame
         while (src != end)
             (dst++)->init(*src++);
     }
 }

 /* static */ void
 ArgumentsObject::MaybeForwardToCallObject(StackFrame *fp, JSObject *obj, ArgumentsData *data)
 {
-    AutoAssertNoGC nogc;
-    RawScript script = fp->script().get(nogc);
+    UnrootedScript script = fp->script();
     if (fp->fun()->isHeavyweight() && script->argsObjAliasesFormals()) {
         obj->initFixedSlot(MAYBE_CALL_SLOT, ObjectValue(fp->callObj()));
         for (AliasedFormalIter fi(script); fi; fi++)
             data->args[fi.frameIndex()] = MagicValue(JS_FORWARD_TO_CALL_OBJECT);
     }
 }

 struct CopyStackFrameArgs
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -1193,17 +1193,17 @@ Debugger::onSingleStep(JSContext *cx, Va
      * anyway, to make sure the count has the correct non-zero value.
      *
      * The converse --- ensuring that we do receive traps when we should --- can
      * be done with unit tests.
      */
     {
         AutoAssertNoGC nogc;
         uint32_t stepperCount = 0;
-        JSScript *trappingScript = fp->script().get(nogc);
+        UnrootedScript trappingScript = fp->script();
         GlobalObject *global = &fp->global();
         if (GlobalObject::DebuggerVector *debuggers = global->getDebuggers()) {
             for (Debugger **p = debuggers->begin(); p != debuggers->end(); p++) {
                 Debugger *dbg = *p;
                 for (FrameMap::Range r = dbg->frames.all(); !r.empty(); r.popFront()) {
                     StackFrame *frame = r.front().key;
                     JSObject *frameobj = r.front().value;
                     if (frame->script() == trappingScript &&
@@ -3491,17 +3491,17 @@ DebuggerFrame_getScript(JSContext *cx, u
     return true;
 }

 static JSBool
 DebuggerFrame_getOffset(JSContext *cx, unsigned argc, Value *vp)
 {
     THIS_FRAME(cx, argc, vp, "get offset", args, thisobj, fp);
     AutoAssertNoGC nogc;
-    RawScript script = fp->script().get(nogc);
+    UnrootedScript script = fp->script();
     jsbytecode *pc = fp->pcQuadratic(cx);
     JS_ASSERT(script->code <= pc);
     JS_ASSERT(pc < script->code + script->length);
     size_t offset = pc - script->code;
     args.rval().setNumber(double(offset));
     return true;
 }
--- a/js/src/vm/SPSProfiler.cpp
+++ b/js/src/vm/SPSProfiler.cpp
@@ -362,17 +362,17 @@ SPSProfiler::discardMJITCode(mjit::JITSc
                              mjit::JITChunk *chunk, void* address)
 {
     AutoAssertNoGC nogc;
     if (!jminfo.initialized())
         return;

     unregisterScript(jscr->script, chunk);
     for (unsigned i = 0; i < chunk->nInlineFrames; i++)
-        unregisterScript(chunk->inlineFrames()[i].fun->nonLazyScript().get(nogc), chunk);
+        unregisterScript(chunk->inlineFrames()[i].fun->nonLazyScript(), chunk);
 }

 void
 SPSProfiler::unregisterScript(JSScript *script, mjit::JITChunk *chunk)
 {
     JITInfoMap::Ptr ptr = jminfo.lookup(script);
     if (!ptr)
         return;
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -76,22 +76,21 @@ StaticScopeIter::type() const

 StaticBlockObject &
 StaticScopeIter::block() const
 {
     JS_ASSERT(type() == BLOCK);
     return obj->asStaticBlock();
 }

-JSScript *
+UnrootedScript
 StaticScopeIter::funScript() const
 {
-    AutoAssertNoGC nogc;
     JS_ASSERT(type() == FUNCTION);
-    return obj->toFunction()->nonLazyScript().get(nogc);
+    return obj->toFunction()->nonLazyScript();
 }

 /*****************************************************************************/

 StaticScopeIter
 js::ScopeCoordinateToStaticScope(JSScript *script, jsbytecode *pc)
 {
     JS_ASSERT(pc >= script->code && pc < script->code + script->length);
@@ -1201,17 +1200,17 @@ class DebugScopeProxy : public BaseProxy
             return false;

         AutoAssertNoGC nogc;
         unsigned i = shape->shortid();
         if (block.staticBlock().isAliased(i))
             return false;

         if (maybefp) {
-            RawScript script = maybefp->script().get(nogc);
+            UnrootedScript script = maybefp->script();
             unsigned local = block.slotToLocalIndex(script->bindings, shape->slot());
             if (action == GET)
                 *vp = maybefp->unaliasedLocal(local);
             else
                 maybefp->unaliasedLocal(local) = *vp;
             JS_ASSERT(analyze::LocalSlot(script, local) >= analyze::TotalSlots(script));
         } else {
             if (action == GET)
--- a/js/src/vm/ScopeObject.h
+++ b/js/src/vm/ScopeObject.h
@@ -66,17 +66,17 @@ class StaticScopeIter
     /* Return whether this static scope will be on the dynamic scope chain. */
     bool hasDynamicScopeObject() const;
     Shape *scopeShape() const;

     enum Type { BLOCK, FUNCTION, NAMED_LAMBDA };
     Type type() const;

     StaticBlockObject &block() const;
-    JSScript *funScript() const;
+    UnrootedScript funScript() const;
 };

 /*****************************************************************************/

 /*
  * A "scope coordinate" describes how to get from head of the scope chain to a
  * given lexically-enclosing variable. A scope coordinate has two dimensions:
  *  - hops: the number of scope objects on the scope chain to skip
--- a/js/src/vm/Stack-inl.h
+++ b/js/src/vm/Stack-inl.h
@@ -574,26 +574,26 @@ ContextStack::currentScript(jsbytecode *
 #endif

 #ifdef JS_METHODJIT
     mjit::CallSite *inlined = regs.inlined();
     if (inlined) {
         mjit::JITChunk *chunk = fp->jit()->chunk(regs.pc);
         JS_ASSERT(inlined->inlineIndex < chunk->nInlineFrames);
         mjit::InlineFrame *frame = &chunk->inlineFrames()[inlined->inlineIndex];
-        RawScript script = frame->fun->nonLazyScript().get(nogc);
+        UnrootedScript script = frame->fun->nonLazyScript();
         if (!allowCrossCompartment && script->compartment() != cx_->compartment)
             return NULL;
         if (ppc)
             *ppc = script->code + inlined->pcOffset;
         return script;
     }
 #endif

-    RawScript script = fp->script().get(nogc);
+    UnrootedScript script = fp->script();
     if (!allowCrossCompartment && script->compartment() != cx_->compartment)
         return NULL;
     if (ppc)
         *ppc = fp->pcQuadratic(*this);
     return script;
 }
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -4,16 +4,17 @@
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

 #ifndef Stack_h__
 #define Stack_h__

 #include "jsfun.h"
+#include "jsscript.h"

 #ifdef JS_ION
 #include "ion/IonFrameIterator.h"
 #endif
 #include "jsautooplen.h"

 struct JSContext;
 struct JSCompartment;
@@ -604,21 +605,21 @@ class StackFrame
      * point of the outermost call. Inlined frame invariants:
      *
      *  - Inlined frames have the same scope chain as the outer frame.
      *  - Inlined frames have the same strictness as the outer frame.
      *  - Inlined frames can only make calls to other JIT frames associated with
      *    the same VMFrame. Other calls force expansion of the inlined frames.
      */
-    js::Return<JSScript*> script() const {
+    UnrootedScript script() const {
         return isFunctionFrame()
                ? isEvalFrame()
                  ? u.evalScript
-                 : (JSScript*)fun()->nonLazyScript().unsafeGet()
+                 : (RawScript)fun()->nonLazyScript()
                : exec.script;
     }

     /*
      * Get the frame's current bytecode, assuming 'this' is in 'stack'. Beware,
      * as the name implies, pcQuadratic can lead to quadratic behavior in loops
      * such as:
      *
@@ -1194,18 +1195,17 @@ class FrameRegs
     void prepareToRun(StackFrame &fp, JSScript *script) {
         pc = script->code;
         sp = fp.slots() + script->nfixed;
         fp_ = &fp;
         inlined_ = NULL;
     }

     void setToEndOfScript() {
-        AutoAssertNoGC nogc;
-        RawScript script = fp()->script().get(nogc);
+        UnrootedScript script = fp()->script();
         sp = fp()->base();
         pc = script->code + script->length - JSOP_STOP_LENGTH;
         JS_ASSERT(*pc == JSOP_STOP);
     }

     /* For expandInlineFrames: */
     void expandInline(StackFrame *innerfp, jsbytecode *innerpc) {
         pc = innerpc;
@@ -1795,17 +1795,17 @@ class StackIter
      * When entering IonMonkey, the top interpreter frame (pushed by the caller)
      * is kept on the stack as bookkeeping (with runningInIon() set). The
      * contents of the frame are ignored by Ion code (and GC) and thus
      * immediately become garbage and must not be touched directly.
      */
     StackFrame *interpFrame() const { JS_ASSERT(isScript() && !isIon()); return fp_; }

     jsbytecode *pc() const { JS_ASSERT(isScript()); return pc_; }
-    js::Return<JSScript*> script() const { JS_ASSERT(isScript()); return script_; }
+    UnrootedScript script() const { JS_ASSERT(isScript()); return script_; }
     JSFunction *callee() const;
     Value calleev() const;
     unsigned numActualArgs() const;
     JSObject *scopeChain() const;

     // Ensure that thisv is correct, see ComputeThis.
     bool computeThis() const;