author | Jon Coppeard <jcoppeard@mozilla.com> |
date | Tue, 30 Apr 2013 11:18:18 +0100 |
changeset 141267 | 6c23b6504db07b80012bd0b78b60b9ecee6d2d5f |
parent 141266 | 807ac4ab5971a1fab77ec8fde2427a1761c2ac36 |
child 141268 | 41b415cfb8606f8aca98655ad025754d57a35326 |
push id | 2579 |
push user | akeybl@mozilla.com |
push date | Mon, 24 Jun 2013 18:52:47 +0000 |
treeherder | mozilla-beta@b69b7de8a05a |
reviewers | terrence |
bugs | 866789 |
milestone | 23.0a1 |
first release with | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64 |
last release without | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64 |
--- a/js/src/builtin/Eval.cpp
+++ b/js/src/builtin/Eval.cpp
@@ -260,17 +260,17 @@ EvalKernel(JSContext *cx, const CallArgs
     if (!stableStr)
         return false;
     StableCharPtr chars = stableStr->chars();
     size_t length = stableStr->length();
     JSPrincipals *principals = PrincipalsForCompiledCode(args, cx);
-    JSScript *callerScript = caller ? caller.script() : NULL;
+    RootedScript callerScript(cx, caller ? caller.script() : NULL);
     EvalJSONResult ejr = TryEvalJSON(cx, callerScript, chars, length, args.rval());
     if (ejr != EvalJSON_NotJSON)
         return ejr == EvalJSON_Success;
     EvalScriptGuard esg(cx);
     if (evalType == DIRECT_EVAL && caller.isNonEvalFunctionFrame())
         esg.lookupInEvalCache(stableStr, callerScript, pc);
@@ -284,17 +284,16 @@ EvalKernel(JSContext *cx, const CallArgs
                                                    : NOT_CALLED_FROM_JSOP_EVAL);
         CompileOptions options(cx);
         options.setFileAndLine(filename, lineno)
                .setCompileAndGo(true)
                .setNoScriptRval(false)
                .setPrincipals(principals)
                .setOriginPrincipals(originPrincipals);
-        RootedScript callerScript(cx, caller ? caller.script() : NULL);
         RawScript compiled = frontend::CompileScript(cx, scopeobj, callerScript, options,
                                                      chars.get(), length, stableStr, staticLevel);
         if (!compiled)
             return false;
         esg.setNewScript(compiled);
     }
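The Eval.cpp hunk above is representative of the whole patch: a bare JSScript* (or JSObject*, Value, ...) that is live across a call which can trigger a garbage collection is wrapped in a Rooted<T>, so the GC traces the slot and, under a moving collector, keeps it up to date. A minimal sketch of the idiom, assuming the JSAPI of this era (JS::Rooted and friends); the helpers GetScriptSomehow() and MaybeTriggerGC() are hypothetical and not part of the patch:

    #include "jsapi.h"

    // Hypothetical helpers, declared only so the sketch is self-contained.
    JSScript *GetScriptSomehow(JSContext *cx);
    void MaybeTriggerGC(JSContext *cx);  // stands in for any call that can allocate/GC

    void
    RootingSketch(JSContext *cx)
    {
        // HAZARD (before): a raw JSScript* is invisible to the GC, so a
        // collection triggered below could leave it dangling or stale:
        //     JSScript *script = GetScriptSomehow(cx);

        // FIXED (after): Rooted<JSScript*> registers this stack slot with the
        // context's rooting list for the lifetime of the scope.
        JS::Rooted<JSScript*> script(cx, GetScriptSomehow(cx));

        MaybeTriggerGC(cx);  // may GC; |script| is still safe to use here
        (void) script;
    }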
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -663,17 +663,17 @@ CountHeap(JSContext *cx, unsigned argc,
 {
     jsval v;
     int32_t traceKind;
     JSString *str;
     JSCountHeapTracer countTracer;
     JSCountHeapNode *node;
     size_t counter;
-    Value startValue = UndefinedValue();
+    RootedValue startValue(cx, UndefinedValue());
     if (argc > 0) {
         v = JS_ARGV(cx, vp)[0];
         if (JSVAL_IS_TRACEABLE(v)) {
             startValue = v;
         } else if (!JSVAL_IS_NULL(v)) {
             JS_ReportError(cx,
                            "the first argument is not null or a heap-allocated "
                            "thing");
--- a/js/src/ctypes/CTypes.cpp
+++ b/js/src/ctypes/CTypes.cpp
@@ -3742,17 +3742,17 @@ CType::ToSource(JSContext* cx, unsigned
 }
 JSBool
 CType::HasInstance(JSContext* cx, JSHandleObject obj, JSMutableHandleValue v, JSBool* bp)
 {
   JS_ASSERT(CType::IsCType(obj));
   jsval slot = JS_GetReservedSlot(obj, SLOT_PROTO);
-  JSObject* prototype = &slot.toObject();
+  RootedObject prototype(cx, &slot.toObject());
   JS_ASSERT(prototype);
   JS_ASSERT(CData::IsCDataProto(prototype));
   *bp = JS_FALSE;
   if (JSVAL_IS_PRIMITIVE(v))
     return JS_TRUE;
   RootedObject proto(cx, &v.toObject());
@@ -4767,17 +4767,17 @@ StructType::DefineInternal(JSContext* cx
   // to get GC safety for free, since if anything in this function fails we
   // do not want to mutate 'typeObj'.)
   AutoPtr<FieldInfoHash> fields(cx->new_<FieldInfoHash>());
   Array<jsval, 16> fieldRootsArray;
   if (!fields || !fields->init(len) || !fieldRootsArray.appendN(JSVAL_VOID, len)) {
     JS_ReportOutOfMemory(cx);
     return JS_FALSE;
   }
-  js::AutoArrayRooter fieldRoots(cx, fieldRootsArray.length(),
+  js::AutoArrayRooter fieldRoots(cx, fieldRootsArray.length(),
                                  fieldRootsArray.begin());
   // Process the field types.
   size_t structSize, structAlign;
   if (len != 0) {
     structSize = 0;
     structAlign = 0;
@@ -7113,17 +7113,17 @@ JSBool
 CDataFinalizer::Methods::Forget(JSContext* cx, unsigned argc, jsval *vp)
 {
   CallArgs args = CallArgsFromVp(argc, vp);
   if (args.length() != 0) {
     JS_ReportError(cx, "CDataFinalizer.prototype.forget takes no arguments");
     return JS_FALSE;
   }
-  JSObject *obj = JS_THIS_OBJECT(cx, vp);
+  RootedObject obj(cx, JS_THIS_OBJECT(cx, vp));
   if (!obj)
     return JS_FALSE;
   if (!CDataFinalizer::IsCDataFinalizer(obj)) {
     return TypeError(cx, "a CDataFinalizer", OBJECT_TO_JSVAL(obj));
   }
   CDataFinalizer::Private *p = (CDataFinalizer::Private *)
     JS_GetPrivate(obj);
--- a/js/src/ion/AsmJS.cpp
+++ b/js/src/ion/AsmJS.cpp
@@ -3119,17 +3119,17 @@ CheckStoreArray(FunctionCompiler &f, Par
     *def = rhsDef;
     *type = rhsType;
     return true;
 }
 static bool
 CheckAssignName(FunctionCompiler &f, ParseNode *lhs, ParseNode *rhs, MDefinition **def, Type *type)
 {
-    PropertyName *name = lhs->name();
+    Rooted<PropertyName *> name(f.cx(), lhs->name());
     MDefinition *rhsDef;
     Type rhsType;
     if (!CheckExpr(f, rhs, Use::NoCoercion, &rhsDef, &rhsType))
         return false;
     if (const FunctionCompiler::Local *lhsVar = f.lookupLocal(name)) {
         if (!(rhsType <= lhsVar->type))
--- a/js/src/ion/BaselineIC.cpp
+++ b/js/src/ion/BaselineIC.cpp
@@ -7596,19 +7596,19 @@ ICSetElem_DenseAdd::ICSetElem_DenseAdd(I
     JS_ASSERT(protoChainDepth <= MAX_PROTO_CHAIN_DEPTH);
     extra_ = protoChainDepth;
 }
 template <size_t ProtoChainDepth>
 ICUpdatedStub *
 ICSetElemDenseAddCompiler::getStubSpecific(ICStubSpace *space, const AutoShapeVector *shapes)
 {
+    RootedTypeObject objType(cx, obj_->getType(cx));
     Rooted<IonCode *> stubCode(cx, getStubCode());
-    return ICSetElem_DenseAddImpl<ProtoChainDepth>::New(space, stubCode, obj_->getType(cx),
-                                                        shapes);
+    return ICSetElem_DenseAddImpl<ProtoChainDepth>::New(space, stubCode, objType, shapes);
 }
 ICSetElem_TypedArray::ICSetElem_TypedArray(IonCode *stubCode, HandleShape shape, uint32_t type,
                                            bool expectOutOfBounds)
   : ICStub(SetElem_TypedArray, stubCode),
     shape_(shape)
 {
     extra_ = uint8_t(type);
--- a/js/src/jsatom.cpp
+++ b/js/src/jsatom.cpp
@@ -245,18 +245,18 @@ AtomizeAndTakeOwnership(JSContext *cx, c
     }
     /*
      * If a GC occurs at js_NewStringCopy then |p| will still have the correct
      * hash, allowing us to avoid rehashing it. Even though the hash is
      * unchanged, we need to re-lookup the table position because a last-ditch
      * GC will potentially free some table entries.
      */
-    AtomHasher::Lookup lookup(tbchars, length);
-    AtomSet::AddPtr p = cx->runtime->atoms.lookupForAdd(lookup);
+    AtomSet& atoms = cx->runtime->atoms;
+    AtomSet::AddPtr p = atoms.lookupForAdd(AtomHasher::Lookup(tbchars, length));
     SkipRoot skipHash(cx, &p); /* Prevent the hash from being poisoned. */
     if (p) {
         RawAtom atom = p->asPtr();
         p->setTagged(bool(ib));
         js_free((void*)tbchars);
         return atom;
     }
@@ -265,17 +265,18 @@ AtomizeAndTakeOwnership(JSContext *cx, c
     RawFlatString flat = js_NewString<CanGC>(cx, const_cast<jschar*>(tbchars), length);
     if (!flat) {
         js_free((void*)tbchars);
         return NULL;
     }
     RawAtom atom = flat->morphAtomizedStringIntoAtom();
-    if (!cx->runtime->atoms.relookupOrAdd(p, lookup, AtomStateEntry(atom, bool(ib)))) {
+    if (!atoms.relookupOrAdd(p, AtomHasher::Lookup(tbchars, length),
+                             AtomStateEntry(atom, bool(ib)))) {
         JS_ReportOutOfMemory(cx); /* SystemAllocPolicy does not report OOM. */
         return NULL;
     }
     return atom;
 }
 /* |tbchars| must not point into an inline or short string. */
@@ -288,34 +289,36 @@ AtomizeAndCopyChars(JSContext *cx, const
         return s;
     /*
      * If a GC occurs at js_NewStringCopy then |p| will still have the correct
      * hash, allowing us to avoid rehashing it. Even though the hash is
      * unchanged, we need to re-lookup the table position because a last-ditch
      * GC will potentially free some table entries.
      */
-    AtomHasher::Lookup lookup(tbchars, length);
-    AtomSet::AddPtr p = cx->runtime->atoms.lookupForAdd(lookup);
+
+    AtomSet& atoms = cx->runtime->atoms;
+    AtomSet::AddPtr p = atoms.lookupForAdd(AtomHasher::Lookup(tbchars, length));
     SkipRoot skipHash(cx, &p); /* Prevent the hash from being poisoned. */
     if (p) {
         RawAtom atom = p->asPtr();
         p->setTagged(bool(ib));
         return atom;
     }
     AutoEnterAtomsCompartment ac(cx);
     RawFlatString flat = js_NewStringCopyN<allowGC>(cx, tbchars, length);
     if (!flat)
         return NULL;
     RawAtom atom = flat->morphAtomizedStringIntoAtom();
-    if (!cx->runtime->atoms.relookupOrAdd(p, lookup, AtomStateEntry(atom, bool(ib)))) {
+    if (!atoms.relookupOrAdd(p, AtomHasher::Lookup(tbchars, length),
+                             AtomStateEntry(atom, bool(ib)))) {
         JS_ReportOutOfMemory(cx); /* SystemAllocPolicy does not report OOM. */
         return NULL;
     }
     return atom;
 }
 template <AllowGC allowGC>
--- a/js/src/jsclone.cpp
+++ b/js/src/jsclone.cpp
@@ -874,24 +874,24 @@ JSStructuredCloneReader::readTypedArray(
     // Push a placeholder onto the allObjs list to stand in for the typed array
     uint32_t placeholderIndex = allObjs.length();
     Value dummy = JSVAL_NULL;
     if (!allObjs.append(dummy))
         return false;
     // Read the ArrayBuffer object and its contents (but no properties)
-    Value v;
+    RootedValue v(context());
     uint32_t byteOffset;
     if (v1Read) {
-        if (!readV1ArrayBuffer(arrayType, nelems, &v))
+        if (!readV1ArrayBuffer(arrayType, nelems, v.address()))
             return false;
         byteOffset = 0;
     } else {
-        if (!startRead(&v))
+        if (!startRead(v.address()))
             return false;
         uint64_t n;
         if (!in.read(&n))
             return false;
         byteOffset = n;
     }
     RootedObject buffer(context(), &v.toObject());
     RootedObject obj(context(), NULL);
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -571,20 +571,20 @@ js::GetIterator(JSContext *cx, HandleObj
             JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NOT_ITERABLE, bytes);
             js_free(bytes);
             return false;
         }
         if (!Invoke(cx, ObjectOrNullValue(obj), method, 0, NULL, vp.address()))
             return false;
-        RawObject obj = ToObject(cx, vp);
-        if (!obj)
+        RawObject resultObj = ToObject(cx, vp);
+        if (!resultObj)
             return false;
-        vp.setObject(*obj);
+        vp.setObject(*resultObj);
         return true;
     }
     Vector<RawShape, 8> shapes(cx);
     uint32_t key = 0;
     bool keysOnly = (flags == JSITER_ENUMERATE);
--- a/js/src/jsproxy.cpp
+++ b/js/src/jsproxy.cpp
@@ -3285,18 +3285,21 @@ JS_FRIEND_API(JSObject *)
 js::NewProxyObject(JSContext *cx, BaseProxyHandler *handler, const Value &priv_, JSObject *proto_,
                    JSObject *parent_, ProxyCallable callable)
 {
     return NewProxyObject(cx, handler, priv_, TaggedProto(proto_), parent_, callable);
 }
 static JSObject *
 NewProxyObject(JSContext *cx, BaseProxyHandler *handler, const Value &priv_, JSObject *proto_,
-               JSObject *parent_, JSObject *call, JSObject *construct)
+               JSObject *parent_, JSObject *call_, JSObject *construct_)
 {
+    RootedObject call(cx, call_);
+    RootedObject construct(cx, construct_);
+
     JS_ASSERT_IF(construct, cx->compartment == construct->compartment());
     JS_ASSERT_IF(call && cx->compartment != call->compartment(), priv_ == ObjectValue(*call));
     JSObject *proxy = NewProxyObject(cx, handler, priv_, TaggedProto(proto_), parent_,
                                      call || construct ? ProxyIsCallable : ProxyNotCallable);
     if (!proxy)
         return NULL;
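The jsproxy.cpp hunk above shows the companion idiom for raw-pointer parameters: the parameter keeps a trailing underscore and is copied into a Rooted local at the top of the function, and every later use goes through the rooted copy. A hedged sketch of that shape, with an invented function used purely for illustration:

    #include "jsapi.h"

    // Hypothetical internal function; only the rooting pattern is the point.
    static void
    RootParamsSketch(JSContext *cx, JSObject *target_, JSObject *proto_)
    {
        // Root the raw parameters immediately; the underscored names exist
        // only to initialize the Rooted copies and are not used again.
        JS::RootedObject target(cx, target_);
        JS::RootedObject proto(cx, proto_);

        // ... anything from here on may GC; |target| and |proto| stay valid ...
        (void) target;
        (void) proto;
    }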
--- a/js/src/jswrapper.cpp
+++ b/js/src/jswrapper.cpp
@@ -492,17 +492,17 @@ CrossCompartmentWrapper::nativeCall(JSCo
             return false;
         *dst = source.get();
         // Handle |this| specially. When we rewrap on the other side of the
         // membrane, we might apply a same-compartment security wrapper that
         // will stymie this whole process. If that happens, unwrap the wrapper.
         // This logic can go away when same-compartment security wrappers go away.
         if ((src == srcArgs.base() + 1) && dst->isObject()) {
-            JSObject *thisObj = &dst->toObject();
+            RootedObject thisObj(cx, &dst->toObject());
             if (thisObj->isWrapper() && !Wrapper::wrapperHandler(thisObj)->isSafeToUnwrap()) {
                 JS_ASSERT(!IsCrossCompartmentWrapper(thisObj));
                 *dst = ObjectValue(*Wrapper::wrappedObject(thisObj));
             }
         }
     }