author | Terrence Cole <terrence@mozilla.com> |
date | Tue, 14 Feb 2012 15:19:55 -0800 |
changeset 87139 | 2e89173cb52e89af2d6661d6d929d9359302a772 |
parent 87128 | c892c49074abe43cc59e2d29757105a664bc6594 |
child 87140 | 2a8ceeb27f7c8a6a43ef01d55d827ddccb887e33 |
push id | 22083 |
push user | bmo@edmorley.co.uk |
push date | Sat, 18 Feb 2012 11:19:19 +0000 |
treeherder | mozilla-central@20478b673212 |
reviewers | billm |
bugs | 727281 |
milestone | 13.0a1 |
--- a/js/src/frontend/Parser.cpp
+++ b/js/src/frontend/Parser.cpp
@@ -252,17 +252,17 @@ Parser::newFunctionBox(JSObject *obj, Pa
     return funbox;
 }
 
 void
 Parser::trace(JSTracer *trc)
 {
     ObjectBox *objbox = traceListHead;
     while (objbox) {
-        MarkObjectRoot(trc, objbox->object, "parser.object");
+        MarkObjectRoot(trc, &objbox->object, "parser.object");
         if (objbox->isFunctionBox)
             static_cast<FunctionBox *>(objbox)->bindings.trace(trc);
         objbox = objbox->traceLink;
     }
 
     for (TreeContext *tc = this->tc; tc; tc = tc->parent)
         tc->trace(trc);
 }
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -1024,17 +1024,17 @@ class AutoEnumStateRooter : private Auto
     friend void AutoGCRooter::trace(JSTracer *trc);
 
     const Value &state() const { return stateValue; }
     Value *addr() { return &stateValue; }
 
   protected:
     void trace(JSTracer *trc);
 
-    JSObject * const obj;
+    JSObject *obj;
 
   private:
     Value stateValue;
     JS_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 template<class T>
 class AutoVectorRooter : protected AutoGCRooter
--- a/js/src/jsatom.cpp
+++ b/js/src/jsatom.cpp
@@ -381,25 +381,30 @@ js_FinishCommonAtoms(JSContext *cx)
 
 void
 js_TraceAtomState(JSTracer *trc)
 {
     JSRuntime *rt = trc->runtime;
     JSAtomState *state = &rt->atomState;
 
     if (rt->gcKeepAtoms) {
-        for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront())
-            MarkStringRoot(trc, r.front().asPtr(), "locked_atom");
+        for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
+            JSAtom *tmp = r.front().asPtr();
+            MarkStringRoot(trc, &tmp, "locked_atom");
+            JS_ASSERT(tmp == r.front().asPtr());
+        }
     } else {
         for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
             AtomStateEntry entry = r.front();
             if (!entry.isTagged())
                 continue;
 
-            MarkStringRoot(trc, entry.asPtr(), "interned_atom");
+            JSAtom *tmp = entry.asPtr();
+            MarkStringRoot(trc, &tmp, "interned_atom");
+            JS_ASSERT(tmp == entry.asPtr());
         }
     }
 }
 
 void
 js_SweepAtomState(JSRuntime *rt)
 {
     JSAtomState *state = &rt->atomState;
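The jsatom.cpp hunk above shows the idiom this patch applies wherever the traced thing lives in a container that cannot hand out a mutable slot: copy the pointer into a local, mark through the local's address, and assert that nothing moved. A minimal sketch of that idiom, kept close to the hunk (the enclosing traceAtoms function is illustrative, not part of the patch):

    // Sketch only: root-marking entry points now take a pointer-to-pointer so a
    // future moving collector could update the root in place; until then the
    // assert documents that the pointer must not change during marking.
    static void
    traceAtoms(JSTracer *trc, AtomSet &atoms)
    {
        for (AtomSet::Range r = atoms.all(); !r.empty(); r.popFront()) {
            JSAtom *tmp = r.front().asPtr();      // copy out of the table
            MarkStringRoot(trc, &tmp, "atom");    // mark via the local's address
            JS_ASSERT(tmp == r.front().asPtr());  // the thing did not move
        }
    }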
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -1273,17 +1273,17 @@ JSContext::sizeOfIncludingThis(JSMallocS
 
 void
 JSContext::mark(JSTracer *trc)
 {
     /* Stack frames and slots are traced by StackSpace::mark. */
 
     /* Mark other roots-by-definition in the JSContext. */
     if (globalObject && !hasRunOption(JSOPTION_UNROOTED_GLOBAL))
-        MarkObjectRoot(trc, globalObject, "global object");
+        MarkObjectRoot(trc, &globalObject, "global object");
     if (isExceptionPending())
         MarkValueRoot(trc, &exception, "exception");
 
     if (autoGCRooters)
         autoGCRooters->traceAll(trc);
 
     if (sharpObjectMap.depth > 0)
         js_TraceSharpMap(trc, &sharpObjectMap);
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -430,31 +430,37 @@ JSCompartment::markTypes(JSTracer *trc)
      * Mark all scripts, type objects and singleton JS objects in the
      * compartment. These can be referred to directly by type sets, which we
      * cannot modify while code which depends on these type sets is active.
      */
     JS_ASSERT(activeAnalysis);
 
     for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
-        MarkScriptRoot(trc, script, "mark_types_script");
+        MarkScriptRoot(trc, &script, "mark_types_script");
+        JS_ASSERT(script == i.get<JSScript>());
     }
 
     for (size_t thingKind = FINALIZE_OBJECT0; thingKind < FINALIZE_OBJECT_LIMIT; thingKind++) {
         for (CellIterUnderGC i(this, AllocKind(thingKind)); !i.done(); i.next()) {
             JSObject *object = i.get<JSObject>();
-            if (object->hasSingletonType())
-                MarkObjectRoot(trc, object, "mark_types_singleton");
+            if (object->hasSingletonType()) {
+                MarkObjectRoot(trc, &object, "mark_types_singleton");
+                JS_ASSERT(object == i.get<JSObject>());
+            }
         }
     }
 
-    for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next())
-        MarkTypeObjectRoot(trc, i.get<types::TypeObject>(), "mark_types_scan");
+    for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next()) {
+        types::TypeObject *type = i.get<types::TypeObject>();
+        MarkTypeObjectRoot(trc, &type, "mark_types_scan");
+        JS_ASSERT(type == i.get<types::TypeObject>());
+    }
 }
 
 void
 JSCompartment::sweep(JSContext *cx, bool releaseTypes)
 {
     /* Remove dead wrappers from the table. */
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         JS_ASSERT_IF(IsAboutToBeFinalized(e.front().key) &&
--- a/js/src/jsexn.cpp
+++ b/js/src/jsexn.cpp
@@ -414,24 +414,24 @@ exn_trace(JSTracer *trc, JSObject *obj)
     JSExnPrivate *priv;
     JSStackTraceElem *elem;
     size_t vcount, i;
     HeapValue *vp;
 
     priv = GetExnPrivate(obj);
     if (priv) {
         if (priv->message)
-            MarkString(trc, priv->message, "exception message");
+            MarkString(trc, &priv->message, "exception message");
         if (priv->filename)
-            MarkString(trc, priv->filename, "exception filename");
+            MarkString(trc, &priv->filename, "exception filename");
         elem = priv->stackElems;
         for (vcount = i = 0; i != priv->stackDepth; ++i, ++elem) {
             if (elem->funName)
-                MarkString(trc, elem->funName, "stack trace function name");
+                MarkString(trc, &elem->funName, "stack trace function name");
             if (IS_GC_MARKING_TRACER(trc) && elem->filename)
                 js_MarkScriptFilename(elem->filename);
             vcount += elem->argc;
         }
         vp = GetStackTraceValueBuffer(priv);
         for (i = 0; i != vcount; ++i, ++vp)
             MarkValue(trc, vp, "stack trace argument");
     }
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -534,17 +534,17 @@ args_trace(JSTracer *trc, JSObject *obj)
      * invocation. To distinguish the two cases (which imply different access
      * paths to the generator object), we use the JSFRAME_FLOATING_GENERATOR
      * flag, which is only set on the StackFrame kept in the generator object's
      * JSGenerator.
      */
 #if JS_HAS_GENERATORS
     StackFrame *fp = argsobj.maybeStackFrame();
     if (fp && fp->isFloatingGenerator())
-        MarkObject(trc, js_FloatingFrameToGenerator(fp)->obj, "generator object");
+        MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
 #endif
 }
 
 /*
  * The classes below collaborate to lazily reflect and synchronize actual
  * argument values, argument count, and callee function object stored in a
  * StackFrame with their corresponding property values in the frame's
  * arguments object.
@@ -931,17 +931,17 @@ static void
 call_trace(JSTracer *trc, JSObject *obj)
 {
     JS_ASSERT(obj->isCall());
 
     /* Mark any generator frame, as for arguments objects. */
 #if JS_HAS_GENERATORS
     StackFrame *fp = (StackFrame *) obj->getPrivate();
     if (fp && fp->isFloatingGenerator())
-        MarkObject(trc, js_FloatingFrameToGenerator(fp)->obj, "generator object");
+        MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
 #endif
 }
 
 JS_PUBLIC_DATA(Class) js::CallClass = {
     "Call",
     JSCLASS_HAS_PRIVATE |
     JSCLASS_HAS_RESERVED_SLOTS(CallObject::RESERVED_SLOTS) |
     JSCLASS_NEW_RESOLVE | JSCLASS_IS_ANONYMOUS,
@@ -1460,17 +1460,17 @@ JSFunction::trace(JSTracer *trc)
                            toExtended()->extendedSlots, "nativeReserved");
     }
 
     if (atom)
         MarkStringUnbarriered(trc, atom, "atom");
 
     if (isInterpreted()) {
         if (script())
-            MarkScript(trc, script(), "script");
+            MarkScript(trc, &script(), "script");
         if (environment())
             MarkObjectUnbarriered(trc, environment(), "fun_callscope");
     }
 }
 
 static void
 fun_trace(JSTracer *trc, JSObject *obj)
 {
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1870,17 +1870,17 @@ AutoIdArray::trace(JSTracer *trc)
 {
     JS_ASSERT(tag == IDARRAY);
     gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
 }
 
 void
 AutoEnumStateRooter::trace(JSTracer *trc)
 {
-    gc::MarkObjectRoot(trc, obj, "JS::AutoEnumStateRooter.obj");
+    gc::MarkObjectRoot(trc, &obj, "JS::AutoEnumStateRooter.obj");
 }
 
 inline void
 AutoGCRooter::trace(JSTracer *trc)
 {
     switch (tag) {
       case JSVAL:
         MarkValueRoot(trc, &static_cast<AutoValueRooter *>(this)->val, "JS::AutoValueRooter.val");
@@ -1911,54 +1911,62 @@ AutoGCRooter::trace(JSTracer *trc)
             MarkValueRoot(trc, &desc.set, "PropDesc::set");
         }
         return;
       }
 
       case DESCRIPTOR : {
         PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
         if (desc.obj)
-            MarkObjectRoot(trc, desc.obj, "Descriptor::obj");
+            MarkObjectRoot(trc, &desc.obj, "Descriptor::obj");
         MarkValueRoot(trc, &desc.value, "Descriptor::value");
-        if ((desc.attrs & JSPROP_GETTER) && desc.getter)
-            MarkObjectRoot(trc, CastAsObject(desc.getter), "Descriptor::get");
-        if (desc.attrs & JSPROP_SETTER && desc.setter)
-            MarkObjectRoot(trc, CastAsObject(desc.setter), "Descriptor::set");
+        if ((desc.attrs & JSPROP_GETTER) && desc.getter) {
+            JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, desc.getter);
+            MarkObjectRoot(trc, &tmp, "Descriptor::get");
+            desc.getter = JS_DATA_TO_FUNC_PTR(JSPropertyOp, tmp);
+        }
+        if (desc.attrs & JSPROP_SETTER && desc.setter) {
+            JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, desc.setter);
+            MarkObjectRoot(trc, &tmp, "Descriptor::set");
+            desc.setter = JS_DATA_TO_FUNC_PTR(JSStrictPropertyOp, tmp);
+        }
         return;
       }
 
       case NAMESPACES: {
         JSXMLArray<JSObject> &array = static_cast<AutoNamespaceArray *>(this)->array;
         MarkObjectRange(trc, array.length, array.vector, "JSXMLArray.vector");
         js_XMLArrayCursorTrace(trc, array.cursors);
         return;
       }
 
       case XML:
         js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
         return;
 
       case OBJECT:
-        if (JSObject *obj = static_cast<AutoObjectRooter *>(this)->obj)
-            MarkObjectRoot(trc, obj, "JS::AutoObjectRooter.obj");
+        if (static_cast<AutoObjectRooter *>(this)->obj)
+            MarkObjectRoot(trc, &static_cast<AutoObjectRooter *>(this)->obj,
+                           "JS::AutoObjectRooter.obj");
         return;
 
       case ID:
         MarkIdRoot(trc, static_cast<AutoIdRooter *>(this)->id_, "JS::AutoIdRooter.id_");
         return;
 
       case VALVECTOR: {
         AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
         MarkValueRootRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
         return;
       }
 
       case STRING:
-        if (JSString *str = static_cast<AutoStringRooter *>(this)->str)
-            MarkStringRoot(trc, str, "JS::AutoStringRooter.str");
+        if (static_cast<AutoStringRooter *>(this)->str)
+            MarkStringRoot(trc, &static_cast<AutoStringRooter *>(this)->str,
+                           "JS::AutoStringRooter.str");
         return;
 
       case IDVECTOR: {
         AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
         MarkIdRootRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
         return;
       }
@@ -2018,19 +2026,19 @@ MarkRuntime(JSTracer *trc)
 
     for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
         gc_root_traversal(trc, r.front());
 
     for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
         gc_lock_traversal(r.front(), trc);
 
     if (rt->scriptPCCounters) {
-        const ScriptOpcodeCountsVector &vec = *rt->scriptPCCounters;
+        ScriptOpcodeCountsVector &vec = *rt->scriptPCCounters;
         for (size_t i = 0; i < vec.length(); i++)
-            MarkScriptRoot(trc, vec[i].script, "scriptPCCounters");
+            MarkScriptRoot(trc, &vec[i].script, "scriptPCCounters");
     }
 
     js_TraceAtomState(trc);
     rt->staticStrings.trace(trc);
 
     JSContext *iter = NULL;
     while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
         acx->mark(trc);
@@ -2044,18 +2052,20 @@ MarkRuntime(JSTracer *trc)
             if (c->watchpointMap)
                 c->watchpointMap->markAll(trc);
         }
 
         /* Do not discard scripts with counters while profiling. */
         if (rt->profilingScripts) {
             for (CellIterUnderGC i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
                 JSScript *script = i.get<JSScript>();
-                if (script->pcCounters)
-                    MarkScriptRoot(trc, script, "profilingScripts");
+                if (script->pcCounters) {
+                    MarkScriptRoot(trc, &script, "profilingScripts");
+                    JS_ASSERT(script == i.get<JSScript>());
+                }
             }
         }
     }
 
 #ifdef JS_METHODJIT
     /* We need to expand inline frames before stack scanning. */
     for (CompartmentsIter c(rt); !c.done(); c.next())
         mjit::ExpandInlineFrames(c);
--- a/js/src/jsgcmark.cpp
+++ b/js/src/jsgcmark.cpp
@@ -123,28 +123,28 @@ static void
 MarkUnbarriered(JSTracer *trc, T *thing, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkInternal(trc, thing);
 }
 
 template <typename T>
 static void
-Mark(JSTracer *trc, const HeapPtr<T> &thing, const char *name)
+Mark(JSTracer *trc, HeapPtr<T> *thing, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
-    MarkInternal(trc, thing.get());
+    MarkInternal(trc, thing->get());
 }
 
 template <typename T>
 static void
-MarkRoot(JSTracer *trc, T *thing, const char *name)
+MarkRoot(JSTracer *trc, T **thingp, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
-    MarkInternal(trc, thing);
+    MarkInternal(trc, *thingp);
 }
 
 template <typename T>
 static void
 MarkRange(JSTracer *trc, size_t len, HeapPtr<T> *vec, const char *name)
 {
     for (size_t i = 0; i < len; ++i) {
         if (T *obj = vec[i]) {
@@ -161,25 +161,25 @@ MarkRootRange(JSTracer *trc, size_t len,
     for (size_t i = 0; i < len; ++i) {
         JS_SET_TRACING_INDEX(trc, name, i);
         MarkInternal(trc, vec[i]);
     }
 }
 
 #define DeclMarkerImpl(base, type) \
 void \
-Mark##base(JSTracer *trc, const HeapPtr<type> &thing, const char *name) \
+Mark##base(JSTracer *trc, HeapPtr<type> *thing, const char *name) \
 { \
     Mark<type>(trc, thing, name); \
 } \
 \
 void \
-Mark##base##Root(JSTracer *trc, type *thing, const char *name) \
+Mark##base##Root(JSTracer *trc, type **thingp, const char *name) \
 { \
-    MarkRoot<type>(trc, thing, name); \
+    MarkRoot<type>(trc, thingp, name); \
 } \
 \
 void \
 Mark##base##Unbarriered(JSTracer *trc, type *thing, const char *name) \
 { \
     MarkUnbarriered<type>(trc, thing, name); \
 } \
 \
@@ -347,20 +347,20 @@ MarkValueRootRange(JSTracer *trc, size_t
 static void
 MarkObject(JSTracer *trc, const HeapPtr<GlobalObject, JSScript *> &thing, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkInternal(trc, thing.get());
 }
 
 void
-MarkShape(JSTracer *trc, const HeapPtr<const Shape> &thing, const char *name)
+MarkShape(JSTracer *trc, HeapPtr<const Shape> *thing, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
-    MarkInternal(trc, const_cast<Shape *>(thing.get()));
+    MarkInternal(trc, const_cast<Shape *>(thing->get()));
 }
 
 void
 MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkValueInternal(trc, v);
 }
@@ -624,17 +624,17 @@ PushValueArray(GCMarker *gcmarker, JSObj
          */
         gcmarker->delayMarkingChildren(obj);
     }
 }
 
 void
 MarkChildren(JSTracer *trc, JSObject *obj)
 {
-    MarkTypeObject(trc, obj->typeFromGC(), "type");
+    MarkTypeObject(trc, &obj->typeFromGC(), "type");
 
     Shape *shape = obj->lastProperty();
     MarkShapeUnbarriered(trc, shape, "shape");
 
     Class *clasp = shape->getObjectClass();
     if (clasp->trace)
         clasp->trace(trc, obj);
@@ -705,22 +705,22 @@ MarkChildren(JSTracer *trc, JSScript *sc
     if (script->types)
         script->types->trace(trc);
 
     if (script->hasAnyBreakpointsOrStepMode())
         script->markTrapClosures(trc);
 }
 
 static void
-MarkChildren(JSTracer *trc, const Shape *shape)
+MarkChildren(JSTracer *trc, Shape *shape)
 {
     MarkBaseShapeUnbarriered(trc, shape->base(), "base");
     MarkId(trc, shape->maybePropid(), "propid");
     if (shape->previous())
-        MarkShape(trc, shape->previous(), "parent");
+        MarkShape(trc, &shape->previousRef(), "parent");
 }
 
 static inline void
 MarkBaseShapeGetterSetter(JSTracer *trc, BaseShape *base)
 {
     if (base->hasGetterObject())
         MarkObjectUnbarriered(trc, base->getterObject(), "getter");
     if (base->hasSetterObject())
@@ -824,28 +824,28 @@ MarkChildren(JSTracer *trc, types::TypeO
         for (unsigned i = 0; i < count; i++) {
            types::Property *prop = type->getProperty(i);
            if (prop)
                MarkId(trc, prop->id, "type_prop");
         }
     }
 
     if (type->proto)
-        MarkObject(trc, type->proto, "type_proto");
+        MarkObject(trc, &type->proto, "type_proto");
 
     if (type->singleton && !type->lazy())
-        MarkObject(trc, type->singleton, "type_singleton");
+        MarkObject(trc, &type->singleton, "type_singleton");
 
     if (type->newScript) {
-        MarkObject(trc, type->newScript->fun, "type_new_function");
-        MarkShape(trc, type->newScript->shape, "type_new_shape");
+        MarkObject(trc, &type->newScript->fun, "type_new_function");
+        MarkShape(trc, &type->newScript->shape, "type_new_shape");
     }
 
     if (type->interpretedFunction)
-        MarkObject(trc, type->interpretedFunction, "type_function");
+        MarkObject(trc, &type->interpretedFunction, "type_function");
 }
 
 #ifdef JS_HAS_XML_SUPPORT
 static void
 MarkChildren(JSTracer *trc, JSXML *xml)
 {
     js_TraceXML(trc, xml);
 }
--- a/js/src/jsgcmark.h
+++ b/js/src/jsgcmark.h
@@ -40,18 +40,18 @@ namespace gc {
  * forgiving, since it doesn't demand a HeapPtr as an argument. Its use
  * should always be accompanied by a comment explaining how write barriers
  * are implemented for the given field.
  *
  * Additionally, the functions MarkObjectRange and MarkObjectRootRange are
  * defined for marking arrays of object pointers.
 */
 #define DeclMarker(base, type) \
-void Mark##base(JSTracer *trc, const HeapPtr<type> &thing, const char *name); \
-void Mark##base##Root(JSTracer *trc, type *thing, const char *name); \
+void Mark##base(JSTracer *trc, HeapPtr<type> *thing, const char *name); \
+void Mark##base##Root(JSTracer *trc, type **thingp, const char *name); \
 void Mark##base##Unbarriered(JSTracer *trc, type *thing, const char *name); \
 void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *thing, const char *name); \
 void Mark##base##RootRange(JSTracer *trc, size_t len, type **thing, const char *name);
 
 DeclMarker(BaseShape, BaseShape)
 DeclMarker(Object, ArgumentsObject)
 DeclMarker(Object, GlobalObject)
 DeclMarker(Object, JSObject)
@@ -113,17 +113,17 @@ MarkValueRootRange(JSTracer *trc, Value 
 {
     MarkValueRootRange(trc, end - begin, begin, name);
 }
 
 /*** Special Cases ***/
 
 /* TypeNewObject contains a HeapPtr<const Shape> that needs a unique cast. */
 void
-MarkShape(JSTracer *trc, const HeapPtr<const Shape> &thing, const char *name);
+MarkShape(JSTracer *trc, HeapPtr<const Shape> *thing, const char *name);
 
 /* Direct value access used by the write barriers and the methodjit */
 void
 MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name);
 
 /*
  * Mark a value that may be in a different compartment from the compartment
  * being GC'd. (Although it won't be marked if it's in the wrong compartment.)
@@ -154,23 +154,23 @@ MarkCycleCollectorChildren(JSTracer *trc
 
 inline void
 Mark(JSTracer *trc, HeapValue *v, const char *name)
 {
     MarkValue(trc, v, name);
 }
 
 inline void
-Mark(JSTracer *trc, const HeapPtr<JSObject> &o, const char *name)
+Mark(JSTracer *trc, HeapPtr<JSObject> *o, const char *name)
 {
     MarkObject(trc, o, name);
 }
 
 inline void
-Mark(JSTracer *trc, const HeapPtr<JSXML> &xml, const char *name)
+Mark(JSTracer *trc, HeapPtr<JSXML> *xml, const char *name)
 {
     MarkXML(trc, xml, name);
 }
 
 inline bool
 IsMarked(const Value &v)
 {
     if (v.isMarkable())
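After this header change, each DeclMarker(base, type) line above declares single-thing entry points whose argument type encodes how the caller holds the pointer. A rough expansion for DeclMarker(Object, JSObject), written out for reference (the Range/RootRange variants are omitted; this is only a paraphrase of the macro, not additional API):

    // Barriered heap field: pass the address of the HeapPtr<> wrapper.
    void MarkObject(JSTracer *trc, HeapPtr<JSObject> *thing, const char *name);
    // Root (stack or other non-heap location): pass the address of the raw
    // pointer, so a moving collector could rewrite it in place.
    void MarkObjectRoot(JSTracer *trc, JSObject **thingp, const char *name);
    // Explicit opt-out of the barrier typing; callers are expected to comment
    // how the write barrier for the field is handled.
    void MarkObjectUnbarriered(JSTracer *trc, JSObject *thing, const char *name);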
--- a/js/src/jsinferinlines.h
+++ b/js/src/jsinferinlines.h
@@ -736,17 +736,17 @@ TypeScript::SetArgument(JSContext *cx, J
         SetArgument(cx, script, arg, type);
     }
 }
 
 void
 TypeScript::trace(JSTracer *trc)
 {
     if (hasScope() && global)
-        gc::MarkObject(trc, global, "script_global");
+        gc::MarkObject(trc, &global, "script_global");
 
     /* Note: nesting does not keep anything alive. */
 }
 
 /////////////////////////////////////////////////////////////////////
 // TypeCompartment
 /////////////////////////////////////////////////////////////////////
 
@@ -1338,17 +1338,17 @@ TypeNewScript::writeBarrierPre(TypeNewSc
 {
 #ifdef JSGC_INCREMENTAL
     if (!newScript)
         return;
 
     JSCompartment *comp = newScript->fun->compartment();
     if (comp->needsBarrier()) {
         MarkObjectUnbarriered(comp->barrierTracer(), newScript->fun, "write barrier");
-        MarkShape(comp->barrierTracer(), newScript->shape, "write barrier");
+        MarkShape(comp->barrierTracer(), &newScript->shape, "write barrier");
     }
 #endif
 }
 
 inline void
 TypeNewScript::writeBarrierPost(TypeNewScript *newScript, void *addr)
 {
 }
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -143,19 +143,19 @@ Class js::ElementIteratorClass = {
 };
 
 static const gc::AllocKind ITERATOR_FINALIZE_KIND = gc::FINALIZE_OBJECT2;
 
 void
 NativeIterator::mark(JSTracer *trc)
 {
     for (HeapPtr<JSFlatString> *str = begin(); str < end(); str++)
-        MarkString(trc, *str, "prop");
+        MarkString(trc, str, "prop");
     if (obj)
-        MarkObject(trc, obj, "obj");
+        MarkObject(trc, &obj, "obj");
 }
 
 static void
 iterator_finalize(JSContext *cx, JSObject *obj)
 {
     JS_ASSERT(obj->isIterator());
 
     NativeIterator *ni = obj->getNativeIterator();
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -409,18 +409,21 @@ js_TraceSharpMap(JSTracer *trc, JSSharpO
      * confusing js_EnterSharpObject. So to address the problem we simply
      * mark all objects from map->table.
      *
      * An alternative "proper" solution is to use JSTempValueRooter in
      * MarkSharpObjects with code to remove during finalization entries
      * with otherwise unreachable objects. But this is way too complex
      * to justify spending efforts.
      */
-    for (JSSharpTable::Range r = map->table.all(); !r.empty(); r.popFront())
-        MarkObjectRoot(trc, r.front().key, "sharp table entry");
+    for (JSSharpTable::Range r = map->table.all(); !r.empty(); r.popFront()) {
+        JSObject *tmp = r.front().key;
+        MarkObjectRoot(trc, &tmp, "sharp table entry");
+        JS_ASSERT(tmp == r.front().key);
+    }
 }
 
 #if JS_HAS_TOSOURCE
 static JSBool
 obj_toSource(JSContext *cx, uintN argc, Value *vp)
 {
     bool comma = false;
     const jschar *vchars;
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -863,17 +863,17 @@ struct JSObject : js::gc::Cell
     inline js::types::TypeObject *getType(JSContext *cx);
 
     js::types::TypeObject *type() const {
         JS_ASSERT(!hasLazyType());
         return type_;
     }
 
-    const js::HeapPtr<js::types::TypeObject> &typeFromGC() const {
+    js::HeapPtr<js::types::TypeObject> &typeFromGC() {
         /* Direct field access for use by GC. */
         return type_;
     }
 
     static inline size_t offsetOfType() { return offsetof(JSObject, type_); }
     inline js::HeapPtrTypeObject *addressOfType() { return &type_; }
 
     inline void setType(js::types::TypeObject *newType);
--- a/js/src/jsscope.h
+++ b/js/src/jsscope.h
@@ -557,16 +557,20 @@ struct Shape : public js::gc::Cell
         JS_ASSERT(!(flags & NON_NATIVE) == getObjectClass()->isNative());
         return !(flags & NON_NATIVE);
     }
 
     const HeapPtrShape &previous() const {
         return parent;
     }
 
+    HeapPtrShape &previousRef() {
+        return parent;
+    }
+
     class Range {
       protected:
         friend struct Shape;
         const Shape *cursor;
 
       public:
         Range(const Shape *shape) : cursor(shape) {
         }
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -314,17 +314,17 @@ Bindings::makeImmutable()
     JS_ASSERT(lastBinding);
     JS_ASSERT(!lastBinding->inDictionary());
 }
 
 void
 Bindings::trace(JSTracer *trc)
 {
     if (lastBinding)
-        MarkShape(trc, lastBinding, "shape");
+        MarkShape(trc, &lastBinding, "shape");
 }
 
 #ifdef JS_CRASH_DIAGNOSTICS
 
 void
 CheckScript(JSScript *script, JSScript *prev)
 {
     if (script->cookie1[0] != JS_SCRIPT_COOKIE || script->cookie2[0] != JS_SCRIPT_COOKIE) {
--- a/js/src/jswatchpoint.cpp
+++ b/js/src/jswatchpoint.cpp
@@ -197,45 +197,49 @@ bool
 WatchpointMap::markIteratively(JSTracer *trc)
 {
     bool marked = false;
     for (Map::Range r = map.all(); !r.empty(); r.popFront()) {
         Map::Entry &e = r.front();
         bool objectIsLive = !IsAboutToBeFinalized(e.key.object);
         if (objectIsLive || e.value.held) {
             if (!objectIsLive) {
-                MarkObject(trc, e.key.object, "held Watchpoint object");
+                HeapPtrObject tmp(e.key.object);
+                MarkObject(trc, &tmp, "held Watchpoint object");
+                JS_ASSERT(tmp == e.key.object);
                 marked = true;
             }
 
             const HeapId &id = e.key.id;
             JS_ASSERT(JSID_IS_STRING(id) || JSID_IS_INT(id));
             MarkId(trc, id, "WatchKey::id");
 
             if (e.value.closure && IsAboutToBeFinalized(e.value.closure)) {
-                MarkObject(trc, e.value.closure, "Watchpoint::closure");
+                MarkObject(trc, &e.value.closure, "Watchpoint::closure");
                 marked = true;
             }
         }
     }
     return marked;
 }
 
 void
 WatchpointMap::markAll(JSTracer *trc)
 {
     for (Map::Range r = map.all(); !r.empty(); r.popFront()) {
         Map::Entry &e = r.front();
-        MarkObject(trc, e.key.object, "held Watchpoint object");
+        HeapPtrObject tmp(e.key.object);
+        MarkObject(trc, &tmp, "held Watchpoint object");
+        JS_ASSERT(tmp == e.key.object);
 
         const HeapId &id = e.key.id;
         JS_ASSERT(JSID_IS_STRING(id) || JSID_IS_INT(id));
         MarkId(trc, id, "WatchKey::id");
 
-        MarkObject(trc, e.value.closure, "Watchpoint::closure");
+        MarkObject(trc, &e.value.closure, "Watchpoint::closure");
     }
 }
 
 void
 WatchpointMap::sweepAll(JSRuntime *rt)
 {
     if (rt->gcCurrentCompartment) {
         if (WatchpointMap *wpmap = rt->gcCurrentCompartment->watchpointMap)
--- a/js/src/jsweakmap.h
+++ b/js/src/jsweakmap.h
@@ -199,29 +199,29 @@ class WeakMap : public HashMap<Key, Valu
     Range nondeterministicAll() {
         return Base::all();
     }
 
   private:
     void nonMarkingTrace(JSTracer *trc) {
         ValueMarkPolicy vp(trc);
         for (Range r = Base::all(); !r.empty(); r.popFront())
-            vp.mark(r.front().value);
+            vp.mark(&r.front().value);
     }
 
     bool markIteratively(JSTracer *trc) {
         KeyMarkPolicy kp(trc);
         ValueMarkPolicy vp(trc);
         bool markedAny = false;
         for (Range r = Base::all(); !r.empty(); r.popFront()) {
             const Key &k = r.front().key;
             Value &v = r.front().value;
             /* If the entry is live, ensure its key and value are marked. */
             if (kp.isMarked(k)) {
-                markedAny |= vp.mark(v);
+                markedAny |= vp.mark(&v);
             }
             JS_ASSERT_IF(kp.isMarked(k), vp.isMarked(v));
         }
         return markedAny;
     }
 
     void sweep(JSTracer *trc) {
         KeyMarkPolicy kp(trc);
@@ -259,52 +259,52 @@ class DefaultMarkPolicy<HeapValue> {
     JSTracer *tracer;
   public:
     DefaultMarkPolicy(JSTracer *t) : tracer(t) { }
     bool isMarked(const HeapValue &x) {
         if (x.isMarkable())
             return !IsAboutToBeFinalized(x);
         return true;
     }
-    bool mark(HeapValue &x) {
-        if (isMarked(x))
+    bool mark(HeapValue *x) {
+        if (isMarked(*x))
             return false;
-        js::gc::MarkValue(tracer, &x, "WeakMap entry");
+        js::gc::MarkValue(tracer, x, "WeakMap entry");
         return true;
     }
 };
 
 template <>
 class DefaultMarkPolicy<HeapPtrObject> {
   private:
     JSTracer *tracer;
   public:
     DefaultMarkPolicy(JSTracer *t) : tracer(t) { }
     bool isMarked(const HeapPtrObject &x) {
         return !IsAboutToBeFinalized(x);
     }
-    bool mark(HeapPtrObject &x) {
-        if (isMarked(x))
+    bool mark(HeapPtrObject *x) {
+        if (isMarked(*x))
             return false;
         js::gc::MarkObject(tracer, x, "WeakMap entry");
         return true;
     }
 };
 
 template <>
 class DefaultMarkPolicy<HeapPtrScript> {
   private:
     JSTracer *tracer;
   public:
     DefaultMarkPolicy(JSTracer *t) : tracer(t) { }
     bool isMarked(const HeapPtrScript &x) {
         return !IsAboutToBeFinalized(x);
     }
-    bool mark(HeapPtrScript &x) {
-        if (isMarked(x))
+    bool mark(HeapPtrScript *x) {
+        if (isMarked(*x))
             return false;
         js::gc::MarkScript(tracer, x, "WeakMap entry");
         return true;
     }
 };
 
 // Default trace policies
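With DefaultMarkPolicy::mark now taking a pointer, callers forward the address of the stored slot rather than a copy, which is exactly what nonMarkingTrace and markIteratively above do after this change. A toy caller, under the assumption of a map whose values are HeapPtrObject (the map and loop are illustrative only):

    // Sketch: mark every value slot in a weak map through its mark policy.
    // Passing &r.front().value hands the policy the slot itself, so the
    // underlying MarkObject call can (eventually) update it in place.
    DefaultMarkPolicy<HeapPtrObject> policy(trc);
    for (Map::Range r = map.all(); !r.empty(); r.popFront())
        policy.mark(&r.front().value);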
--- a/js/src/jsxml.cpp
+++ b/js/src/jsxml.cpp
@@ -861,26 +861,26 @@ attr_identity(const JSXML *xmla, const J
     return qname_identity(xmla->name, xmlb->name);
 }
 
 void
 js_XMLArrayCursorTrace(JSTracer *trc, JSXMLArrayCursor<JSXML> *cursor)
 {
     for (; cursor; cursor = cursor->next) {
         if (cursor->root)
-            MarkXML(trc, (const HeapPtr<JSXML> &)cursor->root, "cursor_root");
+            MarkXML(trc, &(HeapPtr<JSXML> &)cursor->root, "cursor_root");
     }
 }
 
 void
 js_XMLArrayCursorTrace(JSTracer *trc, JSXMLArrayCursor<JSObject> *cursor)
 {
     for (; cursor; cursor = cursor->next) {
         if (cursor->root)
-            MarkObject(trc, (const HeapPtr<JSObject> &)cursor->root, "cursor_root");
+            MarkObject(trc, &(HeapPtr<JSObject> &)cursor->root, "cursor_root");
     }
 }
 
 template<class T>
 static HeapPtr<T> *
 ReallocateVector(HeapPtr<T> *vector, size_t count)
 {
 #if JS_BITS_PER_WORD == 32
@@ -7323,36 +7323,36 @@ void
 JSXML::writeBarrierPost(JSXML *xml, void *addr)
 {
 }
 
 void
 js_TraceXML(JSTracer *trc, JSXML *xml)
 {
     if (xml->object)
-        MarkObject(trc, xml->object, "object");
+        MarkObject(trc, &xml->object, "object");
 
     if (xml->name)
-        MarkObject(trc, xml->name, "name");
+        MarkObject(trc, &xml->name, "name");
 
     if (xml->parent)
-        MarkXML(trc, xml->parent, "xml_parent");
+        MarkXML(trc, &xml->parent, "xml_parent");
 
     if (JSXML_HAS_VALUE(xml)) {
         if (xml->xml_value)
-            MarkString(trc, xml->xml_value, "value");
+            MarkString(trc, &xml->xml_value, "value");
         return;
     }
 
     MarkXMLRange(trc, xml->xml_kids.length, xml->xml_kids.vector, "xml_kids");
     js_XMLArrayCursorTrace(trc, xml->xml_kids.cursors);
 
     if (xml->xml_class == JSXML_CLASS_LIST) {
         if (xml->xml_target)
-            MarkXML(trc, xml->xml_target, "target");
+            MarkXML(trc, &xml->xml_target, "target");
         if (xml->xml_targetprop)
-            MarkObject(trc, xml->xml_targetprop, "targetprop");
+            MarkObject(trc, &xml->xml_targetprop, "targetprop");
     } else {
         MarkObjectRange(trc, xml->xml_namespaces.length, xml->xml_namespaces.vector,
                         "xml_namespaces");
         js_XMLArrayCursorTrace(trc, xml->xml_namespaces.cursors);
         MarkXMLRange(trc, xml->xml_attrs.length, xml->xml_attrs.vector, "xml_attrs");
         js_XMLArrayCursorTrace(trc, xml->xml_attrs.cursors);
@@ -7893,21 +7893,21 @@ struct JSXMLFilter
 static void
 xmlfilter_trace(JSTracer *trc, JSObject *obj)
 {
     JSXMLFilter *filter = (JSXMLFilter *) obj->getPrivate();
     if (!filter)
         return;
 
     JS_ASSERT(filter->list);
-    MarkXML(trc, filter->list, "list");
+    MarkXML(trc, &filter->list, "list");
     if (filter->result)
-        MarkXML(trc, filter->result, "result");
+        MarkXML(trc, &filter->result, "result");
     if (filter->kid)
-        MarkXML(trc, filter->kid, "kid");
+        MarkXML(trc, &filter->kid, "kid");
 
     /*
      * We do not need to trace the cursor as that would be done when
     * tracing the filter->list.
     */
 }
 
 static void
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -1058,34 +1058,43 @@ Debugger::markKeysInCompartment(JSTracer
      * enumerating WeakMap keys. However in this case we need access, so we
      * make a base-class reference. Range is public in HashMap.
      */
     typedef HashMap<HeapPtrObject, HeapPtrObject, DefaultHasher<HeapPtrObject>, RuntimeAllocPolicy>
         ObjectMap;
     const ObjectMap &objStorage = objects;
     for (ObjectMap::Range r = objStorage.all(); !r.empty(); r.popFront()) {
         const HeapPtrObject &key = r.front().key;
-        if (key->compartment() == comp && IsAboutToBeFinalized(key))
-            gc::MarkObject(tracer, key, "cross-compartment WeakMap key");
+        if (key->compartment() == comp && IsAboutToBeFinalized(key)) {
+            HeapPtrObject tmp(key);
+            gc::MarkObject(tracer, &tmp, "cross-compartment WeakMap key");
+            JS_ASSERT(tmp == key);
+        }
     }
 
     const ObjectMap &envStorage = environments;
     for (ObjectMap::Range r = envStorage.all(); !r.empty(); r.popFront()) {
         const HeapPtrObject &key = r.front().key;
-        if (key->compartment() == comp && IsAboutToBeFinalized(key))
-            js::gc::MarkObject(tracer, key, "cross-compartment WeakMap key");
+        if (key->compartment() == comp && IsAboutToBeFinalized(key)) {
+            HeapPtrObject tmp(key);
+            js::gc::MarkObject(tracer, &tmp, "cross-compartment WeakMap key");
+            JS_ASSERT(tmp == key);
+        }
     }
 
     typedef HashMap<HeapPtrScript, HeapPtrObject, DefaultHasher<HeapPtrScript>, RuntimeAllocPolicy>
         ScriptMap;
     const ScriptMap &scriptStorage = scripts;
     for (ScriptMap::Range r = scriptStorage.all(); !r.empty(); r.popFront()) {
         const HeapPtrScript &key = r.front().key;
-        if (key->compartment() == comp && IsAboutToBeFinalized(key))
-            gc::MarkScript(tracer, key, "cross-compartment WeakMap key");
+        if (key->compartment() == comp && IsAboutToBeFinalized(key)) {
+            HeapPtrScript tmp(key);
+            gc::MarkScript(tracer, &tmp, "cross-compartment WeakMap key");
+            JS_ASSERT(tmp == key);
+        }
     }
 }
 
 /*
  * Ordinarily, WeakMap keys and values are marked because at some point it was
  * discovered that the WeakMap was live; that is, some object containing the
  * WeakMap was marked during mark phase.
 *
@@ -1171,42 +1180,41 @@ Debugger::markAllIteratively(GCMarker *t
             Debugger *dbg = *p;
 
             /*
              * dbg is a Debugger with at least one debuggee. Check three things:
              *   - dbg is actually in a compartment being GC'd
              *   - it isn't already marked
              *   - it actually has hooks that might be called
              */
-            const HeapPtrObject &dbgobj = dbg->toJSObject();
+            HeapPtrObject &dbgobj = dbg->toJSObjectRef();
             if (comp && comp != dbgobj->compartment())
                 continue;
 
             bool dbgMarked = !IsAboutToBeFinalized(dbgobj);
             if (!dbgMarked && dbg->hasAnyLiveHooks()) {
                 /*
                  * obj could be reachable only via its live, enabled
                  * debugger hooks, which may yet be called.
                  */
-                MarkObject(trc, dbgobj, "enabled Debugger");
+                MarkObject(trc, &dbgobj, "enabled Debugger");
                 markedAny = true;
                 dbgMarked = true;
             }
 
             if (dbgMarked) {
                 /* Search for breakpoints to mark. */
                 for (Breakpoint *bp = dbg->firstBreakpoint(); bp; bp = bp->nextInDebugger()) {
                     if (!IsAboutToBeFinalized(bp->site->script)) {
                         /*
                          * The debugger and the script are both live.
                          * Therefore the breakpoint handler is live.
                          */
-                        const HeapPtrObject &handler = bp->getHandler();
-                        if (IsAboutToBeFinalized(handler)) {
-                            MarkObject(trc, bp->getHandler(), "breakpoint handler");
+                        if (IsAboutToBeFinalized(bp->getHandler())) {
+                            MarkObject(trc, &bp->getHandlerRef(), "breakpoint handler");
                             markedAny = true;
                         }
                     }
                 }
             }
         }
     }
 }
@@ -1219,30 +1227,30 @@ Debugger::traceObject(JSTracer *trc, JSO
     if (Debugger *dbg = Debugger::fromJSObject(obj))
         dbg->trace(trc);
 }
 
 void
 Debugger::trace(JSTracer *trc)
 {
     if (uncaughtExceptionHook)
-        MarkObject(trc, uncaughtExceptionHook, "hooks");
+        MarkObject(trc, &uncaughtExceptionHook, "hooks");
 
     /*
      * Mark Debugger.Frame objects. These are all reachable from JS, because the
      * corresponding StackFrames are still on the stack.
     *
     * (Once we support generator frames properly, we will need
     * weakly-referenced Debugger.Frame objects as well, for suspended generator
    * frames.)
    */
     for (FrameMap::Range r = frames.all(); !r.empty(); r.popFront()) {
-        const HeapPtrObject &frameobj = r.front().value;
+        HeapPtrObject &frameobj = r.front().value;
         JS_ASSERT(frameobj->getPrivate());
-        MarkObject(trc, frameobj, "live Debugger.Frame");
+        MarkObject(trc, &frameobj, "live Debugger.Frame");
     }
 
     /* Trace the weak map from JSScript instances to Debugger.Script objects. */
     scripts.trace(trc);
 
     /* Trace the referent -> Debugger.Object weak map. */
     objects.trace(trc);
--- a/js/src/vm/Debugger.h
+++ b/js/src/vm/Debugger.h
@@ -231,16 +231,17 @@ class Debugger {
     inline Breakpoint *firstBreakpoint() const;
 
   public:
     Debugger(JSContext *cx, JSObject *dbg);
     ~Debugger();
 
     bool init(JSContext *cx);
     inline const js::HeapPtrObject &toJSObject() const;
+    inline js::HeapPtrObject &toJSObjectRef();
     static inline Debugger *fromJSObject(JSObject *obj);
     static Debugger *fromChildJSObject(JSObject *obj);
 
     /*********************************** Methods for interaction with the GC. */
 
     /*
      * A Debugger object is live if:
      *   * the Debugger JSObject is live (Debugger::trace handles this case); OR
@@ -426,16 +427,17 @@ class Breakpoint {
   public:
     static Breakpoint *fromDebuggerLinks(JSCList *links);
     static Breakpoint *fromSiteLinks(JSCList *links);
     Breakpoint(Debugger *debugger, BreakpointSite *site, JSObject *handler);
     void destroy(JSContext *cx);
     Breakpoint *nextInDebugger();
     Breakpoint *nextInSite();
     const HeapPtrObject &getHandler() const { return handler; }
+    HeapPtrObject &getHandlerRef() { return handler; }
 };
 
 Debugger *
 Debugger::fromLinks(JSCList *links)
 {
     unsigned char *p = reinterpret_cast<unsigned char *>(links);
     return reinterpret_cast<Debugger *>(p - offsetof(Debugger, link));
 }
@@ -450,16 +452,23 @@ Debugger::firstBreakpoint() const
 
 const js::HeapPtrObject &
 Debugger::toJSObject() const
 {
     JS_ASSERT(object);
     return object;
 }
 
+js::HeapPtrObject &
+Debugger::toJSObjectRef()
+{
+    JS_ASSERT(object);
+    return object;
+}
+
 Debugger *
 Debugger::fromJSObject(JSObject *obj)
 {
     JS_ASSERT(js::GetObjectClass(obj) == &jsclass);
     return (Debugger *) obj->getPrivate();
 }
 
 bool
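Shape::previousRef (jsscope.h above), Debugger::toJSObjectRef, and Breakpoint::getHandlerRef are the same workaround: trace code now needs a non-const HeapPtr it can take the address of, while ordinary callers keep using the const accessor. A condensed sketch of the pairing (the Holder class and its field are illustrative, not from the patch):

    // Sketch: pair a const accessor for normal use with a mutable *Ref
    // accessor that exists so the tracer can write MarkObject(trc, &ref, ...).
    class Holder
    {
        js::HeapPtrObject object;

      public:
        const js::HeapPtrObject &toJSObject() const { return object; }
        js::HeapPtrObject &toJSObjectRef() { return object; }
    };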
--- a/js/src/vm/RegExpStatics.h
+++ b/js/src/vm/RegExpStatics.h
@@ -103,17 +103,17 @@ class RegExpStatics
                 continue;
             int start = get(i, 0);
             int limit = get(i, 1);
             JS_ASSERT(mpiLen >= size_t(limit) && limit >= start && start >= 0);
         }
 #endif
     }
 
-    /* 
+    /*
      * Since the first pair indicates the whole match, the paren pair
      * numbers have to be in the range [1, pairCount).
      */
     void checkParenNum(size_t pairNum) const {
         JS_ASSERT(1 <= pairNum);
         JS_ASSERT(pairNum < pairCount());
     }
 
@@ -200,21 +200,21 @@ class RegExpStatics
 
     /* Returns whether results for a non-empty match are present. */
     bool matched() const {
         JS_ASSERT(pairCount() > 0);
         JS_ASSERT_IF(get(0, 1) == -1, get(1, 1) == -1);
         return get(0, 1) - get(0, 0) > 0;
     }
 
-    void mark(JSTracer *trc) const {
+    void mark(JSTracer *trc) {
         if (pendingInput)
-            MarkString(trc, pendingInput, "res->pendingInput");
+            MarkString(trc, &pendingInput, "res->pendingInput");
         if (matchPairsInput)
-            MarkString(trc, matchPairsInput, "res->matchPairsInput");
+            MarkString(trc, &matchPairsInput, "res->matchPairsInput");
     }
 
     bool pairIsPresent(size_t pairNum) const {
         return get(pairNum, 0) >= 0;
     }
 
     /* Value creators. */