Merge tracemonkey to mozilla-central.
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -1685,28 +1685,28 @@ InitArrayObject(JSContext *cx, JSObject
}
#ifdef JS_TRACER
static JSString* FASTCALL
Array_p_join(JSContext* cx, JSObject* obj, JSString *str)
{
JSAutoTempValueRooter tvr(cx);
if (!array_join_sub(cx, obj, TO_STRING, str, tvr.addr())) {
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
return NULL;
}
return JSVAL_TO_STRING(tvr.value());
}
static JSString* FASTCALL
Array_p_toString(JSContext* cx, JSObject* obj)
{
JSAutoTempValueRooter tvr(cx);
if (!array_join_sub(cx, obj, TO_STRING, NULL, tvr.addr())) {
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
return NULL;
}
return JSVAL_TO_STRING(tvr.value());
}
#endif
/*
* Perl-inspired join, reverse, and sort.
@@ -2312,17 +2312,17 @@ static jsval FASTCALL
Array_p_push1(JSContext* cx, JSObject* obj, jsval v)
{
JSAutoTempValueRooter tvr(cx, v);
if (OBJ_IS_DENSE_ARRAY(cx, obj)
? array_push1_dense(cx, obj, v, tvr.addr())
: array_push_slowly(cx, obj, 1, tvr.addr(), tvr.addr())) {
return tvr.value();
}
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
return JSVAL_VOID;
}
#endif
static JSBool
array_push(JSContext *cx, uintN argc, jsval *vp)
{
JSObject *obj;
@@ -2384,17 +2384,17 @@ static jsval FASTCALL
Array_p_pop(JSContext* cx, JSObject* obj)
{
JSAutoTempValueRooter tvr(cx);
if (OBJ_IS_DENSE_ARRAY(cx, obj)
? array_pop_dense(cx, obj, tvr.addr())
: array_pop_slowly(cx, obj, tvr.addr())) {
return tvr.value();
}
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
return JSVAL_VOID;
}
#endif
static JSBool
array_pop(JSContext *cx, uintN argc, jsval *vp)
{
JSObject *obj;
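
All four jsarray.cpp hunks are the same mechanical substitution: a _FAIL
traceable native that used to OR JSBUILTIN_ERROR into cx->builtinStatus now
calls js_SetBuiltinError (defined inline in jstracer.h, below), which targets
cx->interpState->builtinStatus instead. A minimal sketch of the pattern;
Example_p_frob and example_frob_sub are hypothetical, not part of this patch:

    #ifdef JS_TRACER
    static JSString* FASTCALL
    Example_p_frob(JSContext* cx, JSObject* obj)
    {
        JSAutoTempValueRooter tvr(cx);
        /* On failure, record the error in the per-activation status word
         * (no longer on the JSContext) and return the error sentinel. */
        if (!example_frob_sub(cx, obj, tvr.addr())) {  /* hypothetical helper */
            js_SetBuiltinError(cx);
            return NULL;
        }
        return JSVAL_TO_STRING(tvr.value());
    }
    #endif
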
--- a/js/src/jsbuiltins.cpp
+++ b/js/src/jsbuiltins.cpp
@@ -58,16 +58,22 @@
#include "jsbuiltins.h"
#include "jstracer.h"
using namespace avmplus;
using namespace nanojit;
extern jsdouble js_NaN;
+JS_FRIEND_API(void)
+js_SetTraceableNativeFailed(JSContext *cx)
+{
+ js_SetBuiltinError(cx);
+}
+
/*
* NB: bool FASTCALL is not compatible with Nanojit's calling convention usage.
* Do not use bool FASTCALL, use JSBool only!
*/
jsdouble FASTCALL
js_dmod(jsdouble a, jsdouble b)
{
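
js_SetTraceableNativeFailed is needed because js_SetBuiltinError is defined
inline in jstracer.h; exporting a JS_FRIEND_API wrapper lets callers that see
only jsbuiltins.h (notably the quickstubs emitted by qsgen.py at the end of
this patch) report failure without knowing the InterpState layout. A hedged
sketch of such a caller; the quickstub shape and GetFooImpl are illustrative:

    static jsval FASTCALL
    Quickstub_getFoo(JSContext *cx, JSObject *obj)
    {
        jsval v;
        if (!GetFooImpl(cx, obj, &v)) {          /* hypothetical impl call */
            js_SetTraceableNativeFailed(cx);     /* forwards to js_SetBuiltinError */
            return JSVAL_VOID;
        }
        return v;
    }
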
--- a/js/src/jsbuiltins.h
+++ b/js/src/jsbuiltins.h
@@ -126,17 +126,17 @@ struct JSTraceableNative {
*
* - If a traceable native's return type ends with _FAIL, it always runs to
* completion. It can either succeed or fail with an error or exception;
* on success, it may or may not stay on trace. There may be side effects
* in any case. If the call succeeds but bails off trace, we resume in the
* interpreter at the next opcode.
*
* _FAIL builtins indicate failure or bailing off trace by setting bits in
- * cx->builtinStatus.
+ * cx->interpState->builtinStatus.
*
* - If a traceable native's return type contains _RETRY, it can either
* succeed, fail with a JS exception, or tell the caller to bail off trace
* and retry the call from the interpreter. The last case happens if the
* builtin discovers that it can't do its job without examining the JS
* stack, reentering the interpreter, accessing properties of the global
* object, etc.
*
@@ -149,17 +149,17 @@ struct JSTraceableNative {
*
* BOOL_RETRY: JSVAL_TO_BOOLEAN(JSVAL_VOID)
* INT32_RETRY: any negative value
* STRING_RETRY: NULL
* OBJECT_RETRY_NULL: NULL
* JSVAL_RETRY: JSVAL_ERROR_COOKIE
*
* _RETRY function calls are faster than _FAIL calls. Each _RETRY call
- * saves a write to cx->bailExit and a read from cx->builtinStatus.
+ * saves two writes to cx->bailExit and a read from state->builtinStatus.
*
* - All other traceable natives are infallible (e.g. Date.now, Math.log).
*
* Special builtins known to the tracer can have their own idiosyncratic
* error codes.
*
* When a traceable native returns a value indicating failure, we fall off
* trace. If an exception is pending, it is thrown; otherwise, we assume the
@@ -392,16 +392,20 @@ js_Int32ToId(JSContext* cx, int32 index,
return JS_TRUE;
}
JSString* str = js_NumberToString(cx, index);
if (!str)
return JS_FALSE;
return js_ValueToStringId(cx, STRING_TO_JSVAL(str), id);
}
+/* Extern version of js_SetBuiltinError. */
+extern JS_FRIEND_API(void)
+js_SetTraceableNativeFailed(JSContext *cx);
+
#else
#define JS_DEFINE_CALLINFO_1(linkage, rt, op, at0, cse, fold)
#define JS_DEFINE_CALLINFO_2(linkage, rt, op, at0, at1, cse, fold)
#define JS_DEFINE_CALLINFO_3(linkage, rt, op, at0, at1, at2, cse, fold)
#define JS_DEFINE_CALLINFO_4(linkage, rt, op, at0, at1, at2, at3, cse, fold)
#define JS_DEFINE_CALLINFO_5(linkage, rt, op, at0, at1, at2, at3, at4, cse, fold)
#define JS_DECLARE_CALLINFO(name)
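
To make the _RETRY/_FAIL contract above concrete, here is a hedged
side-by-side sketch following the STRING_RETRY convention from the table;
TryFastLookup and SlowLookup are hypothetical helpers:

    /* _RETRY: failure is in-band; NULL means "bail off trace and retry from
     * the interpreter". No bailExit or builtinStatus traffic at all. */
    static JSString* FASTCALL
    Example_lookup_retry(JSContext* cx, JSObject* obj)
    {
        return TryFastLookup(cx, obj);    /* NULL == STRING_RETRY sentinel */
    }

    /* _FAIL: always runs to completion; an error is reported out-of-band in
     * cx->interpState->builtinStatus and guarded on after the call. */
    static JSString* FASTCALL
    Example_lookup_fail(JSContext* cx, JSObject* obj)
    {
        JSString* str = SlowLookup(cx, obj);
        if (!str)
            js_SetBuiltinError(cx);
        return str;
    }
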
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -295,24 +295,16 @@ typedef enum JSDestroyContextMode {
typedef enum JSRuntimeState {
JSRTS_DOWN,
JSRTS_LAUNCHING,
JSRTS_UP,
JSRTS_LANDING
} JSRuntimeState;
-#ifdef JS_TRACER
-typedef enum JSBuiltinStatus {
- JSBUILTIN_OK = 0,
- JSBUILTIN_BAILED = 1,
- JSBUILTIN_ERROR = 2
-} JSBuiltinStatus;
-#endif
-
typedef enum JSBuiltinFunctionId {
JSBUILTIN_ObjectToIterator,
JSBUILTIN_CallIteratorNext,
JSBUILTIN_GetProperty,
JSBUILTIN_GetElement,
JSBUILTIN_SetProperty,
JSBUILTIN_SetElement,
JSBUILTIN_LIMIT
@@ -1003,23 +995,16 @@ struct JSContext {
#ifdef JS_TRACER
/*
* State for the current tree execution. bailExit is valid if the tree has
* called back into native code via a _FAIL builtin and has not yet bailed,
* else garbage (NULL in debug builds).
*/
InterpState *interpState;
VMSideExit *bailExit;
-
- /*
- * Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
- * JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
- * if an error or exception occurred. Cleared on side exit.
- */
- uint32 builtinStatus;
#endif
};
#ifdef JS_THREADSAFE
# define JS_THREAD_ID(cx) ((cx)->thread ? (cx)->thread->id : 0)
#endif
#ifdef __cplusplus
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -2200,36 +2200,19 @@ js_NewFlatClosure(JSContext *cx, JSFunct
JS_ASSERT(nslots == JS_INITIAL_NSLOTS);
nslots += fun_reserveSlots(cx, closure);
if (!js_ReallocSlots(cx, closure, nslots, JS_TRUE))
return NULL;
JSUpvarArray *uva = JS_SCRIPT_UPVARS(fun->u.i.script);
JS_ASSERT(uva->length <= size_t(closure->dslots[-1]));
- for (uint32 i = 0, n = uva->length; i < n; i++) {
- uint32 cookie = uva->vector[i];
-
- uintN upvarLevel = fun->u.i.script->staticLevel - UPVAR_FRAME_SKIP(cookie);
- JS_ASSERT(upvarLevel <= JS_DISPLAY_SIZE);
- JSStackFrame *fp2 = cx->display[upvarLevel];
-
- uintN slot = UPVAR_FRAME_SLOT(cookie);
- jsval *vp;
- if (fp2->fun && slot < fp2->fun->nargs) {
- vp = fp2->argv;
- } else {
- if (fp2->fun)
- slot -= fp2->fun->nargs;
- JS_ASSERT(slot < fp2->script->nslots);
- vp = fp2->slots;
- }
-
- closure->dslots[i] = vp[slot];
- }
+ uintN level = fun->u.i.script->staticLevel;
+ for (uint32 i = 0, n = uva->length; i < n; i++)
+ closure->dslots[i] = js_GetUpvar(cx, level, uva->vector[i]);
return closure;
}
JSFunction *
js_DefineFunction(JSContext *cx, JSObject *obj, JSAtom *atom, JSNative native,
uintN nargs, uintN attrs)
{
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -2049,16 +2049,44 @@ js_DoIncDec(JSContext *cx, const JSCodeS
if (!js_NewNumberInRootedValue(cx, d, vp2))
return JS_FALSE;
if (!(cs->format & JOF_POST))
*vp = *vp2;
return JS_TRUE;
}
+jsval
+js_GetUpvar(JSContext *cx, uintN level, uintN cookie)
+{
+ level -= UPVAR_FRAME_SKIP(cookie);
+ JS_ASSERT(level < JS_DISPLAY_SIZE);
+
+ JSStackFrame *fp = cx->display[level];
+ JS_ASSERT(fp->script);
+
+ uintN slot = UPVAR_FRAME_SLOT(cookie);
+ jsval *vp;
+
+ if (!fp->fun) {
+ vp = fp->slots + fp->script->nfixed;
+ } else if (slot < fp->fun->nargs) {
+ vp = fp->argv;
+ } else if (slot == CALLEE_UPVAR_SLOT) {
+ vp = &fp->argv[-2];
+ slot = 0;
+ } else {
+ slot -= fp->fun->nargs;
+ JS_ASSERT(slot < fp->script->nslots);
+ vp = fp->slots;
+ }
+
+ return vp[slot];
+}
+
#ifdef DEBUG
JS_STATIC_INTERPRET JS_REQUIRES_STACK void
js_TraceOpcode(JSContext *cx)
{
FILE *tracefp;
JSStackFrame *fp;
JSFrameRegs *regs;
@@ -2486,16 +2514,90 @@ JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(IN
__SUNPRO_C >= 0x570)
# define JS_THREADED_INTERP 1
# else
# define JS_THREADED_INTERP 0
# endif
#endif
/*
+ * Deadlocks or else bad races are likely if JS_THREADSAFE, so we must rely on
+ * single-thread DEBUG js shell testing to verify property cache hits.
+ */
+#if defined DEBUG && !defined JS_THREADSAFE
+
+# define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) \
+ JS_BEGIN_MACRO \
+ if (!AssertValidPropertyCacheHit(cx, script, regs, pcoff, obj, pobj, \
+ entry)) { \
+ goto error; \
+ } \
+ JS_END_MACRO
+
+static bool
+AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, JSFrameRegs& regs,
+ ptrdiff_t pcoff, JSObject *start, JSObject *found,
+ JSPropCacheEntry *entry)
+{
+ uint32 sample = cx->runtime->gcNumber;
+
+ JSAtom *atom;
+ if (pcoff >= 0)
+ GET_ATOM_FROM_BYTECODE(script, regs.pc, pcoff, atom);
+ else
+ atom = cx->runtime->atomState.lengthAtom;
+
+ JSObject *obj, *pobj;
+ JSProperty *prop;
+ bool ok;
+
+ if (JOF_OPMODE(*regs.pc) == JOF_NAME) {
+ ok = js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &pobj, &prop);
+ } else {
+ obj = start;
+ ok = js_LookupProperty(cx, obj, ATOM_TO_JSID(atom), &pobj, &prop);
+ }
+ if (!ok)
+ return false;
+ if (!prop)
+ return true;
+ if (cx->runtime->gcNumber != sample ||
+ PCVCAP_SHAPE(entry->vcap) != OBJ_SHAPE(pobj)) {
+ OBJ_DROP_PROPERTY(cx, pobj, prop);
+ return true;
+ }
+ JS_ASSERT(prop);
+ JS_ASSERT(pobj == found);
+
+ JSScopeProperty *sprop = (JSScopeProperty *) prop;
+ if (PCVAL_IS_SLOT(entry->vword)) {
+ JS_ASSERT(PCVAL_TO_SLOT(entry->vword) == sprop->slot);
+ } else if (PCVAL_IS_SPROP(entry->vword)) {
+ JS_ASSERT(PCVAL_TO_SPROP(entry->vword) == sprop);
+ } else {
+ jsval v;
+ JS_ASSERT(PCVAL_IS_OBJECT(entry->vword));
+ JS_ASSERT(entry->vword != PCVAL_NULL);
+ JS_ASSERT(SCOPE_IS_BRANDED(OBJ_SCOPE(pobj)));
+ JS_ASSERT(SPROP_HAS_STUB_GETTER(sprop));
+ JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(pobj)));
+ v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot);
+ JS_ASSERT(VALUE_IS_FUNCTION(cx, v));
+ JS_ASSERT(PCVAL_TO_OBJECT(entry->vword) == JSVAL_TO_OBJECT(v));
+ }
+
+ OBJ_DROP_PROPERTY(cx, pobj, prop);
+ return true;
+}
+
+#else
+# define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) ((void) 0)
+#endif
+
+/*
* Ensure that the interpreter switch can close call-bytecode cases in the
* same way as non-call bytecodes.
*/
JS_STATIC_ASSERT(JSOP_NAME_LENGTH == JSOP_CALLNAME_LENGTH);
JS_STATIC_ASSERT(JSOP_GETGVAR_LENGTH == JSOP_CALLGVAR_LENGTH);
JS_STATIC_ASSERT(JSOP_GETUPVAR_LENGTH == JSOP_CALLUPVAR_LENGTH);
JS_STATIC_ASSERT(JSOP_GETDSLOT_LENGTH == JSOP_CALLDSLOT_LENGTH);
JS_STATIC_ASSERT(JSOP_GETARG_LENGTH == JSOP_CALLARG_LENGTH);
@@ -3469,68 +3571,16 @@ js_Interpret(JSContext *cx)
LOCKED_OBJ_WRITE_BARRIER(cx, obj, (sprop)->slot, *vp); \
} else { \
if (!js_NativeSet(cx, obj, sprop, vp)) \
goto error; \
} \
JS_END_MACRO
/*
- * Deadlocks or else bad races are likely if JS_THREADSAFE, so we must rely on
- * single-thread DEBUG js shell testing to verify property cache hits.
- */
-#if defined DEBUG && !defined JS_THREADSAFE
-# define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) \
- do { \
- JSAtom *atom_; \
- JSObject *obj_, *pobj_; \
- JSProperty *prop_; \
- JSScopeProperty *sprop_; \
- uint32 sample_ = rt->gcNumber; \
- if (pcoff >= 0) \
- GET_ATOM_FROM_BYTECODE(script, regs.pc, pcoff, atom_); \
- else \
- atom_ = rt->atomState.lengthAtom; \
- if (JOF_OPMODE(op) == JOF_NAME) { \
- ok = js_FindProperty(cx, ATOM_TO_JSID(atom_), &obj_, &pobj_, \
- &prop_); \
- } else { \
- obj_ = obj; \
- ok = js_LookupProperty(cx, obj, ATOM_TO_JSID(atom_), &pobj_, \
- &prop_); \
- } \
- if (!ok) \
- goto error; \
- if (rt->gcNumber != sample_) \
- break; \
- JS_ASSERT(prop_); \
- JS_ASSERT(pobj_ == pobj); \
- sprop_ = (JSScopeProperty *) prop_; \
- if (PCVAL_IS_SLOT(entry->vword)) { \
- JS_ASSERT(PCVAL_TO_SLOT(entry->vword) == sprop_->slot); \
- } else if (PCVAL_IS_SPROP(entry->vword)) { \
- JS_ASSERT(PCVAL_TO_SPROP(entry->vword) == sprop_); \
- } else { \
- jsval v_; \
- JS_ASSERT(PCVAL_IS_OBJECT(entry->vword)); \
- JS_ASSERT(entry->vword != PCVAL_NULL); \
- JS_ASSERT(SCOPE_IS_BRANDED(OBJ_SCOPE(pobj))); \
- JS_ASSERT(SPROP_HAS_STUB_GETTER(sprop_)); \
- JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop_, OBJ_SCOPE(pobj_))); \
- v_ = LOCKED_OBJ_GET_SLOT(pobj_, sprop_->slot); \
- JS_ASSERT(VALUE_IS_FUNCTION(cx, v_)); \
- JS_ASSERT(PCVAL_TO_OBJECT(entry->vword) == JSVAL_TO_OBJECT(v_)); \
- } \
- OBJ_DROP_PROPERTY(cx, pobj_, prop_); \
- } while (0)
-#else
-# define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) ((void) 0)
-#endif
-
-/*
* Skip the JSOP_POP typically found after a JSOP_SET* opcode, where oplen is
* the constant length of the SET opcode sequence, and spdec is the constant
* by which to decrease the stack pointer to pop all of the SET op's operands.
*
* NB: unlike macros that could conceivably be replaced by functions (ignoring
* goto error), where a call should not have to be braced in order to expand
* correctly (e.g., in if (cond) FOO(); else BAR()), these three macros lack
* JS_{BEGIN,END}_MACRO brackets. They are also indented so as to align with
@@ -5655,39 +5705,24 @@ js_Interpret(JSContext *cx)
JS_ASSERT(slot < script->nslots);
vp = &fp->slots[slot];
*vp = FETCH_OPND(-1);
END_SET_CASE(JSOP_SETLOCAL)
BEGIN_CASE(JSOP_GETUPVAR)
BEGIN_CASE(JSOP_CALLUPVAR)
{
- JSUpvarArray *uva;
- uint32 skip;
- JSStackFrame *fp2;
+ JSUpvarArray *uva = JS_SCRIPT_UPVARS(script);
index = GET_UINT16(regs.pc);
- uva = JS_SCRIPT_UPVARS(script);
JS_ASSERT(index < uva->length);
- skip = UPVAR_FRAME_SKIP(uva->vector[index]);
- fp2 = cx->display[script->staticLevel - skip];
- JS_ASSERT(fp2->script);
-
- slot = UPVAR_FRAME_SLOT(uva->vector[index]);
- if (!fp2->fun) {
- vp = fp2->slots + fp2->script->nfixed;
- } else if (slot < fp2->fun->nargs) {
- vp = fp2->argv;
- } else {
- slot -= fp2->fun->nargs;
- JS_ASSERT(slot < fp2->script->nslots);
- vp = fp2->slots;
- }
-
- PUSH_OPND(vp[slot]);
+
+ rval = js_GetUpvar(cx, script->staticLevel, uva->vector[index]);
+ PUSH_OPND(rval);
+
if (op == JSOP_CALLUPVAR)
PUSH_OPND(JSVAL_NULL);
}
END_CASE(JSOP_GETUPVAR)
BEGIN_CASE(JSOP_GETDSLOT)
BEGIN_CASE(JSOP_CALLDSLOT)
obj = fp->callee;
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -522,16 +522,23 @@ js_CheckRedeclaration(JSContext *cx, JSO
extern JSBool
js_StrictlyEqual(JSContext *cx, jsval lval, jsval rval);
extern JSBool
js_InternNonIntElementId(JSContext *cx, JSObject *obj, jsval idval, jsid *idp);
/*
+ * Given an active context, a static scope level, and an upvar cookie, return
+ * the value of the upvar.
+ */
+extern jsval
+js_GetUpvar(JSContext *cx, uintN level, uintN cookie);
+
+/*
* JS_LONE_INTERPRET indicates that the compiler should see just the code for
* the js_Interpret function when compiling jsinterp.cpp. The rest of the code
* from the file should be visible only when compiling jsinvoke.cpp. It allows
platform builds to selectively optimize js_Interpret when the granularity
* of the optimizations with the given compiler is a compilation unit.
*
* JS_STATIC_INTERPRET is the modifier for functions defined in jsinterp.cpp
* that only js_Interpret calls. When JS_LONE_INTERPRET is true all such
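
Both consumers of the new helper in this patch (js_NewFlatClosure in jsfun.cpp
and the JSOP_GETUPVAR/JSOP_CALLUPVAR case in jsinterp.cpp) follow the same
shape; a condensed usage sketch:

    /* Resolve each of a script's upvar cookies against the display. */
    JSUpvarArray *uva = JS_SCRIPT_UPVARS(script);
    for (uint32 i = 0; i < uva->length; i++) {
        jsval v = js_GetUpvar(cx, script->staticLevel, uva->vector[i]);
        /* ...copy v into closure->dslots[i], or PUSH_OPND(v)... */
    }
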
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -1692,17 +1692,17 @@ js_HasOwnProperty(JSContext *cx, JSLooku
static JSBool FASTCALL
Object_p_hasOwnProperty(JSContext* cx, JSObject* obj, JSString *str)
{
jsid id;
jsval v;
if (!js_ValueToStringId(cx, STRING_TO_JSVAL(str), &id) ||
!js_HasOwnProperty(cx, obj->map->ops->lookupProperty, obj, id, &v)) {
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
return JSVAL_TO_BOOLEAN(JSVAL_VOID);
}
JS_ASSERT(JSVAL_IS_BOOLEAN(v));
return JSVAL_TO_BOOLEAN(v);
}
#endif
@@ -1737,17 +1737,17 @@ obj_propertyIsEnumerable(JSContext *cx,
#ifdef JS_TRACER
static JSBool FASTCALL
Object_p_propertyIsEnumerable(JSContext* cx, JSObject* obj, JSString *str)
{
jsid id = ATOM_TO_JSID(STRING_TO_JSVAL(str));
jsval v;
if (!js_PropertyIsEnumerable(cx, obj, id, &v)) {
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
return JSVAL_TO_BOOLEAN(JSVAL_VOID);
}
JS_ASSERT(JSVAL_IS_BOOLEAN(v));
return JSVAL_TO_BOOLEAN(v);
}
#endif
@@ -4482,16 +4482,19 @@ js_SetPropertyHelper(JSContext *cx, JSOb
}
#ifdef __GNUC__ /* suppress bogus gcc warnings */
} else {
scope = NULL;
#endif
}
if (!sprop) {
+ /* We should never add properties to lexical blocks. */
+ JS_ASSERT(OBJ_GET_CLASS(cx, obj) != &js_BlockClass);
+
/*
* Purge the property cache of now-shadowed id in obj's scope chain.
* Do this early, before locking obj to avoid nesting locks.
*/
js_PurgeScopeChain(cx, obj, id);
/* Find or make a property descriptor with the right heritage. */
JS_LOCK_OBJ(cx, obj);
--- a/js/src/jsparse.cpp
+++ b/js/src/jsparse.cpp
@@ -743,23 +743,25 @@ JSCompiler::parse(JSObject *chain)
}
return pn;
}
static inline bool
SetStaticLevel(JSTreeContext *tc, uintN staticLevel)
{
/*
- * Reserve staticLevel 0xffff in order to reserve FREE_UPVAR_COOKIE. This
- * is simpler than error-checking every MAKE_UPVAR_COOKIE, and practically
- * speaking it leaves more than enough room for upvars. In fact we might
- * want to split cookies with fewer bits for skip and more for slot, but
- * only based on evidence.
+ * Reserve FREE_STATIC_LEVEL (0xffff) in order to reserve FREE_UPVAR_COOKIE
+ * (0xffffffff) and other cookies with that level.
+ *
+ * This is a lot simpler than error-checking every MAKE_UPVAR_COOKIE, and
+ * practically speaking it leaves more than enough room for upvars. In fact
+ * we might want to split cookie fields giving fewer bits for skip and more
+ * for slot, but only based on evidence.
*/
- if (staticLevel >= JS_BITMASK(16)) {
+ if (staticLevel >= FREE_STATIC_LEVEL) {
JS_ReportErrorNumber(tc->compiler->context, js_GetErrorMessage, NULL,
JSMSG_TOO_DEEP, js_function_str);
return false;
}
tc->staticLevel = staticLevel;
return true;
}
@@ -2193,30 +2195,30 @@ LeaveFunction(JSParseNode *fn, JSTreeCon
while ((ale = iter()) != NULL) {
JSAtom *atom = ALE_ATOM(ale);
JSDefinition *dn = ALE_DEFN(ale);
JS_ASSERT(dn->isPlaceholder());
if (atom == funAtom && lambda != 0) {
dn->pn_op = JSOP_CALLEE;
- dn->pn_cookie = MAKE_UPVAR_COOKIE(funtc->staticLevel, 0);
+ dn->pn_cookie = MAKE_UPVAR_COOKIE(funtc->staticLevel, CALLEE_UPVAR_SLOT);
dn->pn_dflags |= PND_BOUND;
/*
* If this named function expression uses its own name other
* than to call itself, flag this function as using arguments,
* as if it had used arguments.callee instead of its own name.
*
* This abuses the plain sense of TCF_FUN_USES_ARGUMENTS, but
* we are out of tcflags bits at the moment. If it deoptimizes
* code unfairly (see JSCompiler::setFunctionKinds, where this
* flag is interpreted in its broader sense, not only to mean
* "this function might leak arguments.callee"), we can perhaps
- * try to work harder to add a TCF_FUN_CALLS_ITSELF flag and
+ * try to work harder to add a TCF_FUN_LEAKS_ITSELF flag and
* use that more precisely, both here and for unnamed function
* expressions.
*/
if (dn->isFunArg())
fn->pn_funbox->tcflags |= TCF_FUN_USES_ARGUMENTS;
continue;
}
@@ -7870,17 +7872,16 @@ PrimaryExpr(JSContext *cx, JSTokenStream
*/
if (!afterDot && !(ts->flags & TSF_DESTRUCTURING) && !tc->inStatement(STMT_WITH)) {
pn->pn_op = JSOP_ARGUMENTS;
pn->pn_dflags |= PND_BOUND;
}
} else if (!afterDot && !(ts->flags & TSF_DESTRUCTURING)) {
JSAtomListElement *ale = NULL;
JSTreeContext *tcx = tc;
- bool hit_named_lambda = false;
JSDefinition *dn;
do {
JSStmtInfo *stmt = js_LexicalLookup(tcx, pn->pn_atom, NULL);
if (stmt && stmt->type == STMT_WITH)
goto losing_with;
ale = tcx->decls.lookup(pn->pn_atom);
@@ -7893,20 +7894,19 @@ PrimaryExpr(JSContext *cx, JSTokenStream
break;
ale = NULL;
#else
break;
#endif
}
/* If this id names the current lambda's name, we are done. */
- if ((tc->flags & TCF_IN_FUNCTION) &&
- (tc->fun->flags & JSFUN_LAMBDA) &&
- tc->fun->atom == pn->pn_atom) {
- hit_named_lambda = true;
+ if ((tcx->flags & TCF_IN_FUNCTION) &&
+ (tcx->fun->flags & JSFUN_LAMBDA) &&
+ tcx->fun->atom == pn->pn_atom) {
break;
}
} while ((tcx = tcx->parent) != NULL);
if (!ale) {
ale = tc->lexdeps.lookup(pn->pn_atom);
if (!ale) {
/*
@@ -7945,24 +7945,27 @@ PrimaryExpr(JSContext *cx, JSTokenStream
* For an upvar reference, map pn->pn_atom to dn in tc->upvars. The
* subtleties here include:
*
* (a) tcx could be null, meaning we add an upvar speculatively for
* what looks like a free variable reference (it will be removed if
* a backward definition appears later; see NewBindingNode/Define).
*
* (b) If pn names the named function expression whose body we are
- * parsing, there's no way an upvar could be referenced here.
+ * parsing, there's no way an upvar above tcx's static level could
+ * be referenced here. However, we add to upvars anyway, to treat
+ * the function's name as an upvar in case it is used in a nested
+ * function.
*
* (a) is an optimization to handle forward upvar refs. Without
* it, if we add only a lexdep, then inner functions making forward
* refs to upvars will lose track of those upvars as their lexdeps
* entries are propagated upward to their parent functions.
*/
- if (tcx != tc && !hit_named_lambda) {
+ if (tcx != tc) {
ale = tc->upvars.add(tc->compiler, pn->pn_atom);
if (!ale)
return NULL;
ALE_SET_DEFN(ale, dn);
}
/* Here we handle the backward function reference case. */
if (js_PeekToken(cx, ts) != TOK_LP)
--- a/js/src/jsparse.h
+++ b/js/src/jsparse.h
@@ -724,22 +724,30 @@ JSParseNode::isFunArg() const
return ((JSDefinition *)this)->isFunArg();
#endif
return test(PND_FUNARG);
}
inline void
JSParseNode::setFunArg()
{
- if (pn_defn) {
- ((JSDefinition *)this)->pn_dflags |= PND_FUNARG;
- } else if (pn_used) {
+ /*
+ * pn_defn NAND pn_used must be true, per this chart:
+ *
+ * pn_defn pn_used
+ * 0 0 anonymous function used implicitly, e.g. by
+ * hidden yield in a genexp
+ * 0 1 a use of a definition or placeholder
+ * 1 0 a definition or placeholder
+ * 1 1 error: this case must not be possible
+ */
+ JS_ASSERT(!(pn_defn & pn_used));
+ if (pn_used)
pn_lexdef->pn_dflags |= PND_FUNARG;
- pn_dflags |= PND_FUNARG;
- }
+ pn_dflags |= PND_FUNARG;
}
struct JSObjectBox {
JSObjectBox *traceLink;
JSObjectBox *emitLink;
JSObject *object;
};
@@ -779,17 +787,17 @@ struct JSFunctionBoxQueue {
vector[head++ & lengthMask] = funbox;
funbox->queued = true;
}
}
JSFunctionBox *pull() {
if (tail == head)
return NULL;
- JS_ASSERT(tail != head);
+ JS_ASSERT(tail < head);
JSFunctionBox *funbox = vector[tail++ & lengthMask];
funbox->queued = false;
return funbox;
}
};
#define NUM_TEMP_FREELISTS 6U /* 32 to 2048 byte size classes (32 bit) */
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -81,16 +81,18 @@ typedef struct JSObjectArray {
uint32 length; /* count of indexed objects */
} JSObjectArray;
typedef struct JSUpvarArray {
uint32 *vector; /* array of indexed upvar cookies */
uint32 length; /* count of indexed upvar cookies */
} JSUpvarArray;
+#define CALLEE_UPVAR_SLOT 0xffff
+#define FREE_STATIC_LEVEL 0xffff
#define FREE_UPVAR_COOKIE 0xffffffff
#define MAKE_UPVAR_COOKIE(skip,slot) ((skip) << 16 | (slot))
#define UPVAR_FRAME_SKIP(cookie) ((uint32)(cookie) >> 16)
#define UPVAR_FRAME_SLOT(cookie) ((uint16)(cookie))
#define JS_OBJECT_ARRAY_SIZE(length) \
(offsetof(JSObjectArray, vector) + sizeof(JSObject *) * (length))
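
An upvar cookie packs the frame-skip count into its high 16 bits and the slot
into its low 16 bits. Reserving static level 0xffff (FREE_STATIC_LEVEL,
enforced by SetStaticLevel in jsparse.cpp above) guarantees that no valid
cookie can collide with FREE_UPVAR_COOKIE, even when the slot is
CALLEE_UPVAR_SLOT (0xffff). A worked example:

    uint32 cookie = MAKE_UPVAR_COOKIE(2, 5);    /* (2 << 16) | 5 == 0x00020005 */
    JS_ASSERT(UPVAR_FRAME_SKIP(cookie) == 2);   /* high 16 bits */
    JS_ASSERT(UPVAR_FRAME_SLOT(cookie) == 5);   /* low 16 bits */

    /* Only a skip of 0xffff could produce the all-ones sentinel: */
    JS_ASSERT(MAKE_UPVAR_COOKIE(FREE_STATIC_LEVEL, CALLEE_UPVAR_SLOT)
              == FREE_UPVAR_COOKIE);
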
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -2293,17 +2293,17 @@ TraceRecorder::guard(bool expected, LIns
#ifdef DEBUG
LIns* guard =
#endif
lir->insGuard(expected ? LIR_xf : LIR_xt, cond, exit);
#ifdef DEBUG
if (guard) {
GuardRecord* lr = guard->record();
VMSideExit* e = (VMSideExit*)lr->exit;
- debug_only_v(printf(" lr=%p exitType=%d\n", (SideExit*)e, e->exitType);)
+ debug_only_v(printf(" lr=%p exitType=%d\n", (void*)e, e->exitType);)
} else {
debug_only_v(printf(" redundant guard, eliminated\n");)
}
#endif
}
/* Emit a guard for condition (cond), expecting to evaluate to boolean result (expected)
and generate a side exit with type exitType to jump to if the condition does not hold. */
@@ -3616,17 +3616,17 @@ js_AttemptToExtendTree(JSContext* cx, VM
c->parent = f;
anchor->target = c;
c->root = f;
}
debug_only_v(printf("trying to attach another branch to the tree (hits = %d)\n", c->hits());)
int32_t& hits = c->hits();
- if (outer || hits++ >= HOTEXIT && hits <= HOTEXIT+MAXEXIT) {
+ if (outer || (hits++ >= HOTEXIT && hits <= HOTEXIT+MAXEXIT)) {
/* start tracing secondary trace from this point */
c->lirbuf = f->lirbuf;
unsigned stackSlots;
unsigned ngslots;
uint8* typeMap;
TypeMap fullMap;
if (exitedFrom == NULL) {
/* If we are coming straight from a simple side exit, just use that exit's type map
@@ -3966,17 +3966,16 @@ LeaveTree(InterpState&, VMSideExit* lr);
/**
* Executes a tree.
*/
static JS_REQUIRES_STACK VMSideExit*
js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
VMSideExit** innermostNestedGuardp)
{
JS_ASSERT(f->root == f && f->code() && f->vmprivate);
- JS_ASSERT(cx->builtinStatus == 0);
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
TreeInfo* ti = (TreeInfo*)f->vmprivate;
unsigned ngslots = ti->globalSlots->length();
uint16* gslots = ti->globalSlots->data();
unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
@@ -3994,16 +3993,17 @@ js_ExecuteTree(JSContext* cx, Fragment*
InterpState* state = (InterpState*)alloca(sizeof(InterpState) + (globalFrameSize+1)*sizeof(double));
state->cx = cx;
state->inlineCallCountp = &inlineCallCount;
state->innermostNestedGuardp = innermostNestedGuardp;
state->outermostTree = ti;
state->lastTreeExitGuard = NULL;
state->lastTreeCallGuard = NULL;
state->rpAtLastTreeCall = NULL;
+ state->builtinStatus = 0;
/* Setup the native global frame. */
double* global = (double*)(state+1);
/* Setup the native stack frame. */
double stack_buffer[MAX_NATIVE_STACK_SLOTS];
state->stackBase = stack_buffer;
state->sp = stack_buffer + (ti->nativeStackBase/sizeof(double));
@@ -4115,32 +4115,31 @@ LeaveTree(InterpState& state, VMSideExit
if (state.innermostNestedGuardp)
*state.innermostNestedGuardp = nested;
JS_ASSERT(nested);
JS_ASSERT(nested->exitType == NESTED_EXIT);
JS_ASSERT(state.lastTreeExitGuard);
JS_ASSERT(state.lastTreeExitGuard->exitType != NESTED_EXIT);
}
- int32_t bs = cx->builtinStatus;
- cx->builtinStatus = 0;
+ int32_t bs = state.builtinStatus;
bool bailed = innermost->exitType == STATUS_EXIT && (bs & JSBUILTIN_BAILED);
if (bailed) {
/*
* Deep-bail case.
*
* A _FAIL native already called LeaveTree. We already reconstructed
* the interpreter stack, in pre-call state, with pc pointing to the
* CALL/APPLY op, for correctness. Then we continued in native code.
*/
if (!(bs & JSBUILTIN_ERROR)) {
/*
* The native succeeded (no exception or error). After it returned, the
* trace stored the return value (at the top of the native stack) and
- * then immediately flunked the guard on cx->builtinStatus.
+ * then immediately flunked the guard on state->builtinStatus.
*
* Now LeaveTree has been called again from the tail of
* js_ExecuteTree. We are about to return to the interpreter. Adjust
* the top stack frame to resume on the next op.
*/
JS_ASSERT(*cx->fp->regs->pc == JSOP_CALL || *cx->fp->regs->pc == JSOP_APPLY);
uintN argc = GET_ARGC(cx->fp->regs->pc);
cx->fp->regs->pc += JSOP_CALL_LENGTH;
@@ -4938,17 +4937,17 @@ js_DeepBail(JSContext *cx)
/* It's a bug if a non-FAIL_STATUS builtin gets here. */
JS_ASSERT(cx->bailExit);
JS_TRACE_MONITOR(cx).onTrace = false;
JS_TRACE_MONITOR(cx).prohibitRecording = true;
LeaveTree(*cx->interpState, cx->bailExit);
cx->bailExit = NULL;
- cx->builtinStatus |= JSBUILTIN_BAILED;
+ cx->interpState->builtinStatus |= JSBUILTIN_BAILED;
}
JS_REQUIRES_STACK jsval&
TraceRecorder::argval(unsigned n) const
{
JS_ASSERT(n < cx->fp->fun->nargs);
return cx->fp->argv[n];
}
@@ -7455,17 +7454,17 @@ GetProperty(JSContext *cx, uintN argc, j
static jsval FASTCALL
GetProperty_tn(JSContext *cx, jsbytecode *pc, JSObject *obj, JSString *name)
{
JSAutoTempIdRooter idr(cx);
JSAutoTempValueRooter tvr(cx);
if (!js_ValueToStringId(cx, STRING_TO_JSVAL(name), idr.addr()) ||
!OBJ_GET_PROPERTY(cx, obj, idr.id(), tvr.addr())) {
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
*tvr.addr() = JSVAL_ERROR_COOKIE;
}
return tvr.value();
}
static JSBool
GetElement(JSContext *cx, uintN argc, jsval *vp)
{
@@ -7484,21 +7483,21 @@ GetElement(JSContext *cx, uintN argc, js
static jsval FASTCALL
GetElement_tn(JSContext* cx, jsbytecode *pc, JSObject* obj, int32 index)
{
JSAutoTempValueRooter tvr(cx);
JSAutoTempIdRooter idr(cx);
if (!js_Int32ToId(cx, index, idr.addr())) {
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
return JSVAL_ERROR_COOKIE;
}
if (!OBJ_GET_PROPERTY(cx, obj, idr.id(), tvr.addr())) {
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
*tvr.addr() = JSVAL_ERROR_COOKIE;
}
return tvr.value();
}
JS_DEFINE_TRCINFO_1(GetProperty,
(4, (static, JSVAL_FAIL, GetProperty_tn, CONTEXT, PC, THIS, STRING, 0, 0)))
JS_DEFINE_TRCINFO_1(GetElement,
@@ -7599,17 +7598,17 @@ SetProperty(JSContext *cx, uintN argc, j
static JSBool FASTCALL
SetProperty_tn(JSContext* cx, JSObject* obj, JSString* idstr, jsval v)
{
JSAutoTempValueRooter tvr(cx, v);
JSAutoTempIdRooter idr(cx);
if (!js_ValueToStringId(cx, STRING_TO_JSVAL(idstr), idr.addr()) ||
!OBJ_SET_PROPERTY(cx, obj, idr.id(), tvr.addr())) {
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
}
return JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID);
}
static JSBool
SetElement(JSContext *cx, uintN argc, jsval *vp)
{
jsval *argv;
@@ -7630,17 +7629,17 @@ SetElement(JSContext *cx, uintN argc, js
static JSBool FASTCALL
SetElement_tn(JSContext* cx, JSObject* obj, int32 index, jsval v)
{
JSAutoTempIdRooter idr(cx);
JSAutoTempValueRooter tvr(cx, v);
if (!js_Int32ToId(cx, index, idr.addr()) ||
!OBJ_SET_PROPERTY(cx, obj, idr.id(), tvr.addr())) {
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
}
return JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID);
}
JS_DEFINE_TRCINFO_1(SetProperty,
(4, (extern, BOOL_FAIL, SetProperty_tn, CONTEXT, THIS, STRING, JSVAL, 0, 0)))
JS_DEFINE_TRCINFO_1(SetElement,
(4, (extern, BOOL_FAIL, SetElement_tn, CONTEXT, THIS, INT32, JSVAL, 0, 0)))
@@ -8040,17 +8039,17 @@ TraceRecorder::record_FastNativeCallComp
type is jsval, snapshot() will also indicate in the type map that the
element on top of the stack is a boxed value which doesn't need to be
boxed if the type guard generated by unbox_jsval() fails. */
if (JSTN_ERRTYPE(pendingTraceableNative) == FAIL_STATUS) {
// Keep cx->bailExit null when it's invalid.
lir->insStorei(INS_CONSTPTR(NULL), cx_ins, (int) offsetof(JSContext, bailExit));
- LIns* status = lir->insLoad(LIR_ld, cx_ins, (int) offsetof(JSContext, builtinStatus));
+ LIns* status = lir->insLoad(LIR_ld, lirbuf->state, (int) offsetof(InterpState, builtinStatus));
if (pendingTraceableNative == generatedTraceableNative) {
LIns* ok_ins = v_ins;
/*
* Custom implementations of Iterator.next() throw a StopIteration exception.
* Catch and clear it and set the return value to JSVAL_HOLE in this case.
*/
if (uintptr_t(cx->fp->regs->pc - nextiter_imacros.custom_iter_next) <
@@ -8078,17 +8077,17 @@ TraceRecorder::record_FastNativeCallComp
JS_STATIC_ASSERT((1 - JS_FALSE) << 1 == JSBUILTIN_ERROR);
status = lir->ins2(LIR_or,
status,
lir->ins2i(LIR_lsh,
lir->ins2i(LIR_xor,
lir->ins2i(LIR_and, ok_ins, 1),
1),
1));
- lir->insStorei(status, cx_ins, (int) offsetof(JSContext, builtinStatus));
+ lir->insStorei(status, lirbuf->state, (int) offsetof(InterpState, builtinStatus));
}
guard(true,
lir->ins_eq0(status),
STATUS_EXIT);
}
bool ok = true;
if (pendingTraceableNative->flags & JSTN_UNBOX_AFTER) {
@@ -9753,17 +9752,17 @@ ObjectToIterator(JSContext *cx, uintN ar
static JSObject* FASTCALL
ObjectToIterator_tn(JSContext* cx, jsbytecode* pc, JSObject *obj, int32 flags)
{
jsval v = OBJECT_TO_JSVAL(obj);
JSBool ok = js_ValueToIterator(cx, flags, &v);
if (!ok) {
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
return NULL;
}
return JSVAL_TO_OBJECT(v);
}
static JSBool
CallIteratorNext(JSContext *cx, uintN argc, jsval *vp)
{
@@ -9772,17 +9771,17 @@ CallIteratorNext(JSContext *cx, uintN ar
static jsval FASTCALL
CallIteratorNext_tn(JSContext* cx, jsbytecode* pc, JSObject* iterobj)
{
JSAutoTempValueRooter tvr(cx);
JSBool ok = js_CallIteratorNext(cx, iterobj, tvr.addr());
if (!ok) {
- cx->builtinStatus |= JSBUILTIN_ERROR;
+ js_SetBuiltinError(cx);
return JSVAL_ERROR_COOKIE;
}
return tvr.value();
}
JS_DEFINE_TRCINFO_1(ObjectToIterator,
(4, (static, OBJECT_FAIL, ObjectToIterator_tn, CONTEXT, PC, THIS, INT32, 0, 0)))
JS_DEFINE_TRCINFO_1(CallIteratorNext,
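
The LIR emitted in record_FastNativeCallComplete above folds a JSBool result
into the status word without branching: ((ok & 1) ^ 1) << 1 is 0 for JS_TRUE
and 2 (JSBUILTIN_ERROR) for JS_FALSE, which is exactly what the
JS_STATIC_ASSERT pins down. The same computation in plain C++:

    /* Mirror of the LIR_and / LIR_xor / LIR_lsh sequence emitted on trace. */
    static uint32
    StatusAfterCall(uint32 status, JSBool ok)
    {
        JS_STATIC_ASSERT((1 - JS_FALSE) << 1 == JSBUILTIN_ERROR);
        return status | (((ok & 1) ^ 1) << 1);  /* OR in JSBUILTIN_ERROR iff !ok */
    }

    /* StatusAfterCall(0, JS_TRUE) == 0
     * StatusAfterCall(0, JS_FALSE) == JSBUILTIN_ERROR */
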
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -336,42 +336,57 @@ public:
return typeMap.data();
}
};
#if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__)))
# define EXECUTE_TREE_TIMER
#endif
+typedef enum JSBuiltinStatus {
+ JSBUILTIN_BAILED = 1,
+ JSBUILTIN_ERROR = 2
+} JSBuiltinStatus;
+
struct InterpState
{
double *sp; // native stack pointer, stack[0] is spbase[0]
void *rp; // call stack pointer
JSContext *cx; // current VM context handle
double *eos; // first unusable word after the native stack
void *eor; // first unusable word after the call stack
VMSideExit* lastTreeExitGuard; // guard we exited on during a tree call
VMSideExit* lastTreeCallGuard; // guard we want to grow from if the tree
// call exit guard mismatched
void* rpAtLastTreeCall; // value of rp at innermost tree call guard
TreeInfo* outermostTree; // the outermost tree we initially invoked
double* stackBase; // native stack base
FrameInfo** callstackBase; // call stack base
uintN* inlineCallCountp; // inline call count counter
- VMSideExit** innermostNestedGuardp;
+ VMSideExit** innermostNestedGuardp;
void* stackMark;
VMSideExit* innermost;
#ifdef EXECUTE_TREE_TIMER
uint64 startTime;
#endif
-#ifdef DEBUG
- bool jsframe_pop_blocks_set_on_entry;
-#endif
+
+ /*
+ * Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
+ * JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
+ * if an error or exception occurred.
+ */
+ uint32 builtinStatus;
};
+static JS_INLINE void
+js_SetBuiltinError(JSContext *cx)
+{
+ cx->interpState->builtinStatus |= JSBUILTIN_ERROR;
+}
+
enum JSMonitorRecordingStatus {
JSMRS_CONTINUE,
JSMRS_STOP,
JSMRS_IMACRO
};
class TraceRecorder : public avmplus::GCObject {
JSContext* cx;
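
With builtinStatus now a member of InterpState, its lifetime is exactly one
tree activation, so nothing has to be cleared on the JSContext when a trace
exits. A sketch of the flow as it stands after this patch:

    /* builtinStatus lifecycle for one tree activation:
     *   js_ExecuteTree:  state->builtinStatus = 0;                // fresh
     *   _FAIL builtin:   js_SetBuiltinError(cx);                  // on error
     *   js_DeepBail:     cx->interpState->builtinStatus
     *                        |= JSBUILTIN_BAILED;                 // off trace
     *   on trace:        guard(true, ins_eq0(status), STATUS_EXIT);
     *   LeaveTree:       int32_t bs = state.builtinStatus;        // read only
     */
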
--- a/js/src/jsxdrapi.h
+++ b/js/src/jsxdrapi.h
@@ -199,17 +199,17 @@ JS_XDRFindClassById(JSXDRState *xdr, uin
* Bytecode version number. Increment the subtrahend whenever JS bytecode
* changes incompatibly.
*
* This version number should be XDR'ed once near the front of any file or
* larger storage unit containing XDR'ed bytecode and other data, and checked
* before deserialization of bytecode. If the saved version does not match
* the current version, abort deserialization and invalidate the file.
*/
-#define JSXDR_BYTECODE_VERSION (0xb973c0de - 44)
+#define JSXDR_BYTECODE_VERSION (0xb973c0de - 45)
/*
* Library-private functions.
*/
extern JSBool
js_XDRAtom(JSXDRState *xdr, JSAtom **atomp);
extern JSBool
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -981,32 +981,16 @@ ReadLine(JSContext *cx, uintN argc, jsva
JS_free(cx, buf);
return JS_FALSE;
}
*vp = STRING_TO_JSVAL(str);
return JS_TRUE;
}
-#ifdef JS_TRACER
-static jsval JS_FASTCALL
-Print_tn(JSContext *cx, JSString *str)
-{
- char *bytes = JS_EncodeString(cx, str);
- if (!bytes) {
- cx->builtinStatus |= JSBUILTIN_ERROR;
- return JSVAL_VOID;
- }
- fprintf(gOutFile, "%s\n", bytes);
- JS_free(cx, bytes);
- fflush(gOutFile);
- return JSVAL_VOID;
-}
-#endif
-
static JSBool
Print(JSContext *cx, uintN argc, jsval *vp)
{
jsval *argv;
uintN i;
JSString *str;
char *bytes;
@@ -1074,24 +1058,16 @@ AssertEq(JSContext *cx, uintN argc, jsva
JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL, JSSMSG_ASSERT_EQ_FAILED,
actual, expected);
return JS_FALSE;
}
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
-#ifdef JS_TRACER
-static jsval JS_FASTCALL
-AssertEq_tn(JSContext *cx, jsval v1, jsval v2)
-{
- return (js_StrictlyEqual(cx, v1, v2)) ? JSVAL_VOID : JSVAL_ERROR_COOKIE;
-}
-#endif
-
static JSBool
GC(JSContext *cx, uintN argc, jsval *vp)
{
JSRuntime *rt;
uint32 preBytes;
rt = cx->runtime;
preBytes = rt->gcBytes;
@@ -2898,35 +2874,34 @@ EvalInContext(JSContext *cx, JSObject *o
out:
JS_EndRequest(scx);
WITH_LOCKED_CONTEXT_LIST(
JS_DestroyContextNoGC(scx)
);
return ok;
}
-static int32 JS_FASTCALL
-ShapeOf_tn(JSObject *obj)
-{
- if (!obj)
- return 0;
- if (!OBJ_IS_NATIVE(obj))
- return -1;
- return OBJ_SHAPE(obj);
-}
-
static JSBool
ShapeOf(JSContext *cx, uintN argc, jsval *vp)
{
jsval v = JS_ARGV(cx, vp)[0];
if (!JSVAL_IS_OBJECT(v)) {
JS_ReportError(cx, "shapeOf: object expected");
return JS_FALSE;
}
- return JS_NewNumberValue(cx, ShapeOf_tn(JSVAL_TO_OBJECT(v)), vp);
+ JSObject *obj = JSVAL_TO_OBJECT(v);
+ if (!obj) {
+ *vp = JSVAL_ZERO;
+ return JS_TRUE;
+ }
+ if (!OBJ_IS_NATIVE(obj)) {
+ *vp = INT_TO_JSVAL(-1);
+ return JS_TRUE;
+ }
+ return JS_NewNumberValue(cx, OBJ_SHAPE(obj), vp);
}
#ifdef JS_THREADSAFE
/*
* Check that t1 comes strictly before t2. The function correctly deals with
* PRIntervalTime wrap-around between t2 and t1 assuming that t2 and t1 stay
* within INT32_MAX from each other. We use MAX_TIMEOUT_INTERVAL to enforce
@@ -3458,20 +3433,16 @@ Elapsed(JSContext *cx, uintN argc, jsval
if (data)
d = js_IntervalNow() - data->startTime;
return JS_NewNumberValue(cx, d, vp);
}
JS_ReportError(cx, "Wrong number of arguments");
return JS_FALSE;
}
-JS_DEFINE_TRCINFO_1(AssertEq, (3, (static, JSVAL_RETRY, AssertEq_tn, CONTEXT, JSVAL, JSVAL, 0, 0)))
-JS_DEFINE_TRCINFO_1(Print, (2, (static, JSVAL_FAIL, Print_tn, CONTEXT, STRING, 0, 0)))
-JS_DEFINE_TRCINFO_1(ShapeOf, (1, (static, INT32, ShapeOf_tn, OBJECT, 0, 0)))
-
#ifdef XP_UNIX
#include <fcntl.h>
#include <sys/stat.h>
/*
* Returns a JS_malloc'd string (that the caller needs to JS_free)
* containing the directory (non-leaf) part of |from| prepended to |leaf|.
@@ -3588,20 +3559,20 @@ Snarf(JSContext *cx, JSObject *obj, uint
}
/* We use a mix of JS_FS and JS_FN to test both kinds of natives. */
static JSFunctionSpec shell_functions[] = {
JS_FS("version", Version, 0,0,0),
JS_FS("options", Options, 0,0,0),
JS_FS("load", Load, 1,0,0),
JS_FN("readline", ReadLine, 0,0),
- JS_TN("print", Print, 0,0, Print_trcinfo),
+ JS_FN("print", Print, 0,0),
JS_FS("help", Help, 0,0,0),
JS_FS("quit", Quit, 0,0,0),
- JS_TN("assertEq", AssertEq, 2,0, AssertEq_trcinfo),
+ JS_FN("assertEq", AssertEq, 2,0),
JS_FN("gc", GC, 0,0),
JS_FN("gcparam", GCParameter, 2,0),
JS_FN("countHeap", CountHeap, 0,0),
#ifdef JS_GC_ZEAL
JS_FN("gczeal", GCZeal, 1,0),
#endif
JS_FS("trap", Trap, 3,0,0),
JS_FS("untrap", Untrap, 2,0,0),
@@ -3627,17 +3598,17 @@ static JSFunctionSpec shell_functions[]
JS_FS("clear", Clear, 0,0,0),
JS_FN("intern", Intern, 1,0),
JS_FS("clone", Clone, 1,0,0),
JS_FS("seal", Seal, 1,0,1),
JS_FN("getpda", GetPDA, 1,0),
JS_FN("getslx", GetSLX, 1,0),
JS_FN("toint32", ToInt32, 1,0),
JS_FS("evalcx", EvalInContext, 1,0,0),
- JS_TN("shapeOf", ShapeOf, 1,0, ShapeOf_trcinfo),
+ JS_FN("shapeOf", ShapeOf, 1,0),
#ifdef MOZ_SHARK
JS_FS("startShark", js_StartShark, 0,0,0),
JS_FS("stopShark", js_StopShark, 0,0,0),
JS_FS("connectShark", js_ConnectShark, 0,0,0),
JS_FS("disconnectShark",js_DisconnectShark, 0,0,0),
#endif
#ifdef MOZ_CALLGRIND
JS_FS("startCallgrind", js_StartCallgrind, 0,0,0),
--- a/js/src/xpconnect/src/qsgen.py
+++ b/js/src/xpconnect/src/qsgen.py
@@ -866,17 +866,17 @@ def getTraceInfoType(type):
def getTraceInfoDefaultReturn(type):
traceType = traceTypeMap.get(type) or traceTypeMap.get("_default")
assert traceType
return traceType[2]
def getFailureString(retval, indent):
assert indent > 0
ret = " " * (4 * indent)
- ret += "cx->builtinStatus |= JSBUILTIN_ERROR;\n"
+ ret += "js_SetTraceableNativeFailed(cx);\n"
ret += " " * (4 * indent)
ret += "return %s;\n" % retval
ret += " " * (4 * (indent - 1))
ret += "}\n"
return ret
def writeFailure(f, retval, indent):
f.write(getFailureString(retval, indent))
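
For reference, with retval "JSVAL_VOID" and indent == 2, the updated
getFailureString produces the following C++ fragment in a generated quickstub:

        js_SetTraceableNativeFailed(cx);
        return JSVAL_VOID;
    }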