Merge.
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -1418,27 +1418,16 @@ JSBool
js_InternalInvoke(JSContext *cx, JSObject *obj, jsval fval, uintN flags,
uintN argc, jsval *argv, jsval *rval)
{
jsval *invokevp;
void *mark;
JSBool ok;
js_LeaveTrace(cx);
-
-#ifdef JS_TRACER
- /*
- * The JIT requires that the scope chain here is equal to its global
- * object. Disable the JIT for this call if this condition is not true.
- */
- uint32 oldOptions = cx->options;
- if ((oldOptions & JSOPTION_JIT) && obj != JS_GetGlobalForObject(cx, obj))
- cx->options &= ~JSOPTION_JIT;
-#endif
-
invokevp = js_AllocStack(cx, 2 + argc, &mark);
if (!invokevp)
return JS_FALSE;
invokevp[0] = fval;
invokevp[1] = OBJECT_TO_JSVAL(obj);
memcpy(invokevp + 2, argv, argc * sizeof *argv);
@@ -1458,23 +1447,16 @@ js_InternalInvoke(JSContext *cx, JSObjec
ok = JS_FALSE;
} else {
cx->weakRoots.lastInternalResult = *rval;
}
}
}
js_FreeStack(cx, mark);
-
-#ifdef JS_TRACER
- /* Possibly re-enable JIT, if disabled above. */
- if (oldOptions & JSOPTION_JIT)
- cx->options |= JSOPTION_JIT;
-#endif
-
return ok;
}
JSBool
js_InternalGetOrSet(JSContext *cx, JSObject *obj, jsid id, jsval fval,
JSAccessMode mode, uintN argc, jsval *argv, jsval *rval)
{
JSSecurityCallbacks *callbacks;
@@ -1522,26 +1504,16 @@ js_Execute(JSContext *cx, JSObject *chai
JSInterpreterHook hook;
void *hookData, *mark;
JSStackFrame *oldfp, frame;
JSObject *obj, *tmp;
JSBool ok;
js_LeaveTrace(cx);
-#ifdef JS_TRACER
- /*
- * The JIT requires that the scope chain here is equal to its global
- * object. Disable the JIT for this call if this condition is not true.
- */
- uint32 oldOptions = cx->options;
- if ((oldOptions & JSOPTION_JIT) && chain != JS_GetGlobalForObject(cx, chain))
- cx->options &= ~JSOPTION_JIT;
-#endif
-
#ifdef INCLUDE_MOZILLA_DTRACE
if (JAVASCRIPT_EXECUTE_START_ENABLED())
jsdtrace_execute_start(script);
#endif
hook = cx->debugHooks->executeHook;
hookData = mark = NULL;
oldfp = js_GetTopStackFrame(cx);
@@ -1656,23 +1628,16 @@ out2:
oldfp->dormantNext = NULL;
}
out:
#ifdef INCLUDE_MOZILLA_DTRACE
if (JAVASCRIPT_EXECUTE_DONE_ENABLED())
jsdtrace_execute_done(script);
#endif
-
-#ifdef JS_TRACER
- /* Possibly re-enable JIT, if disabled above. */
- if (oldOptions & JSOPTION_JIT)
- cx->options |= JSOPTION_JIT;
-#endif
-
return ok;
}
JSBool
js_CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs,
JSObject **objp, JSProperty **propp)
{
JSObject *obj2;
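
[Note] Both deleted blocks above implemented the same workaround: temporarily clear JSOPTION_JIT when the call target's scope chain did not bottom out at its own global, then restore the bit after the call. The test they relied on, JS_GetGlobalForObject, conceptually just follows parent (scope) links to the end of the chain; a minimal sketch under that assumption (GetGlobalForObjectSketch is a hypothetical name, not the real implementation):

    static JSObject *
    GetGlobalForObjectSketch(JSContext *cx, JSObject *obj)
    {
        /* Follow parent (scope) links until the chain terminates. */
        JSObject *parent;
        while ((parent = OBJ_GET_PARENT(cx, obj)) != NULL)
            obj = parent;
        return obj;
    }

The equivalent condition is now enforced once, inside ExecuteTree (see the jstracer.cpp hunk below), so callers no longer need to toggle cx->options around every invocation.
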
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -3937,35 +3937,16 @@ js_LookupPropertyWithFlags(JSContext *cx
}
out:
*objp = NULL;
*propp = NULL;
return protoIndex;
}
-/*
- * We cache name lookup results only for the global object or for native
- * non-global objects without prototype or with prototype that never mutates,
- * see bug 462734 and bug 487039.
- */
-static inline bool
-IsCacheableNonGlobalScope(JSObject *obj)
-{
- JS_ASSERT(STOBJ_GET_PARENT(obj));
-
- JSClass *clasp = STOBJ_GET_CLASS(obj);
- bool cacheable = (clasp == &js_CallClass ||
- clasp == &js_BlockClass ||
- clasp == &js_DeclEnvClass);
-
- JS_ASSERT_IF(cacheable, obj->map->ops->lookupProperty == js_LookupProperty);
- return cacheable;
-}
-
JSPropCacheEntry *
js_FindPropertyHelper(JSContext *cx, jsid id, JSBool cacheResult,
JSObject **objp, JSObject **pobjp, JSProperty **propp)
{
JSObject *scopeChain, *obj, *parent, *pobj;
JSPropCacheEntry *entry;
int scopeIndex, protoIndex;
JSProperty *prop;
@@ -3974,17 +3955,17 @@ js_FindPropertyHelper(JSContext *cx, jsi
scopeChain = js_GetTopStackFrame(cx)->scopeChain;
/* Scan entries on the scope chain that we can cache across. */
entry = JS_NO_PROP_CACHE_FILL;
obj = scopeChain;
parent = OBJ_GET_PARENT(cx, obj);
for (scopeIndex = 0;
parent
- ? IsCacheableNonGlobalScope(obj)
+ ? js_IsCacheableNonGlobalScope(obj)
: obj->map->ops->lookupProperty == js_LookupProperty;
++scopeIndex) {
protoIndex =
js_LookupPropertyWithFlags(cx, obj, id, cx->resolveFlags,
&pobj, &prop);
if (protoIndex < 0)
return NULL;
@@ -4073,17 +4054,17 @@ js_FindIdentifierBase(JSContext *cx, JSO
JSObject *obj = scopeChain;
/*
* Loop over cacheable objects on the scope chain until we find a
* property. We also stop when we reach the global object skipping any
* farther checks or lookups. For details see the JSOP_BINDNAME case of
* js_Interpret.
*/
- for (int scopeIndex = 0; IsCacheableNonGlobalScope(obj); scopeIndex++) {
+ for (int scopeIndex = 0; js_IsCacheableNonGlobalScope(obj); scopeIndex++) {
JSObject *pobj;
JSProperty *prop;
int protoIndex = js_LookupPropertyWithFlags(cx, obj, id,
cx->resolveFlags,
&pobj, &prop);
if (protoIndex < 0)
return NULL;
if (prop) {
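
[Note] The jsobj.cpp hunks are pure renames: the predicate gains a js_ prefix and moves to jsobj.h (next hunk) so that jstracer.cpp can call it as well. Note how the loop condition in js_FindPropertyHelper splits the two cases: non-global links must pass js_IsCacheableNonGlobalScope, while the global terminus only needs the stock js_LookupProperty hook.
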
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -714,16 +714,38 @@ js_LookupProperty(JSContext *cx, JSObjec
* Specialized subroutine that allows caller to preset JSRESOLVE_* flags and
* returns the index along the prototype chain in which *propp was found, or
* the last index if not found, or -1 on error.
*/
extern int
js_LookupPropertyWithFlags(JSContext *cx, JSObject *obj, jsid id, uintN flags,
JSObject **objp, JSProperty **propp);
+
+/*
+ * We cache name lookup results only for the global object or for native
+ * non-global objects without a prototype or with a prototype that never
+ * mutates; see bug 462734 and bug 487039.
+ */
+static inline bool
+js_IsCacheableNonGlobalScope(JSObject *obj)
+{
+ extern JS_FRIEND_DATA(JSClass) js_CallClass;
+ extern JS_FRIEND_DATA(JSClass) js_DeclEnvClass;
+ JS_ASSERT(STOBJ_GET_PARENT(obj));
+
+ JSClass *clasp = STOBJ_GET_CLASS(obj);
+ bool cacheable = (clasp == &js_CallClass ||
+ clasp == &js_BlockClass ||
+ clasp == &js_DeclEnvClass);
+
+ JS_ASSERT_IF(cacheable, obj->map->ops->lookupProperty == js_LookupProperty);
+ return cacheable;
+}
+
/*
* If cacheResult is false, return JS_NO_PROP_CACHE_FILL on success.
*/
extern JSPropCacheEntry *
js_FindPropertyHelper(JSContext *cx, jsid id, JSBool cacheResult,
JSObject **objp, JSObject **pobjp, JSProperty **propp);
/*
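
[Note] The local extern declarations keep the header free of new includes: js_CallClass and js_DeclEnvClass are declared in place, while js_BlockClass is presumably already visible to all includers (an assumption, not verified here). The JS_ASSERT(STOBJ_GET_PARENT(obj)) precondition means callers may only consult the predicate while a parent link exists, which is exactly how both the js_FindPropertyHelper loop above and the new ExecuteTree loop below use it: they walk inward-to-outward and stop before testing the global itself. Anything else native on a non-global link, a with-object or a host scope such as an HTML form, ends the cacheable prefix.
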
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -5402,18 +5402,50 @@ ExecuteTree(JSContext* cx, Fragment* f,
VMSideExit** innermostNestedGuardp)
{
#ifdef MOZ_TRACEVIS
TraceVisStateObj tvso(cx, S_EXECUTE);
#endif
JS_ASSERT(f->root == f && f->code() && f->vmprivate);
+ /*
+ * The JIT records and expects to execute with two scope-chain
+ * assumptions baked-in:
+ *
+ * 1. That the bottom of the scope chain is global, in the sense of
+ * JSCLASS_IS_GLOBAL.
+ *
+ * 2. That the scope chain between fp and the global is free of
+ * "unusual" native objects such as HTML forms or other funny
+ * things.
+ *
+ * #2 is checked here while following the scope-chain links, via
+ * js_IsCacheableNonGlobalScope, which consults a whitelist of known
+ * class types; once a global is found, it's checked for #1. Failing
+ * either check causes an early return from execution.
+ */
+ JSObject* parent;
+ JSObject* child = cx->fp->scopeChain;
+ while ((parent = OBJ_GET_PARENT(cx, child)) != NULL) {
+ if (!js_IsCacheableNonGlobalScope(child)) {
+ debug_only_print0(LC_TMTracer, "Blacklist: non-cacheable object on scope chain.\n");
+ Blacklist((jsbytecode*) f->root->ip);
+ return NULL;
+ }
+ child = parent;
+ }
+ JSObject* globalObj = child;
+ if (!(OBJ_GET_CLASS(cx, globalObj)->flags & JSCLASS_IS_GLOBAL)) {
+ debug_only_print0(LC_TMTracer, "Blacklist: non-global at root of scope chain.\n");
+ Blacklist((jsbytecode*) f->root->ip);
+ return NULL;
+ }
+
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
- JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
TreeInfo* ti = (TreeInfo*)f->vmprivate;
unsigned ngslots = ti->globalSlots->length();
uint16* gslots = ti->globalSlots->data();
unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
/* Make sure the global object is sane. */
JS_ASSERT_IF(ngslots != 0,
OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) ==
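
[Note] globalObj now falls out of the check itself: the loop's final child is exactly the object the deleted JS_GetGlobalForObject call used to compute, and the cost is linear in scope-chain depth, which is small in practice. Factored as a standalone predicate, the new code would read as follows (a hypothetical refactoring sketch, not part of this patch; ScopeChainCheckSketch is an invented name):

    static bool
    ScopeChainCheckSketch(JSContext *cx, JSObject *scopeChain)
    {
        JSObject *obj = scopeChain;
        JSObject *parent;
        while ((parent = OBJ_GET_PARENT(cx, obj)) != NULL) {
            /* Assumption #2: only whitelisted native scopes between fp and global. */
            if (!js_IsCacheableNonGlobalScope(obj))
                return false;
            obj = parent;
        }
        /* Assumption #1: the chain must bottom out at a JSCLASS_IS_GLOBAL object. */
        return (OBJ_GET_CLASS(cx, obj)->flags & JSCLASS_IS_GLOBAL) != 0;
    }
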
--- a/js/src/nanojit/LIR.h
+++ b/js/src/nanojit/LIR.h
@@ -135,20 +135,20 @@ namespace nanojit
verbose_only ( const char* _name; )
uint32_t FASTCALL _count_args(uint32_t mask) const;
uint32_t get_sizes(ArgSize*) const;
inline bool isIndirect() const {
return _address < 256;
}
- inline uint32_t FASTCALL count_args() const {
+ inline uint32_t count_args() const {
return _count_args(ARGSIZE_MASK_ANY);
}
- inline uint32_t FASTCALL count_iargs() const {
+ inline uint32_t count_iargs() const {
return _count_args(ARGSIZE_MASK_INT);
}
// fargs = args - iargs
};
/*
* Record for extra data used to compile switches as jump tables.
*/
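
[Note] The nanojit hunks here and below drop FASTCALL from small inline methods (count_args and count_iargs in LIR.h; the List methods in avmplus.h). A calling-convention annotation buys nothing on a call the compiler inlines, and an x86 fastcall attribute on inline members can provoke warnings or declaration/definition mismatches on some toolchains, which is presumably the motivation. For context, FASTCALL is typically defined along these lines in the nanojit platform headers (recalled from memory, not part of this patch):

    #if defined(_MSC_VER) && defined(_M_IX86)
    # define FASTCALL __fastcall
    #elif defined(__GNUC__) && defined(__i386__)
    # define FASTCALL __attribute__((fastcall))
    #else
    # define FASTCALL
    #endif

Out-of-line hot paths keep the annotation; note that _count_args is still declared `uint32_t FASTCALL _count_args(uint32_t mask) const` in the unchanged context above.
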
--- a/js/src/nanojit/avmplus.h
+++ b/js/src/nanojit/avmplus.h
@@ -534,29 +534,29 @@ namespace avmplus {
{
if (data)
free(data);
}
const T *getData() const { return data; }
// 'this' steals the guts of 'that' and 'that' gets reset.
- void FASTCALL become(List& that)
+ void become(List& that)
{
this->destroy();
this->data = that.data;
this->len = that.len;
this->capacity = that.capacity;
that.data = 0;
that.len = 0;
that.capacity = 0;
}
- uint32_t FASTCALL add(T value)
+ uint32_t add(T value)
{
if (len >= capacity) {
grow();
}
wb(len++, value);
return len-1;
}
@@ -571,17 +571,17 @@ namespace avmplus {
}
inline T get(uint32_t index) const
{
AvmAssert(index < len);
return *(T*)(data + index);
}
- void FASTCALL set(uint32_t index, T value)
+ void set(uint32_t index, T value)
{
AvmAssert(index < capacity);
if (index >= len)
{
len = index+1;
}
AvmAssert(len <= capacity);
wb(index, value);
@@ -597,88 +597,88 @@ namespace avmplus {
}
inline void clear()
{
zero_range(0, len);
len = 0;
}
- int FASTCALL indexOf(T value) const
+ int indexOf(T value) const
{
for(uint32_t i=0; i<len; i++)
if (get(i) == value)
return i;
return -1;
}
- int FASTCALL lastIndexOf(T value) const
+ int lastIndexOf(T value) const
{
for(int32_t i=len-1; i>=0; i--)
if (get(i) == value)
return i;
return -1;
}
inline T last() const
{
return get(len-1);
}
- T FASTCALL removeLast()
+ T removeLast()
{
if(isEmpty())
return undef_list_val();
T t = get(len-1);
set(len-1, undef_list_val());
len--;
return t;
}
inline T operator[](uint32_t index) const
{
AvmAssert(index < capacity);
return get(index);
}
- void FASTCALL ensureCapacity(uint32_t cap)
+ void ensureCapacity(uint32_t cap)
{
if (cap > capacity) {
if (data == NULL) {
data = (T*)calloc(1, factor(cap));
} else {
data = (T*)realloc(data, factor(cap));
zero_range(capacity, cap - capacity);
}
capacity = cap;
}
}
- void FASTCALL insert(uint32_t index, T value, uint32_t count = 1)
+ void insert(uint32_t index, T value, uint32_t count = 1)
{
AvmAssert(index <= len);
AvmAssert(count > 0);
ensureCapacity(len+count);
memmove(data + index + count, data + index, factor(len - index));
wbzm(index, index+count, value);
len += count;
}
- T FASTCALL removeAt(uint32_t index)
+ T removeAt(uint32_t index)
{
T old = get(index);
// dec the refcount on the one we're removing
wb(index, undef_list_val());
memmove(data + index, data + index + 1, factor(len - index - 1));
len--;
return old;
}
private:
- void FASTCALL grow()
+ void grow()
{
// growth is fast at first, then slows at larger list sizes.
uint32_t newMax = 0;
const uint32_t curMax = capacity;
if (curMax == 0)
newMax = kInitialCapacity;
else if(curMax > 15)
newMax = curMax * 3/2;
@@ -708,45 +708,45 @@ namespace avmplus {
*slot = value;
}
inline void do_wb_gc(GCObject** slot, const GCObject** value)
{
*slot = (GCObject*)*value;
}
- void FASTCALL wb(uint32_t index, T value)
+ void wb(uint32_t index, T value)
{
AvmAssert(index < capacity);
AvmAssert(data != NULL);
T* slot = &data[index];
do_wb_nongc(slot, value);
}
// multiple wb call with the same value, and assumption that existing value is all zero bits,
// like
// for (uint32_t u = index; u < index_end; ++u)
// wb(u, value);
- void FASTCALL wbzm(uint32_t index, uint32_t index_end, T value)
+ void wbzm(uint32_t index, uint32_t index_end, T value)
{
AvmAssert(index < capacity);
AvmAssert(index_end <= capacity);
AvmAssert(index < index_end);
AvmAssert(data != NULL);
T* slot = data + index;
for ( ; index < index_end; ++index, ++slot)
do_wb_nongc(slot, value);
}
inline uint32_t factor(uint32_t index) const
{
return index * sizeof(T);
}
- void FASTCALL zero_range(uint32_t _first, uint32_t _count)
+ void zero_range(uint32_t _first, uint32_t _count)
{
memset(data + _first, 0, factor(_count));
}
// stuff that needs specialization based on the type
static inline T undef_list_val();
private:
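
[Note] A worked example of the geometry in grow(): above 15 elements each step multiplies capacity by 3/2, so starting from a capacity of 16 successive grows yield 24, 36, 54, 81, ..., keeping add() amortized O(1) (the branch covering 0 < curMax <= 15 is elided by the hunk). Likewise for the shifting methods: on a list holding [a, b, c], insert(1, v, 2) memmoves b and c up two slots and writes v into the gap via wbzm, giving [a, v, v, b, c]; removeAt(1) then clears that slot, memmoves the tail down one, and returns v, leaving [a, v, b, c].
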