Merge.
author     Andreas Gal <gal@mozilla.com>
date       Thu, 05 Feb 2009 11:18:43 -0800
changeset  24851  4840f66486e705b0c8503650ecd1d7f46a7eb719
parent     24850  2983a8b73e3606f2902c3b6158fdc9dcfe50ce75 (current diff)
parent     24849  d4ed482363b27f7afccab37fd363bc160e8091ad (diff)
child      24852  da50f697779fd6bee3aa95a5421a6c67ca8ee621
child      24854  21494181fdb84b3dad6db774cbfd25be8cc9425c
push id    unknown
push user  unknown
push date  unknown
milestone  1.9.2a1pre
Merge.
js/src/jstracer.cpp
--- a/js/src/jsemit.cpp
+++ b/js/src/jsemit.cpp
@@ -1825,28 +1825,26 @@ BindNameToSlot(JSContext *cx, JSCodeGene
     JS_ASSERT(pn->pn_type == TOK_NAME);
     if (pn->pn_slot >= 0 || pn->pn_op == JSOP_ARGUMENTS)
         return JS_TRUE;
 
     /* QNAME references can never be optimized to use arg/var storage. */
     if (pn->pn_op == JSOP_QNAMEPART)
         return JS_TRUE;
 
-    /*
-     * We can't optimize if we are compiling a with statement and its body,
-     * or we're in a catch block whose exception variable has the same name
-     * as this node.  FIXME: we should be able to optimize catch vars to be
-     * block-locals.
-     */
     tc = &cg->treeContext;
     atom = pn->pn_atom;
     stmt = js_LexicalLookup(tc, atom, &slot);
     if (stmt) {
-        if (stmt->type == STMT_WITH)
+        /* We can't optimize if we are inside a with statement. */
+        if (stmt->type == STMT_WITH) {
+            JS_ASSERT_IF(tc->flags & TCF_IN_FUNCTION,
+                         tc->flags & TCF_FUN_HEAVYWEIGHT);
             return JS_TRUE;
+        }
 
         JS_ASSERT(stmt->flags & SIF_SCOPE);
         JS_ASSERT(slot >= 0);
         op = PN_OP(pn);
         switch (op) {
           case JSOP_NAME:     op = JSOP_GETLOCAL; break;
           case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
           case JSOP_INCNAME:  op = JSOP_INCLOCAL; break;
@@ -1862,24 +1860,16 @@ BindNameToSlot(JSContext *cx, JSCodeGene
             if (slot < 0)
                 return JS_FALSE;
             pn->pn_op = op;
             pn->pn_slot = slot;
         }
         return JS_TRUE;
     }
 
-    /*
-     * We can't optimize if var and closure (a local function not in a larger
-     * expression and not at top-level within another's body) collide.
-     * XXX suboptimal: keep track of colliding names and deoptimize only those
-     */
-    if (tc->flags & TCF_FUN_CLOSURE_VS_VAR)
-        return JS_TRUE;
-
     if (!(tc->flags & TCF_IN_FUNCTION)) {
         JSStackFrame *caller;
 
         caller = tc->parseContext->callerFrame;
         if (caller) {
             JS_ASSERT(tc->flags & TCF_COMPILE_N_GO);
             JS_ASSERT(caller->script);
             if (!caller->fun || caller->varobj != tc->u.scopeChain)
@@ -1984,61 +1974,60 @@ BindNameToSlot(JSContext *cx, JSCodeGene
         pn->pn_const = constOp;
         if (op != pn->pn_op) {
             pn->pn_op = op;
             pn->pn_slot = slot;
         }
         return JS_TRUE;
     }
 
-    if (tc->flags & TCF_IN_FUNCTION) {
-        /*
-         * We are compiling a function body and may be able to optimize name
-         * to stack slot. Look for an argument or variable in the function and
-         * rewrite pn_op and update pn accordingly.
-         */
-        localKind = js_LookupLocal(cx, tc->u.fun, atom, &index);
-        if (localKind != JSLOCAL_NONE) {
-            op = PN_OP(pn);
-            if (localKind == JSLOCAL_ARG) {
-                switch (op) {
-                  case JSOP_NAME:     op = JSOP_GETARG; break;
-                  case JSOP_SETNAME:  op = JSOP_SETARG; break;
-                  case JSOP_INCNAME:  op = JSOP_INCARG; break;
-                  case JSOP_NAMEINC:  op = JSOP_ARGINC; break;
-                  case JSOP_DECNAME:  op = JSOP_DECARG; break;
-                  case JSOP_NAMEDEC:  op = JSOP_ARGDEC; break;
-                  case JSOP_FORNAME:  op = JSOP_FORARG; break;
-                  case JSOP_DELNAME:  op = JSOP_FALSE; break;
-                  default: JS_NOT_REACHED("arg");
-                }
-                pn->pn_const = JS_FALSE;
-            } else {
-                JS_ASSERT(localKind == JSLOCAL_VAR ||
-                          localKind == JSLOCAL_CONST);
-                switch (op) {
-                  case JSOP_NAME:     op = JSOP_GETLOCAL; break;
-                  case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
-                  case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
-                  case JSOP_INCNAME:  op = JSOP_INCLOCAL; break;
-                  case JSOP_NAMEINC:  op = JSOP_LOCALINC; break;
-                  case JSOP_DECNAME:  op = JSOP_DECLOCAL; break;
-                  case JSOP_NAMEDEC:  op = JSOP_LOCALDEC; break;
-                  case JSOP_FORNAME:  op = JSOP_FORLOCAL; break;
-                  case JSOP_DELNAME:  op = JSOP_FALSE; break;
-                  default: JS_NOT_REACHED("local");
-                }
-                pn->pn_const = (localKind == JSLOCAL_CONST);
+    /*
+     * We are compiling a function body and may be able to optimize name to
+     * stack slot. Look for an argument or variable in the function and
+     * rewrite pn_op and update pn accordingly.
+     */
+    JS_ASSERT(tc->flags & TCF_IN_FUNCTION);
+    localKind = js_LookupLocal(cx, tc->u.fun, atom, &index);
+    if (localKind != JSLOCAL_NONE) {
+        op = PN_OP(pn);
+        if (localKind == JSLOCAL_ARG) {
+            switch (op) {
+              case JSOP_NAME:     op = JSOP_GETARG; break;
+              case JSOP_SETNAME:  op = JSOP_SETARG; break;
+              case JSOP_INCNAME:  op = JSOP_INCARG; break;
+              case JSOP_NAMEINC:  op = JSOP_ARGINC; break;
+              case JSOP_DECNAME:  op = JSOP_DECARG; break;
+              case JSOP_NAMEDEC:  op = JSOP_ARGDEC; break;
+              case JSOP_FORNAME:  op = JSOP_FORARG; break;
+              case JSOP_DELNAME:  op = JSOP_FALSE; break;
+              default: JS_NOT_REACHED("arg");
             }
-            pn->pn_op = op;
-            pn->pn_slot = index;
-            return JS_TRUE;
-        }
-        tc->flags |= TCF_FUN_USES_NONLOCALS;
+            pn->pn_const = JS_FALSE;
+        } else {
+            JS_ASSERT(localKind == JSLOCAL_VAR ||
+                      localKind == JSLOCAL_CONST);
+            switch (op) {
+              case JSOP_NAME:     op = JSOP_GETLOCAL; break;
+              case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
+              case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
+              case JSOP_INCNAME:  op = JSOP_INCLOCAL; break;
+              case JSOP_NAMEINC:  op = JSOP_LOCALINC; break;
+              case JSOP_DECNAME:  op = JSOP_DECLOCAL; break;
+              case JSOP_NAMEDEC:  op = JSOP_LOCALDEC; break;
+              case JSOP_FORNAME:  op = JSOP_FORLOCAL; break;
+              case JSOP_DELNAME:  op = JSOP_FALSE; break;
+              default: JS_NOT_REACHED("local");
+            }
+            pn->pn_const = (localKind == JSLOCAL_CONST);
+        }
+        pn->pn_op = op;
+        pn->pn_slot = index;
+        return JS_TRUE;
     }
+    tc->flags |= TCF_FUN_USES_NONLOCALS;
 
   arguments_check:
     /*
      * Here we are either compiling a function body, or an eval or debug
      * script inside a function, and couldn't optimize pn, so it's not a
      * global or local slot name. We are also outside of any with blocks.
      * Check if we can optimize the predefined arguments variable.
      */
--- a/js/src/jsemit.h
+++ b/js/src/jsemit.h
@@ -183,40 +183,38 @@ struct JSTreeContext {              /* t
     uint16          maxScopeDepth;  /* maximum lexical scope chain depth */
 #endif
 };
 
 #define TCF_IN_FUNCTION        0x01 /* parsing inside function body */
 #define TCF_RETURN_EXPR        0x02 /* function has 'return expr;' */
 #define TCF_RETURN_VOID        0x04 /* function has 'return;' */
 #define TCF_IN_FOR_INIT        0x08 /* parsing init expr of for; exclude 'in' */
-#define TCF_FUN_CLOSURE_VS_VAR 0x10 /* function and var with same name */
+#define TCF_NO_SCRIPT_RVAL     0x10 /* API caller does not want result value
+                                       from global script */
 #define TCF_FUN_USES_NONLOCALS 0x20 /* function refers to non-local names */
 #define TCF_FUN_HEAVYWEIGHT    0x40 /* function needs Call object per call */
 #define TCF_FUN_IS_GENERATOR   0x80 /* parsed yield statement in function */
 #define TCF_HAS_DEFXMLNS      0x100 /* default xml namespace = ...; parsed */
 #define TCF_HAS_FUNCTION_STMT 0x200 /* block contains a function statement */
 #define TCF_GENEXP_LAMBDA     0x400 /* flag lambda from generator expression */
 #define TCF_COMPILE_N_GO      0x800 /* compiler-and-go mode of script, can
                                        optimize name references based on scope
                                        chain */
-#define TCF_NO_SCRIPT_RVAL   0x1000 /* API caller does not want result value
-                                       from global script */
 /*
  * Flags to propagate out of the blocks.
  */
 #define TCF_RETURN_FLAGS        (TCF_RETURN_EXPR | TCF_RETURN_VOID)
 
 /*
  * Flags to propagate from FunctionBody.
  */
 #define TCF_FUN_FLAGS           (TCF_FUN_IS_GENERATOR   |                     \
                                  TCF_FUN_HEAVYWEIGHT    |                     \
-                                 TCF_FUN_USES_NONLOCALS |                     \
-                                 TCF_FUN_CLOSURE_VS_VAR)
+                                 TCF_FUN_USES_NONLOCALS)
 
 /*
  * Flags field, not stored in JSTreeContext.flags, for passing staticDepth
  * into js_CompileScript.
  */
 #define TCF_STATIC_DEPTH_MASK   0xffff0000
 #define TCF_GET_STATIC_DEPTH(f) ((uint32)(f) >> 16)
 #define TCF_PUT_STATIC_DEPTH(d) ((uint16)(d) << 16)
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -879,17 +879,22 @@ call_resolve(JSContext *cx, JSObject *ob
         return JS_TRUE;
 
     if (!js_ValueToStringId(cx, idval, &id))
         return JS_FALSE;
 
     localKind = js_LookupLocal(cx, fun, JSID_TO_ATOM(id), &slot);
     if (localKind != JSLOCAL_NONE) {
         JS_ASSERT((uint16) slot == slot);
-        attrs = JSPROP_PERMANENT | JSPROP_SHARED;
+
+        /*
+         * We follow 10.2.3 of ECMA 262 v3 and make argument and variable
+         * properties of the Call objects enumerable.
+         */
+        attrs = JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_SHARED;
         if (localKind == JSLOCAL_ARG) {
             JS_ASSERT(slot < fun->nargs);
             getter = js_GetCallArg;
             setter = SetCallArg;
         } else {
             JS_ASSERT(localKind == JSLOCAL_VAR || localKind == JSLOCAL_CONST);
             JS_ASSERT(slot < fun->u.i.nvars);
             getter = js_GetCallVar;
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -5709,16 +5709,23 @@ js_Interpret(JSContext *cx)
                     fp->slots[index] = INT_TO_JSVAL(sprop->slot);
                 }
             }
 
             OBJ_DROP_PROPERTY(cx, obj2, prop);
           END_CASE(JSOP_DEFVAR)
 
           BEGIN_CASE(JSOP_DEFFUN)
+          {
+            JSPropertyOp getter, setter;
+            bool doSet;
+            JSObject *pobj;
+            JSProperty *prop;
+            uint32 old;
+
             /*
              * A top-level function defined in Global or Eval code (see
              * ECMA-262 Ed. 3), or else a SpiderMonkey extension: a named
              * function statement in a compound statement (not at the top
              * statement level of global code, or at the top level of a
              * function body).
              */
             LOAD_FUNCTION(0);
@@ -5747,37 +5754,44 @@ js_Interpret(JSContext *cx)
                     goto error;
             }
 
             /*
              * Protect obj from any GC hiding below OBJ_DEFINE_PROPERTY.  All
              * paths from here must flow through the "Restore fp->scopeChain"
              * code below the OBJ_DEFINE_PROPERTY call.
              */
-            MUST_FLOW_THROUGH("restore");
+            MUST_FLOW_THROUGH("restore_scope");
             fp->scopeChain = obj;
             rval = OBJECT_TO_JSVAL(obj);
 
             /*
              * ECMA requires functions defined when entering Eval code to be
              * impermanent.
              */
             attrs = (fp->flags & JSFRAME_EVAL)
                     ? JSPROP_ENUMERATE
                     : JSPROP_ENUMERATE | JSPROP_PERMANENT;
 
             /*
              * Load function flags that are also property attributes.  Getters
              * and setters do not need a slot, their value is stored elsewhere
              * in the property itself, not in obj slots.
              */
+            setter = getter = JS_PropertyStub;
             flags = JSFUN_GSFLAG2ATTR(fun->flags);
             if (flags) {
+                /* Function cannot be both a getter and a setter. */
+                JS_ASSERT(flags == JSPROP_GETTER || flags == JSPROP_SETTER);
                 attrs |= flags | JSPROP_SHARED;
                 rval = JSVAL_VOID;
+                if (flags == JSPROP_GETTER)
+                    getter = JS_EXTENSION (JSPropertyOp) obj;
+                else
+                    setter = JS_EXTENSION (JSPropertyOp) obj;
             }
 
             /*
              * We define the function as a property of the variable object and
              * not the current scope chain even for the case of function
              * expression statements and functions defined by eval inside let
              * or with blocks.
              */
@@ -5786,43 +5800,64 @@ js_Interpret(JSContext *cx)
 
             /*
              * Check for a const property of the same name -- or any kind
              * of property if executing with the strict option.  We check
              * here at runtime as well as at compile-time, to handle eval
              * as well as multiple HTML script tags.
              */
             id = ATOM_TO_JSID(fun->atom);
-            ok = js_CheckRedeclaration(cx, parent, id, attrs, NULL, NULL);
-            if (ok) {
-                if (attrs == JSPROP_ENUMERATE) {
-                    JS_ASSERT(fp->flags & JSFRAME_EVAL);
-                    ok = OBJ_SET_PROPERTY(cx, parent, id, &rval);
-                } else {
-                    JS_ASSERT(attrs & JSPROP_PERMANENT);
-
-                    ok = OBJ_DEFINE_PROPERTY(cx, parent, id, rval,
-                                             (flags & JSPROP_GETTER)
-                                             ? JS_EXTENSION (JSPropertyOp) obj
-                                             : JS_PropertyStub,
-                                             (flags & JSPROP_SETTER)
-                                             ? JS_EXTENSION (JSPropertyOp) obj
-                                             : JS_PropertyStub,
-                                             attrs,
-                                             NULL);
+            prop = NULL;
+            ok = js_CheckRedeclaration(cx, parent, id, attrs, &pobj, &prop);
+            if (!ok)
+                goto restore_scope;
+
+            /*
+             * We deviate from 10.1.2 in ECMA 262 v3 and, under eval, use
+             * OBJ_SET_PROPERTY rather than OBJ_DEFINE_PROPERTY for function
+             * declarations, to preserve the JSPROP_PERMANENT attribute of
+             * existing properties and make sure they cannot be deleted.
+             *
+             * We also use OBJ_SET_PROPERTY for the existing properties of
+             * Call objects with matching attributes to preserve the native
+             * getters and setters that store the value of the property in the
+             * interpreter frame, see bug 467495.
+             */
+            doSet = (attrs == JSPROP_ENUMERATE);
+            JS_ASSERT_IF(doSet, fp->flags & JSFRAME_EVAL);
+            if (prop) {
+                if (parent == pobj &&
+                    OBJ_GET_CLASS(cx, parent) == &js_CallClass &&
+                    (old = ((JSScopeProperty *) prop)->attrs,
+                     !(old & (JSPROP_GETTER|JSPROP_SETTER)) &&
+                     (old & (JSPROP_ENUMERATE|JSPROP_PERMANENT)) == attrs)) {
+                    /*
+                     * js_CheckRedeclaration must reject attempts to add a
+                     * getter or setter to an existing property without a
+                     * getter or setter.
+                     */
+                    JS_ASSERT(!(attrs & ~(JSPROP_ENUMERATE|JSPROP_PERMANENT)));
+                    JS_ASSERT(!(old & JSPROP_READONLY));
+                    doSet = JS_TRUE;
                 }
+                OBJ_DROP_PROPERTY(cx, pobj, prop);
             }
-
+            ok = doSet
+                 ? OBJ_SET_PROPERTY(cx, parent, id, &rval)
+                 : OBJ_DEFINE_PROPERTY(cx, parent, id, rval, getter, setter,
+                                       attrs, NULL);
+
+          restore_scope:
             /* Restore fp->scopeChain now that obj is defined in fp->varobj. */
-            MUST_FLOW_LABEL(restore)
             fp->scopeChain = obj2;
             if (!ok) {
                 cx->weakRoots.newborn[GCX_OBJECT] = NULL;
                 goto error;
             }
+          }
           END_CASE(JSOP_DEFFUN)
 
           BEGIN_CASE(JSOP_DEFLOCALFUN)
             LOAD_FUNCTION(SLOTNO_LEN);
 
             /*
              * Define a local function (i.e., one nested at the top level of
              * another function), parented by the current scope chain, and
--- a/js/src/jsparse.cpp
+++ b/js/src/jsparse.cpp
@@ -1162,18 +1162,16 @@ FunctionDef(JSContext *cx, JSTokenStream
                                                  ? js_function_str
                                                  : (prevop == JSOP_DEFCONST)
                                                  ? js_const_str
                                                  : js_var_str,
                                                  name)) {
                     return NULL;
                 }
             }
-            if (!AT_TOP_LEVEL(tc) && prevop == JSOP_DEFVAR)
-                tc->flags |= TCF_FUN_CLOSURE_VS_VAR;
         } else {
             ale = js_IndexAtom(cx, funAtom, &tc->decls);
             if (!ale)
                 return NULL;
         }
         ALE_SET_JSOP(ale, JSOP_DEFFUN);
 
         /*
@@ -1640,18 +1638,16 @@ BindVarOrConst(JSContext *cx, BindData *
                                              ? js_function_str
                                              : (prevop == JSOP_DEFCONST)
                                              ? js_const_str
                                              : js_var_str,
                                              name)) {
                 return JS_FALSE;
             }
         }
-        if (op == JSOP_DEFVAR && prevop == JSOP_DEFFUN)
-            tc->flags |= TCF_FUN_CLOSURE_VS_VAR;
     }
     if (!ale) {
         ale = js_IndexAtom(cx, atom, &tc->decls);
         if (!ale)
             return JS_FALSE;
     }
     ALE_SET_JSOP(ale, op);
 
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -67,21 +67,21 @@
 #include "jsscript.h"
 #include "jsdate.h"
 #include "jsstaticcheck.h"
 #include "jstracer.h"
 
 #include "jsautooplen.h"        // generated headers last
 #include "imacros.c.out"
 
-/* Never use JSVAL_IS_BOOLEAN because it restricts the value (true, false) and 
-   the type. What you want to use is JSVAL_TAG(x) == JSVAL_BOOLEAN and then 
+/* Never use JSVAL_IS_BOOLEAN because it restricts the value (true, false) and
+   the type. What you want to use is JSVAL_TAG(x) == JSVAL_BOOLEAN and then
    handle the undefined case properly (bug 457363). */
 #undef JSVAL_IS_BOOLEAN
-#define JSVAL_IS_BOOLEAN(x) JS_STATIC_ASSERT(0) 
+#define JSVAL_IS_BOOLEAN(x) JS_STATIC_ASSERT(0)
 
 /* Use a fake tag to represent boxed values, borrowing from the integer tag
    range since we only use JSVAL_INT to indicate integers. */
 #define JSVAL_BOXED 3
 
 /* Another fake jsval tag, used to distinguish null from object values. */
 #define JSVAL_TNULL 5
 
@@ -235,17 +235,17 @@ static bool did_we_check_sse2 = false;
 #ifdef JS_JIT_SPEW
 bool js_verboseDebug = getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "verbose");
 #endif
 
 /* The entire VM shares one oracle. Collisions and concurrent updates are tolerated and worst
    case cause performance regressions. */
 static Oracle oracle;
 
-/* Blacklists the root peer fragment at a fragment's PC.  This is so blacklisting stays at the 
+/* Blacklists the root peer fragment at a fragment's PC.  This is so blacklisting stays at the
    top of the peer list and not scattered around. */
 void
 js_BlacklistPC(JSTraceMonitor* tm, Fragment* frag, uint32 globalShape);
 
 Tracker::Tracker()
 {
     pagelist = 0;
 }
@@ -344,28 +344,28 @@ static inline bool isInt32(jsval v)
     if (!isNumber(v))
         return false;
     jsdouble d = asNumber(v);
     jsint i;
     return JSDOUBLE_IS_INT(d, i);
 }
 
 /* Return JSVAL_DOUBLE for all numbers (int and double) and the tag otherwise. */
-static inline uint8 getPromotedType(jsval v) 
+static inline uint8 getPromotedType(jsval v)
 {
     return JSVAL_IS_INT(v) ? JSVAL_DOUBLE : JSVAL_IS_NULL(v) ? JSVAL_TNULL : uint8(JSVAL_TAG(v));
 }
 
 /* Return JSVAL_INT for all whole numbers that fit into signed 32-bit and the tag otherwise. */
 static inline uint8 getCoercedType(jsval v)
 {
     return isInt32(v) ? JSVAL_INT : JSVAL_IS_NULL(v) ? JSVAL_TNULL : uint8(JSVAL_TAG(v));
 }
 
-/* 
+/*
  * Constant seed and accumulate step borrowed from the DJB hash.
  */
 
 #define ORACLE_MASK (ORACLE_SIZE - 1)
 #define FRAGMENT_TABLE_MASK (FRAGMENT_TABLE_SIZE - 1)
 #define HASH_SEED 5381
 
 static inline void
@@ -381,33 +381,33 @@ stackSlotHash(JSContext* cx, unsigned sl
     hash_accum(h, uintptr_t(cx->fp->script), ORACLE_MASK);
     hash_accum(h, uintptr_t(cx->fp->regs->pc), ORACLE_MASK);
     hash_accum(h, uintptr_t(slot), ORACLE_MASK);
     return int(h);
 }
 
 JS_REQUIRES_STACK static inline int
 globalSlotHash(JSContext* cx, unsigned slot)
-{    
+{
     uintptr_t h = HASH_SEED;
     JSStackFrame* fp = cx->fp;
 
     while (fp->down)
-        fp = fp->down;        
-
-    hash_accum(h, uintptr_t(fp->script), ORACLE_MASK); 
-    hash_accum(h, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx, fp->scopeChain))), 
+        fp = fp->down;
+
+    hash_accum(h, uintptr_t(fp->script), ORACLE_MASK);
+    hash_accum(h, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx, fp->scopeChain))),
                ORACLE_MASK);
     hash_accum(h, uintptr_t(slot), ORACLE_MASK);
     return int(h);
 }
 
 static inline size_t
 hitHash(const void* ip)
-{    
+{
     uintptr_t h = HASH_SEED;
     hash_accum(h, uintptr_t(ip), ORACLE_MASK);
     return size_t(h);
 }
 
 Oracle::Oracle()
 {
     clear();
@@ -417,47 +417,47 @@ Oracle::Oracle()
 int32_t
 Oracle::getHits(const void* ip)
 {
     size_t h = hitHash(ip);
     uint32_t hc = hits[h];
     uint32_t bl = blacklistLevels[h];
 
     /* Clamp ranges for subtraction. */
-    if (bl > 30) 
+    if (bl > 30)
         bl = 30;
     hc &= 0x7fffffff;
-    
+
     return hc - (bl ? (1<<bl) : 0);
 }
 
 /* Fetch and increment the jump-target hit count for the current pc. */
-int32_t 
+int32_t
 Oracle::hit(const void* ip)
 {
     size_t h = hitHash(ip);
     if (hits[h] < 0xffffffff)
         hits[h]++;
-    
+
     return getHits(ip);
 }
 
 /* Reset the hit count for a jump-target and relax the blacklist count. */
-void 
+void
 Oracle::resetHits(const void* ip)
 {
     size_t h = hitHash(ip);
     if (hits[h] > 0)
         hits[h]--;
     if (blacklistLevels[h] > 0)
         blacklistLevels[h]--;
 }
 
 /* Blacklist with saturation. */
-void 
+void
 Oracle::blacklist(const void* ip)
 {
     size_t h = hitHash(ip);
     if (blacklistLevels[h] == 0)
         blacklistLevels[h] = INITIAL_BLACKLIST_LEVEL;
     else if (blacklistLevels[h] < 0xffffffff)
         blacklistLevels[h]++;
 }
@@ -468,17 +468,17 @@ JS_REQUIRES_STACK void
 Oracle::markGlobalSlotUndemotable(JSContext* cx, unsigned slot)
 {
     _globalDontDemote.set(&gc, globalSlotHash(cx, slot));
 }
 
 /* Consult with the oracle whether we shouldn't demote a certain global variable. */
 JS_REQUIRES_STACK bool
 Oracle::isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const
-{    
+{
     return _globalDontDemote.get(globalSlotHash(cx, slot));
 }
 
 /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
 JS_REQUIRES_STACK void
 Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot)
 {
     _stackDontDemote.set(&gc, stackSlotHash(cx, slot));
@@ -491,53 +491,53 @@ Oracle::isStackSlotUndemotable(JSContext
     return _stackDontDemote.get(stackSlotHash(cx, slot));
 }
 
 /* Clear the oracle. */
 void
 Oracle::clearHitCounts()
 {
     memset(hits, 0, sizeof(hits));
-    memset(blacklistLevels, 0, sizeof(blacklistLevels));    
+    memset(blacklistLevels, 0, sizeof(blacklistLevels));
 }
 
 void
 Oracle::clearDemotability()
 {
     _stackDontDemote.reset();
     _globalDontDemote.reset();
 }
 
-static inline size_t 
+static inline size_t
 fragmentHash(const void *ip, uint32 globalShape)
 {
     uintptr_t h = HASH_SEED;
     hash_accum(h, uintptr_t(ip), FRAGMENT_TABLE_MASK);
     hash_accum(h, uintptr_t(globalShape), FRAGMENT_TABLE_MASK);
     return size_t(h);
 }
 
 struct VMFragment : public Fragment
 {
-    VMFragment(const void* _ip, uint32 _globalShape) : 
-        Fragment(_ip), 
+    VMFragment(const void* _ip, uint32 _globalShape) :
+        Fragment(_ip),
         next(NULL),
-        globalShape(_globalShape)        
+        globalShape(_globalShape)
     {}
     VMFragment* next;
     uint32 globalShape;
 };
 
 
 static VMFragment*
 getVMFragment(JSTraceMonitor* tm, const void *ip, uint32 globalShape)
 {
     size_t h = fragmentHash(ip, globalShape);
     VMFragment* vf = tm->vmfragments[h];
-    while (vf && 
+    while (vf &&
            ! (vf->globalShape == globalShape &&
               vf->ip == ip)) {
         vf = vf->next;
     }
     return vf;
 }
 
 static Fragment*
@@ -1085,31 +1085,31 @@ js_NativeStackSlots(JSContext *cx, unsig
         fp = fp->down;
         int missing = fp2->fun->nargs - fp2->argc;
         if (missing > 0)
             slots += missing;
     }
     JS_NOT_REACHED("js_NativeStackSlots");
 }
 
-/* 
+/*
  * Capture the type map for the selected slots of the global object and currently pending
- * stack frames. 
+ * stack frames.
  */
 JS_REQUIRES_STACK void
 TypeMap::captureTypes(JSContext* cx, SlotList& slots, unsigned callDepth)
 {
     unsigned ngslots = slots.length();
     uint16* gslots = slots.data();
     setLength(js_NativeStackSlots(cx, callDepth) + ngslots);
     uint8* map = data();
     uint8* m = map;
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
         uint8 type = getCoercedType(*vp);
-        if ((type == JSVAL_INT) && oracle.isStackSlotUndemotable(cx, unsigned(m - map))) 
+        if ((type == JSVAL_INT) && oracle.isStackSlotUndemotable(cx, unsigned(m - map)))
             type = JSVAL_DOUBLE;
         JS_ASSERT(type != JSVAL_BOXED);
         debug_only_v(printf("capture stack type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
         JS_ASSERT(uintptr_t(m - map) < length());
         *m++ = type;
     );
     FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
         uint8 type = getCoercedType(*vp);
@@ -1207,17 +1207,17 @@ TraceRecorder::TraceRecorder(JSContext* 
 #ifdef NJ_SOFTFLOAT
     lir = float_filter = new (&gc) SoftFloatFilter(lir);
 #endif
     lir = cse_filter = new (&gc) CseFilter(lir, &gc);
     lir = expr_filter = new (&gc) ExprFilter(lir);
     lir = func_filter = new (&gc) FuncFilter(lir);
     lir->ins0(LIR_start);
 
-    if (!nanojit::AvmCore::config.tree_opt || fragment->root == fragment) 
+    if (!nanojit::AvmCore::config.tree_opt || fragment->root == fragment)
         lirbuf->state = addName(lir->insParam(0, 0), "state");
 
     lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
     lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
     cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
     gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, global)), "gp");
     eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
     eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");
@@ -1244,18 +1244,18 @@ TraceRecorder::TraceRecorder(JSContext* 
                            offsetof(JSContext, operationCount));
         }
         guard(false, lir->ins2i(LIR_le, counter, 0), snapshot(TIMEOUT_EXIT));
     }
 
     /* If we are attached to a tree call guard, make sure the guard the inner tree exited from
        is what we expect it to be. */
     if (_anchor && _anchor->exitType == NESTED_EXIT) {
-        LIns* nested_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, 
-                                                offsetof(InterpState, lastTreeExitGuard)), 
+        LIns* nested_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state,
+                                                offsetof(InterpState, lastTreeExitGuard)),
                                                 "lastTreeExitGuard");
         guard(true, lir->ins2(LIR_eq, nested_ins, INS_CONSTPTR(innermostNestedGuard)), NESTED_EXIT);
     }
 }
 
 TreeInfo::~TreeInfo()
 {
     UnstableExit* temp;
@@ -1279,17 +1279,17 @@ TraceRecorder::~TraceRecorder()
         tr = tr->nextRecorderToAbort;
     }
 #endif
     if (fragment) {
         if (wasRootFragment && !fragment->root->code()) {
             JS_ASSERT(!fragment->root->vmprivate);
             delete treeInfo;
         }
-        
+
         if (trashSelf)
             js_TrashTree(cx, fragment->root);
 
         for (unsigned int i = 0; i < whichTreesToTrash.length(); i++)
             js_TrashTree(cx, whichTreesToTrash.get(i));
     } else if (wasRootFragment) {
         delete treeInfo;
     }
@@ -1422,18 +1422,18 @@ done:
    execution. */
 void
 TraceRecorder::trackNativeStackUse(unsigned slots)
 {
     if (slots > treeInfo->maxNativeStackSlots)
         treeInfo->maxNativeStackSlots = slots;
 }
 
-/* Unbox a jsval into a slot. Slots are wide enough to hold double values directly (instead of 
-   storing a pointer to them). We now assert instead of type checking, the caller must ensure the 
+/* Unbox a jsval into a slot. Slots are wide enough to hold double values directly (instead of
+   storing a pointer to them). We now assert instead of type checking, the caller must ensure the
    types are compatible. */
 static void
 ValueToNative(JSContext* cx, jsval v, uint8 type, double* slot)
 {
     unsigned tag = JSVAL_TAG(v);
     switch (type) {
       case JSVAL_INT:
         jsint i;
@@ -1497,23 +1497,23 @@ static bool
 js_ReplenishReservedPool(JSContext* cx, JSTraceMonitor* tm)
 {
     /* We should not be called with a full pool. */
     JS_ASSERT((size_t) (tm->reservedDoublePoolPtr - tm->reservedDoublePool) <
               MAX_NATIVE_STACK_SLOTS);
 
     /*
      * When the GC runs in js_NewDoubleInRootedValue, it resets
-     * tm->reservedDoublePoolPtr back to tm->reservedDoublePool. 
+     * tm->reservedDoublePoolPtr back to tm->reservedDoublePool.
      */
     JSRuntime* rt = cx->runtime;
     uintN gcNumber = rt->gcNumber;
-    jsval* ptr = tm->reservedDoublePoolPtr; 
+    jsval* ptr = tm->reservedDoublePoolPtr;
     while (ptr < tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) {
-        if (!js_NewDoubleInRootedValue(cx, 0.0, ptr)) 
+        if (!js_NewDoubleInRootedValue(cx, 0.0, ptr))
             goto oom;
         if (rt->gcNumber != gcNumber) {
             JS_ASSERT(tm->reservedDoublePoolPtr == tm->reservedDoublePool);
             ptr = tm->reservedDoublePool;
             if (uintN(rt->gcNumber - gcNumber) > uintN(1))
                 goto oom;
             continue;
         }
@@ -1759,17 +1759,17 @@ TraceRecorder::import(LIns* base, ptrdif
         "object", "int", "double", "3", "string", "5", "boolean", "any"
     };
     debug_only_v(printf("import vp=%p name=%s type=%s flags=%d\n",
                         p, name, typestr[t & 7], t >> 3);)
 #endif
 }
 
 JS_REQUIRES_STACK void
-TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned stackSlots, unsigned ngslots, 
+TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned stackSlots, unsigned ngslots,
                       unsigned callDepth, uint8* typeMap)
 {
     /* If we get a partial list that doesn't have all the types (i.e. recording from a side
        exit that was recorded but we added more global slots later), merge the missing types
        from the entry type map. This is safe because at the loop edge we verify that we
        have compatible types for all globals (entry type and loop edge type match). While
        a different trace of the tree might have had a guard with a different type map for
        these slots we just filled in here (the guard we continue from didn't know about them),
@@ -1966,17 +1966,17 @@ js_IsLoopEdge(jsbytecode* pc, jsbytecode
     switch (*pc) {
       case JSOP_IFEQ:
       case JSOP_IFNE:
         return ((pc + GET_JUMP_OFFSET(pc)) == header);
       case JSOP_IFEQX:
       case JSOP_IFNEX:
         return ((pc + GET_JUMPX_OFFSET(pc)) == header);
       default:
-        JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) || 
+        JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) ||
                   (*pc == JSOP_OR) || (*pc == JSOP_ORX));
     }
     return false;
 }
 
 /* Promote slots if necessary to match the called tree's type map, and report an error if
    that's impossible. */
 JS_REQUIRES_STACK bool
@@ -1984,36 +1984,36 @@ TraceRecorder::adjustCallerTypes(Fragmen
 {
     uint16* gslots = treeInfo->globalSlots->data();
     unsigned ngslots = treeInfo->globalSlots->length();
     JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
     TreeInfo* ti = (TreeInfo*)f->vmprivate;
     bool ok = true;
     uint8* map = ti->globalTypeMap();
     uint8* m = map;
-    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, 
+    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
         LIns* i = get(vp);
         bool isPromote = isPromoteInt(i);
-        if (isPromote && *m == JSVAL_DOUBLE) 
+        if (isPromote && *m == JSVAL_DOUBLE)
             lir->insStorei(get(vp), gp_ins, nativeGlobalOffset(vp));
         else if (!isPromote && *m == JSVAL_INT) {
             debug_only_v(printf("adjusting will fail, %s%d, slot %d\n", vpname, vpnum, m - map);)
             oracle.markGlobalSlotUndemotable(cx, gslots[n]);
             ok = false;
         }
         ++m;
     );
     JS_ASSERT(unsigned(m - map) == ti->nGlobalTypes());
     map = ti->stackTypeMap();
     m = map;
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
         LIns* i = get(vp);
         bool isPromote = isPromoteInt(i);
         if (isPromote && *m == JSVAL_DOUBLE) {
-            lir->insStorei(get(vp), lirbuf->sp, 
+            lir->insStorei(get(vp), lirbuf->sp,
                            -treeInfo->nativeStackBase + nativeStackOffset(vp));
             /* Aggressively undo speculation so the inner tree will compile if this fails. */
             oracle.markStackSlotUndemotable(cx, unsigned(m - map));
         } else if (!isPromote && *m == JSVAL_INT) {
             debug_only_v(printf("adjusting will fail, %s%d, slot %d\n", vpname, vpnum, m - map);)
             ok = false;
             oracle.markStackSlotUndemotable(cx, unsigned(m - map));
         } else if (JSVAL_IS_INT(*vp) && *m == JSVAL_DOUBLE) {
@@ -2138,66 +2138,66 @@ TraceRecorder::snapshot(ExitType exitTyp
 
         /* Now restore the original pc (after which early returns are ok). */
         MUST_FLOW_LABEL(restore_pc);
         regs->pc = pc - cs.length;
     } else {
         /* If we take a snapshot on a goto, advance to the target address. This avoids inner
            trees returning on a break goto, which the outer recorder then would confuse with
            a break in the outer tree. */
-        if (*pc == JSOP_GOTO) 
+        if (*pc == JSOP_GOTO)
             pc += GET_JUMP_OFFSET(pc);
         else if (*pc == JSOP_GOTOX)
             pc += GET_JUMPX_OFFSET(pc);
     }
     intptr_t ip_adj = ENCODE_IP_ADJ(fp, pc);
 
     JS_STATIC_ASSERT (sizeof(GuardRecord) + sizeof(VMSideExit) < MAX_SKIP_BYTES);
 
     /* Check if we already have a matching side exit. If so use that side exit structure,
        otherwise we have to create our own. */
     VMSideExit** exits = treeInfo->sideExits.data();
     unsigned nexits = treeInfo->sideExits.length();
     if (exitType == LOOP_EXIT) {
         for (unsigned n = 0; n < nexits; ++n) {
             VMSideExit* e = exits[n];
-            if (e->ip_adj == ip_adj && 
+            if (e->ip_adj == ip_adj &&
                 !memcmp(getFullTypeMap(exits[n]), typemap, typemap_size)) {
                 LIns* data = lir->skip(sizeof(GuardRecord));
                 GuardRecord* rec = (GuardRecord*)data->payload();
                 /* setup guard record structure with shared side exit */
                 memset(rec, 0, sizeof(GuardRecord));
                 VMSideExit* exit = exits[n];
                 rec->exit = exit;
                 exit->addGuard(rec);
                 AUDIT(mergedLoopExits);
                 return data;
             }
         }
     }
 
     if (sizeof(GuardRecord) +
-        sizeof(VMSideExit) + 
+        sizeof(VMSideExit) +
         (stackSlots + ngslots) * sizeof(uint8) >= MAX_SKIP_BYTES) {
         /**
          * ::snapshot() is infallible in the sense that callers don't
          * expect errors; but this is a trace-aborting error condition. So
          * mangle the request to consume zero slots, and mark the tree as
          * to-be-trashed. This should be safe as the trace will be aborted
          * before assembly or execution due to the call to
          * trackNativeStackUse above.
          */
         stackSlots = 0;
         ngslots = 0;
         trashSelf = true;
     }
 
     /* We couldn't find a matching side exit, so create our own side exit structure. */
     LIns* data = lir->skip(sizeof(GuardRecord) +
-                           sizeof(VMSideExit) + 
+                           sizeof(VMSideExit) +
                            (stackSlots + ngslots) * sizeof(uint8));
     GuardRecord* rec = (GuardRecord*)data->payload();
     VMSideExit* exit = (VMSideExit*)(rec + 1);
     /* setup guard record structure */
     memset(rec, 0, sizeof(GuardRecord));
     rec->exit = exit;
     /* setup side exit structure */
     memset(exit, 0, sizeof(VMSideExit));
@@ -2247,21 +2247,21 @@ TraceRecorder::guard(bool expected, LIns
  * @param v             Value.
  * @param t             Typemap entry for value.
  * @param stage_val     Outparam for set() address.
  * @param stage_ins     Outparam for set() instruction.
  * @param stage_count   Outparam for set() buffer count.
  * @return              True if types are compatible, false otherwise.
  */
 JS_REQUIRES_STACK bool
-TraceRecorder::checkType(jsval& v, uint8 t, jsval*& stage_val, LIns*& stage_ins, 
+TraceRecorder::checkType(jsval& v, uint8 t, jsval*& stage_val, LIns*& stage_ins,
                          unsigned& stage_count)
 {
     if (t == JSVAL_INT) { /* initially all whole numbers cause the slot to be demoted */
-        debug_only_v(printf("checkType(tag=1, t=%d, isnum=%d, i2f=%d) stage_count=%d\n", 
+        debug_only_v(printf("checkType(tag=1, t=%d, isnum=%d, i2f=%d) stage_count=%d\n",
                             t,
                             isNumber(v),
                             isPromoteInt(get(&v)),
                             stage_count);)
         if (!isNumber(v))
             return false; /* not a number? type mismatch */
         LIns* i = get(&v);
         /* This is always a type mismatch, we can't close a double to an int. */
@@ -2325,17 +2325,17 @@ TraceRecorder::deduceTypeStability(Fragm
     unsigned ngslots = treeInfo->globalSlots->length();
     uint16* gslots = treeInfo->globalSlots->data();
     JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
 
     if (stable_peer)
         *stable_peer = NULL;
 
     /*
-     * Rather than calculate all of this stuff twice, it gets cached locally.  The "stage" buffers 
+     * Rather than calculate all of this stuff twice, it gets cached locally.  The "stage" buffers
      * are for calls to set() that will change the exit types.
      */
     bool success;
     unsigned stage_count;
     jsval** stage_vals = (jsval**)alloca(sizeof(jsval*) * (treeInfo->typeMap.length()));
     LIns** stage_ins = (LIns**)alloca(sizeof(LIns*) * (treeInfo->typeMap.length()));
 
     /* First run through and see if we can close ourselves - best case! */
@@ -2358,17 +2358,17 @@ TraceRecorder::deduceTypeStability(Fragm
         }
         ++m;
     );
     m = typemap = treeInfo->stackTypeMap();
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
         debug_only_v(printf("%s%d ", vpname, vpnum);)
         if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) {
             if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) {
-                oracle.markStackSlotUndemotable(cx, unsigned(m - typemap)); 
+                oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
                 demote = true;
             } else {
                 goto checktype_fail_1;
             }
         }
         ++m;
     );
 
@@ -2382,68 +2382,68 @@ checktype_fail_1:
         return true;
     /* If we need to trash, don't bother checking peers. */
     } else if (trashSelf) {
         return false;
     }
 
     demote = false;
 
-    /* At this point the tree is about to be incomplete, so let's see if we can connect to any 
+    /* At this point the tree is about to be incomplete, so let's see if we can connect to any
      * peer fragment that is type stable.
      */
     Fragment* f;
     TreeInfo* ti;
     for (f = root_peer; f != NULL; f = f->peer) {
         debug_only_v(printf("Checking type stability against peer=%p (code=%p)\n", f, f->code());)
         if (!f->code())
             continue;
         ti = (TreeInfo*)f->vmprivate;
         /* Don't allow varying stack depths */
         if ((ti->nStackTypes != treeInfo->nStackTypes) ||
-            (ti->typeMap.length() != treeInfo->typeMap.length()) || 
+            (ti->typeMap.length() != treeInfo->typeMap.length()) ||
             (ti->globalSlots->length() != treeInfo->globalSlots->length()))
             continue;
         stage_count = 0;
         success = false;
 
         m = ti->globalTypeMap();
         FORALL_GLOBAL_SLOTS(cx, treeInfo->globalSlots->length(), treeInfo->globalSlots->data(),
                 if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))
                     goto checktype_fail_2;
                 ++m;
             );
-        
+
         m = ti->stackTypeMap();
         FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
                 if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))
                     goto checktype_fail_2;
                 ++m;
             );
 
         success = true;
 
 checktype_fail_2:
         if (success) {
             /*
-             * There was a successful match.  We don't care about restoring the saved staging, but 
+             * There was a successful match.  We don't care about restoring the saved staging, but
              * we do need to clear the original undemote list.
              */
             for (unsigned i = 0; i < stage_count; i++)
                 set(stage_vals[i], stage_ins[i]);
             if (stable_peer)
                 *stable_peer = f;
             demote = false;
             return false;
         }
     }
 
     /*
-     * If this is a loop trace and it would be stable with demotions, build an undemote list 
-     * and return true.  Our caller should sniff this and trash the tree, recording a new one 
+     * If this is a loop trace and it would be stable with demotions, build an undemote list
+     * and return true.  Our caller should sniff this and trash the tree, recording a new one
      * that will assumedly stabilize.
      */
     if (demote && fragment->kind == LoopTrace) {
         typemap = m = treeInfo->globalTypeMap();
         FORALL_GLOBAL_SLOTS(cx, treeInfo->globalSlots->length(), treeInfo->globalSlots->data(),
             if (*m == JSVAL_INT) {
                 JS_ASSERT(isNumber(*vp));
                 if (!isPromoteInt(get(vp)))
@@ -2451,17 +2451,17 @@ checktype_fail_2:
             } else if (*m == JSVAL_DOUBLE) {
                 JS_ASSERT(isNumber(*vp));
                 oracle.markGlobalSlotUndemotable(cx, gslots[n]);
             } else {
                 JS_ASSERT(*m == JSVAL_TAG(*vp));
             }
             m++;
         );
-        
+
         typemap = m = treeInfo->stackTypeMap();
         FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
             if (*m == JSVAL_INT) {
                 JS_ASSERT(isNumber(*vp));
                 if (!isPromoteInt(get(vp)))
                     oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
             } else if (*m == JSVAL_DOUBLE) {
                 JS_ASSERT(isNumber(*vp));
@@ -2503,17 +2503,17 @@ TraceRecorder::compile(JSTraceMonitor* t
     }
     ::compile(fragmento->assm(), fragment);
     if (fragmento->assm()->error() == nanojit::OutOMem)
         return;
     if (fragmento->assm()->error() != nanojit::None) {
         js_BlacklistPC(tm, fragment, treeInfo->globalShape);
         return;
     }
-    if (anchor) 
+    if (anchor)
         fragmento->assm()->patch(anchor);
     JS_ASSERT(fragment->code());
     JS_ASSERT(!fragment->vmprivate);
     if (fragment == fragment->root)
         fragment->vmprivate = treeInfo;
     /* :TODO: windows support */
 #if defined DEBUG && !defined WIN32
     const char* filename = cx->fp->script->filename;
@@ -2522,25 +2522,25 @@ TraceRecorder::compile(JSTraceMonitor* t
             js_FramePCToLineNumber(cx, cx->fp));
     fragmento->labels->add(fragment, sizeof(Fragment), 0, label);
     free(label);
 #endif
     AUDIT(traceCompleted);
 }
 
 static bool
-js_JoinPeersIfCompatible(Fragmento* frago, Fragment* stableFrag, TreeInfo* stableTree, 
+js_JoinPeersIfCompatible(Fragmento* frago, Fragment* stableFrag, TreeInfo* stableTree,
                          VMSideExit* exit)
 {
     JS_ASSERT(exit->numStackSlots == stableTree->nStackTypes);
 
     /* Must have a matching type unstable exit. */
     if ((exit->numGlobalSlots + exit->numStackSlots != stableTree->typeMap.length()) ||
         memcmp(getFullTypeMap(exit), stableTree->typeMap.data(), stableTree->typeMap.length())) {
-       return false; 
+       return false;
     }
 
     exit->target = stableFrag;
     frago->assm()->patch(exit);
 
     stableTree->dependentTrees.addUnique(exit->from->root);
 
     return true;
@@ -2657,23 +2657,23 @@ TraceRecorder::joinEdgesToEntry(Fragment
             if (!peer->code())
                 continue;
             ti = (TreeInfo*)peer->vmprivate;
             uexit = ti->unstableExits;
             unext = &ti->unstableExits;
             while (uexit != NULL) {
                 bool remove = js_JoinPeersIfCompatible(fragmento, fragment, treeInfo, uexit->exit);
                 JS_ASSERT(!remove || fragment != peer);
-                debug_only_v(if (remove) { 
-                             printf("Joining type-stable trace to target exit %p->%p.\n", 
+                debug_only_v(if (remove) {
+                             printf("Joining type-stable trace to target exit %p->%p.\n",
                                     uexit->fragment, uexit->exit); });
                 if (!remove) {
                     /* See if this exit contains mismatch demotions, which imply trashing a tree.
-                       This is actually faster than trashing the original tree as soon as the 
-                       instability is detected, since we could have compiled a fairly stable 
+                       This is actually faster than trashing the original tree as soon as the
+                       instability is detected, since we could have compiled a fairly stable
                        tree that ran faster with integers. */
                     unsigned stackCount = 0;
                     unsigned globalCount = 0;
                     t1 = treeInfo->stackTypeMap();
                     t2 = getStackTypeMap(uexit->exit);
                     for (unsigned i = 0; i < uexit->exit->numStackSlots; i++) {
                         if (t2[i] == JSVAL_INT && t1[i] == JSVAL_DOUBLE) {
                             stackDemotes[stackCount++] = i;
@@ -2709,18 +2709,18 @@ TraceRecorder::joinEdgesToEntry(Fragment
                 if (remove) {
                     *unext = uexit->next;
                     delete uexit;
                     uexit = *unext;
                 } else {
                     unext = &uexit->next;
                     uexit = uexit->next;
                 }
-            } 
-        } 
+            }
+        }
     }
 
     debug_only_v(js_DumpPeerStability(traceMonitor, peer_root->ip, treeInfo->globalShape);)
 }
 
 /* Emit an always-exit guard and compile the tree (used for break statements). */
 JS_REQUIRES_STACK void
 TraceRecorder::endLoop(JSTraceMonitor* tm)
@@ -2792,17 +2792,17 @@ TraceRecorder::prepareTreeCall(Fragment*
 JS_REQUIRES_STACK void
 TraceRecorder::emitTreeCall(Fragment* inner, VMSideExit* exit)
 {
     TreeInfo* ti = (TreeInfo*)inner->vmprivate;
     /* Invoke the inner tree. */
     LIns* args[] = { INS_CONSTPTR(inner), lirbuf->state }; /* reverse order */
     LIns* ret = lir->insCall(&js_CallTree_ci, args);
     /* Read back all registers, in case the called tree changed any of them. */
-    import(ti, inner_sp_ins, exit->numStackSlots, exit->numGlobalSlots, 
+    import(ti, inner_sp_ins, exit->numStackSlots, exit->numGlobalSlots,
            exit->calldepth, getFullTypeMap(exit));
     /* Restore sp and rp to their original values (we still have them in a register). */
     if (callDepth > 0) {
         lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
         lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
     }
     /* Guard that we come out of the inner tree along the same side exit we came out when
        we called the inner tree at recording time. */
@@ -2815,25 +2815,25 @@ TraceRecorder::emitTreeCall(Fragment* in
 JS_REQUIRES_STACK void
 TraceRecorder::trackCfgMerges(jsbytecode* pc)
 {
     /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
     JS_ASSERT((*pc == JSOP_IFEQ) || (*pc == JSOP_IFEQX));
     jssrcnote* sn = js_GetSrcNote(cx->fp->script, pc);
     if (sn != NULL) {
         if (SN_TYPE(sn) == SRC_IF) {
-            cfgMerges.add((*pc == JSOP_IFEQ) 
+            cfgMerges.add((*pc == JSOP_IFEQ)
                           ? pc + GET_JUMP_OFFSET(pc)
                           : pc + GET_JUMPX_OFFSET(pc));
-        } else if (SN_TYPE(sn) == SRC_IF_ELSE) 
+        } else if (SN_TYPE(sn) == SRC_IF_ELSE)
             cfgMerges.add(pc + js_GetSrcNoteOffset(sn, 0));
     }
 }
 
-/* Invert the direction of the guard if this is a loop edge that is not 
+/* Invert the direction of the guard if this is a loop edge that is not
    taken (thin loop). */
 JS_REQUIRES_STACK void
 TraceRecorder::flipIf(jsbytecode* pc, bool& cond)
 {
     if (js_IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {
         switch (*pc) {
           case JSOP_IFEQ:
           case JSOP_IFEQX:
@@ -2848,18 +2848,18 @@ TraceRecorder::flipIf(jsbytecode* pc, bo
           default:
             JS_NOT_REACHED("flipIf");
         }
         /* We are about to walk out of the loop, so terminate it with
            an inverse loop condition. */
         debug_only_v(printf("Walking out of the loop, terminating it anyway.\n");)
         cond = !cond;
         terminate = true;
-        /* If when we get to closeLoop the tree is decided to be type unstable, we need to 
-           reverse this logic because the loop won't be closed after all.  Store the real 
+        /* If when we get to closeLoop the tree is decided to be type unstable, we need to
+           reverse this logic because the loop won't be closed after all.  Store the real
            value of the IP the interpreter expects, so we can use it in our final LIR_x.
          */
         if (*pc == JSOP_IFEQX || *pc == JSOP_IFNEX)
             pc += GET_JUMPX_OFFSET(pc);
         else
             pc += GET_JUMP_OFFSET(pc);
         terminate_ip_adj = ENCODE_IP_ADJ(cx->fp, pc);
     }
@@ -2869,17 +2869,17 @@ TraceRecorder::flipIf(jsbytecode* pc, bo
 JS_REQUIRES_STACK void
 TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)
 {
     if (x->isconst()) // no need to guard if condition is constant
         return;
     if (*pc == JSOP_IFEQ) {
         flipIf(pc, cond);
         guard(cond, x, BRANCH_EXIT);
-        trackCfgMerges(pc); 
+        trackCfgMerges(pc);
     } else if (*pc == JSOP_IFNE) {
         flipIf(pc, cond);
         guard(cond, x, BRANCH_EXIT);
     }
 }
 
 bool
 TraceRecorder::hasMethod(JSObject* obj, jsid id)
@@ -2962,32 +2962,32 @@ static JS_REQUIRES_STACK bool
 js_DeleteRecorder(JSContext* cx)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
     /* Aborting and completing a trace end up here. */
     delete tm->recorder;
     tm->recorder = NULL;
 
-    /* 
+    /*
      * If we ran out of memory, flush the code cache.
      */
     if (JS_TRACE_MONITOR(cx).fragmento->assm()->error() == OutOMem) {
         js_FlushJITCache(cx);
         return false;
     }
 
     return true;
 }
 
 /**
  * Checks whether the shape of the global object has changed.
  */
 static inline bool
-js_CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj, 
+js_CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj,
                           uint32 *shape=NULL, SlotList** slots=NULL)
 {
     if (tm->needFlush) {
         tm->needFlush = JS_FALSE;
         return false;
     }
 
     uint32 globalShape = OBJ_SHAPE(globalObj);
@@ -3032,17 +3032,17 @@ js_CheckGlobalObjectShape(JSContext* cx,
     AUDIT(globalShapeMismatchAtEntry);
     debug_only_v(printf("No global slotlist for global shape %u, flushing cache.\n",
                         globalShape));
     return false;
 }
 
 static JS_REQUIRES_STACK bool
 js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
-                 unsigned stackSlots, unsigned ngslots, uint8* typeMap, 
+                 unsigned stackSlots, unsigned ngslots, uint8* typeMap,
                  VMSideExit* expectedInnerExit, Fragment* outer)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     JS_ASSERT(f->root != f || !cx->fp->imacpc);
 
     if (JS_TRACE_MONITOR(cx).prohibitRecording)
         return false;
 
@@ -3261,31 +3261,31 @@ js_SynthesizeFrame(JSContext* cx, const 
     // callee's, including missing arguments. Could we shift everything down to the caller's
     // fp->slots (where vars start) and avoid some of the complexity?
     return (fi.s.spdist - fp->down->script->nfixed) +
            ((fun->nargs > fp->argc) ? fun->nargs - fp->argc : 0) +
            script->nfixed;
 }
 
 #ifdef JS_JIT_SPEW
-static void 
+static void
 js_dumpMap(TypeMap const & tm) {
     uint8 *data = tm.data();
     for (unsigned i = 0; i < tm.length(); ++i) {
         printf("typemap[%d] = %c\n", i, typeChar[data[i]]);
     }
 }
 #endif
 
 JS_REQUIRES_STACK bool
-js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer, 
+js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer,
               uint32 globalShape, SlotList* globalSlots)
 {
     JS_ASSERT(f->root == f);
-    
+
     /* Avoid recording loops in overlarge scripts. */
     if (cx->fp->script->length >= SCRIPT_PC_ADJ_LIMIT) {
         js_AbortRecording(cx, "script too large");
         return false;
     }
 
     /* Make sure the global type map didn't change on us. */
     JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
@@ -3321,17 +3321,17 @@ js_RecordTree(JSContext* cx, JSTraceMoni
 
     /* setup the VM-private treeInfo structure for this fragment */
     TreeInfo* ti = new (&gc) TreeInfo(f, globalShape, globalSlots);
 
     /* capture the coerced type of each active slot in the type map */
     ti->typeMap.captureTypes(cx, *globalSlots, 0/*callDepth*/);
     ti->nStackTypes = ti->typeMap.length() - globalSlots->length();
 
-    /* Check for duplicate entry type maps.  This is always wrong and hints at trace explosion 
+    /* Check for duplicate entry type maps.  This is always wrong and hints at trace explosion
        since we are trying to stabilize something without properly connecting peer edges. */
     #ifdef DEBUG
     TreeInfo* ti_other;
     for (Fragment* peer = getLoop(tm, f->root->ip, globalShape); peer != NULL; peer = peer->peer) {
         if (!peer->code() || peer == f)
             continue;
         ti_other = (TreeInfo*)peer->vmprivate;
         JS_ASSERT(ti_other);
@@ -3346,30 +3346,30 @@ js_RecordTree(JSContext* cx, JSTraceMoni
             (cx->fp->regs->sp - StackBase(cx->fp))) * sizeof(double);
     ti->maxNativeStackSlots = entryNativeStackSlots;
     ti->maxCallDepth = 0;
     ti->script = cx->fp->script;
 
     /* recording primary trace */
     if (!js_StartRecorder(cx, NULL, f, ti,
                           ti->nStackTypes,
-                          ti->globalSlots->length(), 
+                          ti->globalSlots->length(),
                           ti->typeMap.data(), NULL, outer)) {
         return false;
     }
 
     return true;
 }
 
-JS_REQUIRES_STACK static inline bool 
+JS_REQUIRES_STACK static inline bool
 isSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
 {
     if (slot < ti->nStackTypes)
         return oracle.isStackSlotUndemotable(cx, slot);
-    
+
     uint16* gslots = ti->globalSlots->data();
     return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
 }
 
 JS_REQUIRES_STACK static bool
 js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
@@ -3385,34 +3385,34 @@ js_AttemptToStabilizeTree(JSContext* cx,
             oracle.markStackSlotUndemotable(cx, i);
     }
     m = getGlobalTypeMap(exit);
     for (unsigned i = 0; i < exit->numGlobalSlots; i++) {
         if (m[i] == JSVAL_DOUBLE)
             oracle.markGlobalSlotUndemotable(cx, from_ti->globalSlots->data()[i]);
     }
 
-    /* If this exit does not have enough globals, there might exist a peer with more globals that we 
+    /* If this exit does not have enough globals, there might exist a peer with more globals that we
      * can join to.
      */
     bool bound = false;
     for (Fragment* f = from->first; f != NULL; f = f->peer) {
         if (!f->code())
             continue;
         TreeInfo* ti = (TreeInfo*)f->vmprivate;
         JS_ASSERT(exit->numStackSlots == ti->nStackTypes);
         /* Check the minimum number of slots that need to be compared. */
         unsigned checkSlots = JS_MIN(exit->numStackSlots + exit->numGlobalSlots, ti->typeMap.length());
         m = getFullTypeMap(exit);
         uint8* m2 = ti->typeMap.data();
-        /* Analyze the exit typemap against the peer typemap. 
+        /* Analyze the exit typemap against the peer typemap.
          * Two conditions are important:
          * 1) Typemaps are identical: these peers can be attached.
          * 2) Typemaps do not match, but only contain I->D mismatches.
-         *    In this case, the original tree must be trashed because it 
+         *    In this case, the original tree must be trashed because it
          *    will never connect to any peer.
          */
         bool matched = true;
         bool undemote = false;
         for (uint32 i = 0; i < checkSlots; i++) {
             /* If the types are equal we're okay. */
             if (m[i] == m2[i])
                 continue;
@@ -3422,17 +3422,17 @@ js_AttemptToStabilizeTree(JSContext* cx,
              */
             if (m[i] == JSVAL_INT && m2[i] == JSVAL_DOUBLE && isSlotUndemotable(cx, ti, i)) {
                 undemote = true;
             } else {
                 undemote = false;
                 break;
             }
         }
-        if (matched) {            
+        if (matched) {
             JS_ASSERT(from_ti->globalSlots == ti->globalSlots);
             JS_ASSERT(from_ti->nStackTypes == ti->nStackTypes);
             /* Capture missing globals on both trees and link the fragments together. */
             if (from != f) {
                 ti->dependentTrees.addUnique(from);
                 ti->typeMap.captureMissingGlobalTypes(cx, *ti->globalSlots, ti->nStackTypes);
             }
             from_ti->typeMap.captureMissingGlobalTypes(cx, *from_ti->globalSlots, from_ti->nStackTypes);
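
As a rough, standalone sketch of the comparison described in the comment above: the exit typemap is walked against a peer's entry typemap, and each slot either matches, differs only as int-at-exit versus double-at-entry (resolvable by undemoting and re-recording), or mismatches outright. The SlotType/MatchResult names below are invented for illustration, and the real code additionally consults the oracle via isSlotUndemotable before treating an int/double difference as resolvable.

    #include <cstddef>
    #include <cstdint>

    enum SlotType : uint8_t { TYPE_INT, TYPE_DOUBLE, TYPE_OTHER };
    enum MatchResult { MATCHED, UNDEMOTE, MISMATCH };

    // Classify two typemaps slot by slot: identical maps can be attached,
    // maps that differ only by int-at-exit vs. double-at-entry force the
    // exiting tree to be trashed and re-recorded, anything else mismatches.
    static MatchResult
    CompareTypeMaps(const uint8_t* exitMap, const uint8_t* peerMap, size_t checkSlots)
    {
        bool undemote = false;
        for (size_t i = 0; i < checkSlots; ++i) {
            if (exitMap[i] == peerMap[i])
                continue;                               // identical slot types
            if (exitMap[i] == TYPE_INT && peerMap[i] == TYPE_DOUBLE) {
                undemote = true;                        // resolvable by widening
                continue;
            }
            return MISMATCH;                            // irreconcilable difference
        }
        return undemote ? UNDEMOTE : MATCHED;
    }
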
@@ -3470,17 +3470,17 @@ js_AttemptToExtendTree(JSContext* cx, VM
 {
     Fragment* f = anchor->from->root;
     JS_ASSERT(f->vmprivate);
     TreeInfo* ti = (TreeInfo*)f->vmprivate;
 
     /* Don't grow trees above a certain size to avoid code explosion due to tail duplication. */
     if (ti->branchCount >= MAX_BRANCHES)
         return false;
-    
+
     Fragment* c;
     if (!(c = anchor->target)) {
         c = JS_TRACE_MONITOR(cx).fragmento->createBranch(anchor, cx->fp->regs->pc);
         c->spawnedFrom = anchor;
         c->parent = f;
         anchor->target = c;
         c->root = f;
     }
@@ -3497,70 +3497,70 @@ js_AttemptToExtendTree(JSContext* cx, VM
         if (exitedFrom == NULL) {
             /* If we are coming straight from a simple side exit, just use that exit's type map
                as starting point. */
             ngslots = anchor->numGlobalSlots;
             stackSlots = anchor->numStackSlots;
             typeMap = getFullTypeMap(anchor);
         } else {
             /* If we side-exited on a loop exit and continue on a nesting guard, the nesting
-               guard (anchor) has the type information for everything below the current scope, 
+               guard (anchor) has the type information for everything below the current scope,
                and the actual guard we exited from has the types for everything in the current
                scope (and whatever it inlined). We have to merge those maps here. */
             VMSideExit* e1 = anchor;
             VMSideExit* e2 = exitedFrom;
             fullMap.add(getStackTypeMap(e1), e1->numStackSlotsBelowCurrentFrame);
             fullMap.add(getStackTypeMap(e2), e2->numStackSlots);
             stackSlots = fullMap.length();
             fullMap.add(getGlobalTypeMap(e2), e2->numGlobalSlots);
             ngslots = e2->numGlobalSlots;
             typeMap = fullMap.data();
-        } 
+        }
         return js_StartRecorder(cx, anchor, c, (TreeInfo*)f->vmprivate, stackSlots,
                                 ngslots, typeMap, exitedFrom, outer);
     }
     return false;
 }
 
 static JS_REQUIRES_STACK VMSideExit*
-js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, 
+js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
                VMSideExit** innermostNestedGuardp);
 
 static JS_REQUIRES_STACK Fragment*
 js_FindVMCompatiblePeer(JSContext* cx, Fragment* f);
 
 static JS_REQUIRES_STACK bool
 js_CloseLoop(JSContext* cx)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     Fragmento* fragmento = tm->fragmento;
     TraceRecorder* r = tm->recorder;
     JS_ASSERT(fragmento && r);
     bool walkedOutOfLoop = r->walkedOutOfLoop();
-    
+
     if (fragmento->assm()->error()) {
         js_AbortRecording(cx, "Error during recording");
         return false;
     }
 
     bool demote = false;
     Fragment* f = r->getFragment();
     TreeInfo* ti = r->getTreeInfo();
     uint32 globalShape = ti->globalShape;
     SlotList* globalSlots = ti->globalSlots;
     r->closeLoop(tm, demote);
 
-    /* 
+    /*
      * If js_DeleteRecorder flushed the code cache, we can't rely on f any more.
      */
     if (!js_DeleteRecorder(cx))
         return false;
 
     /*
-     * If we just walked out of a thin loop, we can't immediately start the 
+     * If we just walked out of a thin loop, we can't immediately start the
      * compiler again here since we didn't return to the loop header.
      */
     if (demote && !walkedOutOfLoop)
         return js_RecordTree(cx, tm, f, NULL, globalShape, globalSlots);
     return false;
 }
 
 JS_REQUIRES_STACK bool
@@ -3581,45 +3581,45 @@ js_RecordLoopEdge(JSContext* cx, TraceRe
     }
     /* If we hit our own loop header, close the loop and compile the trace. */
     if (r->isLoopHeader(cx))
         return js_CloseLoop(cx);
     /* does this branch go to an inner loop? */
     Fragment* f = getLoop(&JS_TRACE_MONITOR(cx), cx->fp->regs->pc, ti->globalShape);
     Fragment* peer_root = f;
     if (nesting_enabled && f) {
-        
+
         /* Make sure inner tree call will not run into an out-of-memory condition. */
         if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
             !js_ReplenishReservedPool(cx, tm)) {
             js_AbortRecording(cx, "Couldn't call inner tree (out of memory)");
-            return false; 
+            return false;
         }
-        
-        /* Make sure the shape of the global object still matches (this might flush 
+
+        /* Make sure the shape of the global object still matches (this might flush
            the JIT cache). */
         JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
         uint32 globalShape = -1;
         SlotList* globalSlots = NULL;
         if (!js_CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
             js_AbortRecording(cx, "Couldn't call inner tree (prep failed)");
             return false;
         }
-        
+
         debug_only_v(printf("Looking for type-compatible peer (%s:%d@%d)\n",
                             cx->fp->script->filename,
                             js_FramePCToLineNumber(cx, cx->fp),
                             FramePCOffset(cx->fp));)
 
         /* Find an acceptable peer, make sure our types fit. */
         Fragment* empty;
         bool success = false;
 
         f = r->findNestedCompatiblePeer(f, &empty);
-        if (f && f->code()) 
+        if (f && f->code())
             success = r->adjustCallerTypes(f);
 
         if (!success) {
             AUDIT(noCompatInnerTrees);
             debug_only_v(printf("No compatible inner tree (%p).\n", f);)
 
             Fragment* old = getLoop(tm, tm->recorder->getFragment()->root->ip, ti->globalShape);
             if (old == NULL)
@@ -3739,17 +3739,17 @@ TraceRecorder::findNestedCompatiblePeer(
     if (empty)
         *empty = NULL;
     demote = NULL;
 
     tm = &JS_TRACE_MONITOR(cx);
     unsigned int ngslots = treeInfo->globalSlots->length();
     uint16* gslots = treeInfo->globalSlots->data();
 
-    /* We keep a maximum tally - we want to select the peer most likely to work so we don't keep 
+    /* We keep a maximum tally - we want to select the peer most likely to work so we don't keep
      * recording.
      */
     max_demotes = 0;
 
     TreeInfo* ti;
     for (; f != NULL; f = f->peer) {
         if (!f->code()) {
             if (empty)
@@ -3848,17 +3848,17 @@ check_fail:
  * @param cx            Context.
  * @param f             First peer fragment.
  */
 static JS_REQUIRES_STACK Fragment*
 js_FindVMCompatiblePeer(JSContext* cx, Fragment* f)
 {
     for (; f != NULL; f = f->peer) {
-        if (f->vmprivate == NULL) 
+        if (f->vmprivate == NULL)
             continue;
         debug_only_v(printf("checking vm types %p (ip: %p): ", f, f->ip);)
         if (js_CheckEntryTypes(cx, (TreeInfo*)f->vmprivate))
             return f;
     }
     return NULL;
 }
 
@@ -3895,17 +3895,17 @@ js_ExecuteTree(JSContext* cx, Fragment* 
     /* Make sure the global object is sane. */
     JS_ASSERT(!ngslots || (OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) == ti->globalShape));
     /* Make sure our caller replenished the double pool. */
     JS_ASSERT(tm->reservedDoublePoolPtr >= tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS);
 
     /* Reserve objects and stack space now, to make leaving the tree infallible. */
     if (!js_ReserveObjects(cx, MAX_CALL_STACK_ENTRIES))
         return NULL;
-    
+
     /* Setup the native global frame. */
     unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
     state.global = (double*)alloca((globalFrameSize+1) * sizeof(double));
 
     /* Setup the native stack frame. */
     double stack_buffer[MAX_NATIVE_STACK_SLOTS];
     state.stackBase = stack_buffer;
     double* entry_sp = &stack_buffer[ti->nativeStackBase/sizeof(double)];
@@ -4089,17 +4089,17 @@ LeaveTree(InterpState& state, VMSideExit
     /* We already synthesized the frames around the innermost guard. Here we just deal
        with additional frames inside the tree we are bailing out from. */
     JS_ASSERT(rp == callstack);
     unsigned calldepth = innermost->calldepth;
     unsigned calldepth_slots = 0;
     for (unsigned n = 0; n < calldepth; ++n) {
         calldepth_slots += js_SynthesizeFrame(cx, *callstack[n]);
         ++*state.inlineCallCountp;
-#ifdef DEBUG        
+#ifdef DEBUG
         JSStackFrame* fp = cx->fp;
         debug_only_v(printf("synthesized shallow frame for %s:%u@%u\n",
                             fp->script->filename, js_FramePCToLineNumber(cx, fp),
                             FramePCOffset(fp));)
 #endif
     }
 
     /* Adjust sp and pc relative to the tree we exited from (not the tree we entered into).
@@ -4129,17 +4129,17 @@ LeaveTree(InterpState& state, VMSideExit
     debug_only_v(printf("leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%d, sp=%d, "
                         "calldepth=%d, cycles=%llu\n",
                         fp->script->filename,
                         js_FramePCToLineNumber(cx, fp),
                         FramePCOffset(fp),
                         js_CodeName[fp->imacpc ? *fp->imacpc : *fp->regs->pc],
                         lr,
                         lr->exitType,
-                        fp->regs->sp - StackBase(fp), 
+                        fp->regs->sp - StackBase(fp),
                         calldepth,
                         cycles));
 
     /* If this trace is part of a tree, later branches might have added additional globals for
        which we don't have any type information available in the side exit. We merge in this
        information from the entry type-map. See also the comment in the constructor of
        TraceRecorder on why this is always safe to do. */
     TreeInfo* outermostTree = state.outermostTree;
@@ -4191,17 +4191,17 @@ js_MonitorLoopEdge(JSContext* cx, uintN&
 
     /* Is the recorder currently active? */
     if (tm->recorder) {
         jsbytecode* innerLoopHeaderPC = cx->fp->regs->pc;
 
         if (js_RecordLoopEdge(cx, tm->recorder, inlineCallCount))
             return true;
 
-        /* 
+        /*
          * js_RecordLoopEdge will invoke an inner tree if we have a matching one. If we
          * arrive here, that tree didn't run to completion and instead we mis-matched
          * or the inner tree took a side exit other than the loop exit. We are thus
          * no longer guaranteed to be parked on the same loop header js_MonitorLoopEdge
          * was called for. In fact, this might not even be a loop header at all. Hence
          * if the program counter no longer hovers over the inner loop header, return to
          * the interpreter and do not attempt to trigger or record a new tree at this
          * location.
@@ -4211,62 +4211,62 @@ js_MonitorLoopEdge(JSContext* cx, uintN&
     }
     JS_ASSERT(!tm->recorder);
 
     /* Check the pool of reserved doubles (this might trigger a GC). */
     if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
         !js_ReplenishReservedPool(cx, tm)) {
         return false; /* Out of memory, don't try to record now. */
     }
-    
+
     /* Make sure the shape of the global object still matches (this might flush the JIT cache). */
     JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
     uint32 globalShape = -1;
     SlotList* globalSlots = NULL;
-    
+
     if (!js_CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots))
         js_FlushJITCache(cx);
-    
+
     jsbytecode* pc = cx->fp->regs->pc;
 
-    if (oracle.getHits(pc) >= 0 && 
+    if (oracle.getHits(pc) >= 0 &&
         oracle.getHits(pc)+1 < HOTLOOP) {
         oracle.hit(pc);
         return false;
     }
 
     Fragment* f = getLoop(tm, pc, globalShape);
     if (!f)
         f = getAnchor(tm, pc, globalShape);
 
     if (!f) {
         js_FlushJITCache(cx);
         return false;
     }
 
-    /* If we have no code in the anchor and no peers, we definitively won't be able to 
+    /* If we have no code in the anchor and no peers, we definitively won't be able to
       activate any trees, so start compiling. */
     if (!f->code() && !f->peer) {
 monitor_loop:
         if (oracle.hit(pc) >= HOTLOOP) {
             /* We can give RecordTree the root peer. If that peer is already taken, it will
                walk the peer list and find us a free slot or allocate a new tree if needed. */
             return js_RecordTree(cx, tm, f->first, NULL, globalShape, globalSlots);
         }
         /* Threshold not reached yet. */
         return false;
     }
-    
+
     debug_only_v(printf("Looking for compat peer %d@%d, from %p (ip: %p, hits=%d)\n",
-                        js_FramePCToLineNumber(cx, cx->fp), 
+                        js_FramePCToLineNumber(cx, cx->fp),
                         FramePCOffset(cx->fp),
                         f, f->ip, oracle.getHits(f->ip));)
     Fragment* match = js_FindVMCompatiblePeer(cx, f);
     /* If we didn't find a tree that actually matched, keep monitoring the loop. */
-    if (!match) 
+    if (!match)
         goto monitor_loop;
 
     VMSideExit* lr = NULL;
     VMSideExit* innermostNestedGuard = NULL;
 
     lr = js_ExecuteTree(cx, match, inlineCallCount, &innermostNestedGuard);
     if (!lr)
         return false;
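
The counting policy visible in the getHits/hit calls above can be pictured as a small per-pc counter: a loop header only becomes a recording candidate once its count reaches the hot-loop threshold, and the getHits(pc) >= 0 test suggests that negative counts mark blacklisted locations. The HitCounter class below is an invented sketch of that policy, not the real Oracle interface.

    #include <cstdint>
    #include <unordered_map>

    // Per-pc hit counter: recording starts only once a loop header has been
    // seen 'threshold' times; a negative count marks the location as cold
    // forever (blacklisted).
    class HitCounter {
        std::unordered_map<const void*, int32_t> hits;
      public:
        bool hitAndCheckHot(const void* pc, int32_t threshold) {
            int32_t& h = hits[pc];          // defaults to 0 on first sight
            if (h < 0)
                return false;               // blacklisted
            return ++h >= threshold;
        }
        void blacklist(const void* pc) { hits[pc] = -1; }
    };
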
@@ -4418,27 +4418,27 @@ js_AbortRecording(JSContext* cx, const c
     JS_ASSERT(!f->vmprivate);
     uint32 globalShape = tm->recorder->getTreeInfo()->globalShape;
     js_BlacklistPC(tm, f, globalShape);
     Fragment* outer = tm->recorder->getOuterToBlacklist();
     /* Give outer two chances to stabilize before we start blacklisting. */
     if (outer != NULL && outer->recordAttempts >= 2)
         js_BlacklistPC(tm, outer, globalShape);
 
-    /* 
+    /*
      * If js_DeleteRecorder flushed the code cache, we can't rely on f any more.
      */
     if (!js_DeleteRecorder(cx))
         return;
 
     /*
      * If this is the primary trace and we didn't succeed compiling, trash the
      * TreeInfo object.
      */
-    if (!f->code() && (f->root == f)) 
+    if (!f->code() && (f->root == f))
         js_TrashTree(cx, f);
 }
 
 #if defined NANOJIT_IA32
 static bool
 js_CheckForSSE2()
 {
     int features = 0;
@@ -4451,17 +4451,17 @@ js_CheckForSSE2()
         mov features, edx
         popad
     }
 #elif defined __GNUC__
     asm("xchg %%esi, %%ebx\n" /* we can't clobber ebx on gcc (PIC register) */
         "mov $0x01, %%eax\n"
         "cpuid\n"
         "mov %%edx, %0\n"
-        "xchg %%esi, %%ebx\n" 
+        "xchg %%esi, %%ebx\n"
         : "=m" (features)
         : /* We have no inputs */
         : "%eax", "%esi", "%ecx", "%edx"
        );
 #elif defined __SUNPRO_C || defined __SUNPRO_CC
     asm("push %%ebx\n"
         "mov $0x01, %%eax\n"
         "cpuid\n"
@@ -4685,17 +4685,17 @@ TraceRecorder::activeCallOrGlobalSlot(JS
     // Look up a name in the scope chain, arriving at a property either in the
     // global object or some call object's fp->slots, and import that property
     // into the trace's native stack frame. This could theoretically do *lookup*
     // through the property cache, but there is little performance to be gained
     // by doing so since at trace-execution time the underlying object (call
     // object or global object) will not be consulted at all: the jsval*
     // returned from this function will map (in the tracker) to a LIns* directly
     // defining a slot in the trace's native stack.
-    
+
     JS_ASSERT(obj != globalObj);
 
     JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
     JSObject* obj2;
     JSProperty* prop;
     if (js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &obj2, &prop) < 0 || !prop)
         ABORT_TRACE("failed to find name in non-global scope chain");
 
@@ -4725,17 +4725,17 @@ TraceRecorder::activeCallOrGlobalSlot(JS
     if (obj == obj2 && OBJ_GET_CLASS(cx, obj) == &js_CallClass) {
         JSStackFrame* cfp = (JSStackFrame*) JS_GetPrivate(cx, obj);
         if (cfp && FrameInRange(cx->fp, cfp, callDepth)) {
             JSScopeProperty* sprop = (JSScopeProperty*) prop;
             uintN slot = sprop->shortid;
 
             if (setflags && (sprop->attrs & JSPROP_READONLY))
                 ABORT_TRACE("writing to a readonly property");
-            
+
             vp = NULL;
             if (sprop->getter == js_GetCallArg) {
                 JS_ASSERT(slot < cfp->fun->nargs);
                 vp = &cfp->argv[slot];
             } else if (sprop->getter == js_GetCallVar) {
                 JS_ASSERT(slot < cfp->script->nslots);
                 vp = &cfp->slots[slot];
             }
@@ -4792,27 +4792,27 @@ TraceRecorder::alu(LOpcode v, jsdouble v
     if (v == LIR_fadd || v == LIR_fsub) {
         jsdouble r;
         if (v == LIR_fadd)
             r = v0 + v1;
         else
             r = v0 - v1;
         /*
          * Calculate the result of the addition for the current values. If the
-         * value is not within the integer range, don't even try to demote 
+         * value is not within the integer range, don't even try to demote
          * here.
          */
         if (!JSDOUBLE_IS_NEGZERO(r) && (jsint(r) == r) && isPromoteInt(s0) && isPromoteInt(s1)) {
             LIns* d0 = ::demote(lir, s0);
             LIns* d1 = ::demote(lir, s1);
             /*
-             * If the inputs are constant, generate an integer constant for 
+             * If the inputs are constant, generate an integer constant for
              * this operation.
              */
-            if (d0->isconst() && d1->isconst()) 
+            if (d0->isconst() && d1->isconst())
                 return lir->ins1(LIR_i2f, lir->insImm(jsint(r)));
             /*
              * Speculatively generate code that will perform the addition over
              * the integer inputs as an integer addition/subtraction and exit
              * if that fails.
              */
             v = (LOpcode)((int)v & ~LIR64);
             LIns* result = lir->ins2(v, d0, d1);
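
The range test guarding this demotion (JSDOUBLE_IS_NEGZERO plus the jsint round-trip above) can be read in isolation as the predicate below; ResultFitsInt32 is an illustrative stand-in, not an actual jstracer.cpp helper.

    #include <cmath>
    #include <cstdint>

    // An add/sub result may stay in integer form only if it is not -0 and
    // survives a round trip through int32 unchanged.
    static bool
    ResultFitsInt32(double r)
    {
        if (r == 0.0 && std::signbit(r))              // -0 must stay a double
            return false;
        if (!(r >= INT32_MIN && r <= INT32_MAX))      // also rejects NaN
            return false;
        return static_cast<double>(static_cast<int32_t>(r)) == r;
    }
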
@@ -4913,37 +4913,37 @@ TraceRecorder::ifop()
         /* Test for boolean is true, negate later if we are testing for false. */
         cond = JSVAL_TO_PSEUDO_BOOLEAN(v) == JS_TRUE;
         x = lir->ins2i(LIR_eq, v_ins, 1);
     } else if (isNumber(v)) {
         jsdouble d = asNumber(v);
         cond = !JSDOUBLE_IS_NaN(d) && d;
         jsdpun u;
         u.d = 0;
-        x = lir->ins2(LIR_and, 
+        x = lir->ins2(LIR_and,
                       lir->ins2(LIR_feq, v_ins, v_ins),
                       lir->ins_eq0(lir->ins2(LIR_feq, v_ins, lir->insImmq(u.u64))));
     } else if (JSVAL_IS_STRING(v)) {
         cond = JSSTRING_LENGTH(JSVAL_TO_STRING(v)) != 0;
         x = lir->ins2(LIR_piand,
-                      lir->insLoad(LIR_ldp, 
-                                   v_ins, 
+                      lir->insLoad(LIR_ldp,
+                                   v_ins,
                                    (int)offsetof(JSString, length)),
                       INS_CONSTPTR(JSSTRING_LENGTH_MASK));
     } else {
         JS_NOT_REACHED("ifop");
         return false;
     }
     flipIf(cx->fp->regs->pc, cond);
     bool expected = cond;
     if (!x->isCond()) {
         x = lir->ins_eq0(x);
         expected = !expected;
     }
-    guard(expected, x, BRANCH_EXIT); 
+    guard(expected, x, BRANCH_EXIT);
     return true;
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::switchop()
 {
     jsval& v = stackval(-1);
     LIns* v_ins = get(&v);
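
The branch-free number test built just above (LIR_feq of the value with itself, and-ed with the negation of a compare against zero) corresponds to the plain predicate below; NumberIsTruthy is an invented name, but the logic mirrors the cond computation: a double counts as true exactly when it is neither NaN nor zero.

    // (d == d) is false only for NaN; (d != 0.0) is false for both +0 and -0.
    static bool
    NumberIsTruthy(double d)
    {
        return (d == d) && (d != 0.0);
    }
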
@@ -5209,17 +5209,17 @@ TraceRecorder::equalityHelper(jsval l, j
             args[0] = r_ins, args[1] = cx_ins;
             r_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
             r = (r == JSVAL_VOID)
                 ? DOUBLE_TO_JSVAL(cx->runtime->jsNaN)
                 : INT_TO_JSVAL(r == JSVAL_TRUE);
             return equalityHelper(l, r, l_ins, r_ins, negate,
                                   tryBranchAfterCond, rval);
         }
-        
+
         if ((JSVAL_IS_STRING(l) || isNumber(l)) && !JSVAL_IS_PRIMITIVE(r))
             return call_imacro(equality_imacros.any_obj);
         if (!JSVAL_IS_PRIMITIVE(l) && (JSVAL_IS_STRING(r) || isNumber(r)))
             return call_imacro(equality_imacros.obj_any);
 
         l_ins = lir->insImm(0);
         r_ins = lir->insImm(1);
         cond = false;
@@ -5239,17 +5239,17 @@ TraceRecorder::equalityHelper(jsval l, j
      * that.
      */
     if (tryBranchAfterCond && !x->isconst())
         fuseIf(cx->fp->regs->pc + 1, cond, x);
 
     /*
      * We update the stack after the guard. This is safe since the guard bails
      * out at the comparison and the interpreter will therefore re-execute the
-     * comparison. This way the value of the condition doesn't have to be 
+     * comparison. This way the value of the condition doesn't have to be
      * calculated and saved on the stack in most cases.
      */
     set(&rval, x);
     return true;
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::relational(LOpcode op, bool tryBranchAfterCond)
@@ -5303,17 +5303,17 @@ TraceRecorder::relational(LOpcode op, bo
             // FALL THROUGH
           case JSVAL_INT:
           case JSVAL_DOUBLE:
           default:
             JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
                            "have been handled at start of method");
             ABORT_TRACE("safety belt");
         }
-    }    
+    }
     if (!JSVAL_IS_NUMBER(r)) {
         LIns* args[] = { r_ins, cx_ins };
         switch (JSVAL_TAG(r)) {
           case JSVAL_BOOLEAN:
             r_ins = lir->insCall(&js_BooleanOrUndefinedToNumber_ci, args);
             break;
           case JSVAL_STRING:
             r_ins = lir->insCall(&js_StringToNumber_ci, args);
@@ -5404,20 +5404,20 @@ TraceRecorder::binary(LOpcode op)
         return call_imacro(binary_imacros.any_obj);
 
     bool intop = !(op & LIR64);
     LIns* a = get(&l);
     LIns* b = get(&r);
 
     bool leftIsNumber = isNumber(l);
     jsdouble lnum = leftIsNumber ? asNumber(l) : 0;
-    
+
     bool rightIsNumber = isNumber(r);
     jsdouble rnum = rightIsNumber ? asNumber(r) : 0;
-    
+
     if ((op >= LIR_sub && op <= LIR_ush) ||  // sub, mul, (callh), or, xor, (not,) lsh, rsh, ush
         (op >= LIR_fsub && op <= LIR_fdiv)) { // fsub, fmul, fdiv
         LIns* args[2];
         if (JSVAL_IS_STRING(l)) {
             args[0] = a;
             args[1] = cx_ins;
             a = lir->insCall(&js_StringToNumber_ci, args);
             lnum = js_StringToNumber(cx, JSVAL_TO_STRING(l));
@@ -5869,37 +5869,37 @@ TraceRecorder::guardDenseArrayIndex(JSOb
               exit);
         /* Guard array capacity */
         guard(true,
               lir->ins2(LIR_ult,
                         idx_ins,
                         lir->insLoad(LIR_ldp, dslots_ins, 0 - (int)sizeof(jsval))),
               exit);
     } else {
-        /* If not idx < length, stay on trace (and read value as undefined). */ 
-        LIns* br1 = lir->insBranch(LIR_jf, 
-                                   lir->ins2(LIR_ult, 
-                                             idx_ins, 
+        /* If not idx < length, stay on trace (and read value as undefined). */
+        LIns* br1 = lir->insBranch(LIR_jf,
+                                   lir->ins2(LIR_ult,
+                                             idx_ins,
                                              stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH)),
                                    NULL);
         /* If dslots is NULL, stay on trace (and read value as undefined). */
         LIns* br2 = lir->insBranch(LIR_jt, lir->ins_eq0(dslots_ins), NULL);
         /* If not idx < capacity, stay on trace (and read value as undefined). */
         LIns* br3 = lir->insBranch(LIR_jf,
                                    lir->ins2(LIR_ult,
                                              idx_ins,
                                              lir->insLoad(LIR_ldp, dslots_ins, 0 - (int)sizeof(jsval))),
                                    NULL);
         lir->insGuard(LIR_x, lir->insImm(1), snapshot(exitType));
         LIns* label = lir->ins0(LIR_label);
         br1->target(label);
         br2->target(label);
         br3->target(label);
     }
-    return cond;    
+    return cond;
 }
 
 /*
  * Guard that a computed property access via an element op (JSOP_GETELEM, etc.)
  * does not find an alias to a global variable, or a property without a slot,
  * or a slot-ful property with a getter or setter (depending on op_offset in
  * JSObjectOps). Finally, beware resolve hooks mutating objects. Oh, and watch
  * out for bears too ;-).
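
The three branches generated in the stay-on-trace path above amount to the bounds predicate sketched here. DenseArrayView and its fields are hypothetical stand-ins for what the emitted loads read: the length from fslots[JSSLOT_ARRAY_LENGTH], the dslots pointer, and the capacity word stored just before the element vector.

    #include <cstdint>

    // Hypothetical flattened view of a dense array, for illustration only.
    struct DenseArrayView {
        uint32_t  length;     // corresponds to fslots[JSSLOT_ARRAY_LENGTH]
        uint32_t  capacity;   // corresponds to the word loaded at dslots[-1]
        void**    dslots;     // element vector; may be null if never allocated
    };

    // An element read stays "defined" only when all three checks pass.
    static bool
    IndexIsInBounds(const DenseArrayView& a, uint32_t idx)
    {
        return idx < a.length && a.dslots != nullptr && idx < a.capacity;
    }
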
@@ -5983,21 +5983,21 @@ TraceRecorder::clearFrameSlotsFromCache(
 
 JS_REQUIRES_STACK bool
 TraceRecorder::record_EnterFrame()
 {
     JSStackFrame* fp = cx->fp;
 
     if (++callDepth >= MAX_CALLDEPTH)
         ABORT_TRACE("exceeded maximum call depth");
-    // FIXME: Allow and attempt to inline a single level of recursion until we compile 
+    // FIXME: Allow and attempt to inline a single level of recursion until we compile
     //        recursive calls as independent trees (459301).
     if (fp->script == fp->down->script && fp->down->down && fp->down->down->script == fp->script)
         ABORT_TRACE("recursive call");
-    
+
     debug_only_v(printf("EnterFrame %s, callDepth=%d\n",
                         js_AtomToPrintableString(cx, cx->fp->fun->atom),
                         callDepth);)
     debug_only_v(
         js_Disassemble(cx, cx->fp->script, JS_TRUE, stdout);
         printf("----\n");)
     LIns* void_ins = INS_CONST(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
 
@@ -6317,29 +6317,29 @@ TraceRecorder::record_JSOP_MOD()
 
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NOT()
 {
     jsval& v = stackval(-1);
     if (JSVAL_TAG(v) == JSVAL_BOOLEAN) {
         set(&v, lir->ins_eq0(lir->ins2i(LIR_eq, get(&v), 1)));
         return true;
-    } 
+    }
     if (isNumber(v)) {
         LIns* v_ins = get(&v);
         set(&v, lir->ins2(LIR_or, lir->ins2(LIR_feq, v_ins, lir->insImmq(0)),
                                   lir->ins_eq0(lir->ins2(LIR_feq, v_ins, v_ins))));
         return true;
-    } 
+    }
     if (JSVAL_TAG(v) == JSVAL_OBJECT) {
         set(&v, lir->ins_eq0(get(&v)));
         return true;
     }
     JS_ASSERT(JSVAL_IS_STRING(v));
-    set(&v, lir->ins_eq0(lir->ins2(LIR_piand, 
+    set(&v, lir->ins_eq0(lir->ins2(LIR_piand,
                                    lir->insLoad(LIR_ldp, get(&v), (int)offsetof(JSString, length)),
                                    INS_CONSTPTR(JSSTRING_LENGTH_MASK))));
     return true;
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_BITNOT()
 {
@@ -6353,17 +6353,17 @@ TraceRecorder::record_JSOP_NEG()
 
     if (!JSVAL_IS_PRIMITIVE(v))
         return call_imacro(unary_imacros.sign);
 
     if (isNumber(v)) {
         LIns* a = get(&v);
 
         /* If we're a promoted integer, we have to watch out for 0s since -0 is a double.
-           Only follow this path if we're not an integer that's 0 and we're not a double 
+           Only follow this path if we're not an integer that's 0 and we're not a double
            that's zero.
          */
         if (isPromoteInt(a) &&
             (!JSVAL_IS_INT(v) || JSVAL_TO_INT(v) != 0) &&
             (!JSVAL_IS_DOUBLE(v) || !JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v))) &&
             -asNumber(v) == (int)-asNumber(v)) {
             a = lir->ins1(LIR_neg, ::demote(lir, a));
             lir->insGuard(LIR_xt, lir->ins1(LIR_ov, a), snapshot(OVERFLOW_EXIT));
@@ -6581,17 +6581,17 @@ TraceRecorder::functionCall(bool constru
             } else if (argtype == 'f') {
                 *argp = INS_CONSTPTR(JSVAL_TO_OBJECT(fval));
             } else if (argtype == 'p') {
                 if (!getClassPrototype(JSVAL_TO_OBJECT(fval), *argp))
                     return false;
             } else if (argtype == 'R') {
                 *argp = INS_CONSTPTR(cx->runtime);
             } else if (argtype == 'P') {
-                // FIXME: Set pc to imacpc when recording JSOP_CALL inside the 
+                // FIXME: Set pc to imacpc when recording JSOP_CALL inside the
                 //        JSOP_GETELEM imacro (bug 476559).
                 if (*pc == JSOP_CALL && fp->imacpc && *fp->imacpc == JSOP_GETELEM)
                     *argp = INS_CONSTPTR(fp->imacpc);
                 else
                     *argp = INS_CONSTPTR(pc);
             } else if (argtype == 'D') {  /* this, as a number */
                 if (!isNumber(tval))
                     goto next_specialization;
@@ -7039,17 +7039,17 @@ JS_DEFINE_TRCINFO_1(GetElement,
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETELEM()
 {
     jsval& idx = stackval(-1);
     jsval& lval = stackval(-2);
 
     LIns* obj_ins = get(&lval);
     LIns* idx_ins = get(&idx);
-    
+
     if (JSVAL_IS_STRING(lval) && JSVAL_IS_INT(idx)) {
         int i = JSVAL_TO_INT(idx);
         if ((size_t)i >= JSSTRING_LENGTH(JSVAL_TO_STRING(lval)))
             ABORT_TRACE("Invalid string index in JSOP_GETELEM");
         idx_ins = makeNumberInt32(idx_ins);
         LIns* args[] = { idx_ins, obj_ins, cx_ins };
         LIns* unitstr_ins = lir->insCall(&js_String_getelem_ci, args);
         guard(false, lir->ins_eq0(unitstr_ins), MISMATCH_EXIT);
@@ -7281,18 +7281,18 @@ TraceRecorder::guardCallee(jsval& callee
     LIns* callee_ins = get(&callee);
 
     /*
      * NB: The following guard guards at runtime that the callee is a
      * function. Even if the given value is an object that doesn't have
      * a private slot, the value we're matching against is not forgeable.
      */
     guard(true,
-          lir->ins2(LIR_eq, 
-                    lir->ins2(LIR_piand, 
+          lir->ins2(LIR_eq,
+                    lir->ins2(LIR_piand,
                               stobj_get_fslot(callee_ins, JSSLOT_PRIVATE),
                               INS_CONSTPTR((void*)(~JSVAL_INT))),
                     INS_CONSTPTR(OBJ_GET_PRIVATE(cx, callee_obj))),
           exit);
     guard(true,
           lir->ins2(LIR_eq,
                     stobj_get_fslot(callee_ins, JSSLOT_PARENT),
                     INS_CONSTPTR(OBJ_GET_PARENT(cx, callee_obj))),
@@ -7384,19 +7384,19 @@ TraceRecorder::record_JSOP_APPLY()
     JSStackFrame* fp = cx->fp;
     jsbytecode *pc = fp->regs->pc;
     uintN argc = GET_ARGC(pc);
     jsval* vp = fp->regs->sp - (argc + 2);
     JS_ASSERT(vp >= StackBase(fp));
     jsuint length = 0;
     JSObject* aobj = NULL;
     LIns* aobj_ins = NULL;
-    
+
     JS_ASSERT(!fp->imacpc);
-    
+
     if (!VALUE_IS_FUNCTION(cx, vp[0]))
         return record_JSOP_CALL();
 
     JSObject* obj = JSVAL_TO_OBJECT(vp[0]);
     JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
     if (FUN_INTERPRETED(fun))
         return record_JSOP_CALL();
 
@@ -7422,57 +7422,57 @@ TraceRecorder::record_JSOP_APPLY()
     if (apply && argc >= 2) {
         if (argc != 2)
             ABORT_TRACE("apply with excess arguments");
         if (JSVAL_IS_PRIMITIVE(vp[3]))
             ABORT_TRACE("arguments parameter of apply is primitive");
         aobj = JSVAL_TO_OBJECT(vp[3]);
         aobj_ins = get(&vp[3]);
 
-        /* 
+        /*
          * We expect a dense array for the arguments (the other
          * frequent case is the arguments object, which we
-         * don't trace at the moment). 
+         * don't trace at the moment).
          */
         if (!guardDenseArray(aobj, aobj_ins))
             ABORT_TRACE("arguments parameter of apply is not a dense array");
-        
+
         /*
          * We trace only apply calls with a certain number of arguments.
          */
         length = jsuint(aobj->fslots[JSSLOT_ARRAY_LENGTH]);
         if (length >= JS_ARRAY_LENGTH(apply_imacro_table))
             ABORT_TRACE("too many arguments to apply");
-        
+
         /*
          * Make sure the array has the same length at runtime.
          */
-        guard(true, 
+        guard(true,
               lir->ins2i(LIR_eq,
                          stobj_get_fslot(aobj_ins, JSSLOT_ARRAY_LENGTH),
-                         length), 
+                         length),
               BRANCH_EXIT);
-        
+
         return call_imacro(apply_imacro_table[length]);
     }
-    
+
     if (argc >= JS_ARRAY_LENGTH(call_imacro_table))
         ABORT_TRACE("too many arguments to call");
-    
+
     return call_imacro(call_imacro_table[argc]);
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::record_FastNativeCallComplete()
 {
     JS_ASSERT(pendingTraceableNative);
 
     /* At this point the generated code has already called the native function
        and we can no longer fail back to the original pc location (JSOP_CALL)
-       because that would cause the interpreter to re-execute the native 
+       because that would cause the interpreter to re-execute the native
        function, which might have side effects.
 
        Instead, snapshot(), which is invoked from unbox_jsval() below, will see
        that we are currently parked on a traceable native's JSOP_CALL
        instruction, and it will advance the pc to restore by the length of the
        current opcode.  If the native's return type is jsval, snapshot() will
        also indicate in the type map that the element on top of the stack is a
        boxed value which doesn't need to be boxed if the type guard generated
@@ -7484,22 +7484,22 @@ TraceRecorder::record_FastNativeCallComp
         lir->insStorei(INS_CONSTPTR(NULL), cx_ins, (int) offsetof(JSContext, bailExit));
 #endif
         guard(true,
               lir->ins_eq0(
                   lir->insLoad(LIR_ld, cx_ins, (int) offsetof(JSContext, builtinStatus))),
               STATUS_EXIT);
     }
 
-    JS_ASSERT(*cx->fp->regs->pc == JSOP_CALL || 
+    JS_ASSERT(*cx->fp->regs->pc == JSOP_CALL ||
               *cx->fp->regs->pc == JSOP_APPLY);
 
     jsval& v = stackval(-1);
     LIns* v_ins = get(&v);
-    
+
     bool ok = true;
     if (pendingTraceableNative->flags & JSTN_UNBOX_AFTER) {
         unbox_jsval(v, v_ins);
         set(&v, v_ins);
     } else if (JSTN_ERRTYPE(pendingTraceableNative) == FAIL_NEG) {
         /* Already added i2f in functionCall. */
         JS_ASSERT(JSVAL_IS_NUMBER(v));
     } else {
@@ -7642,23 +7642,23 @@ TraceRecorder::elem(jsval& oval, jsval& 
 
     /* check that the index is within bounds */
     jsint i = JSVAL_TO_INT(idx);
     LIns* idx_ins = makeNumberInt32(get(&idx));
 
     LIns* dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
     if (!guardDenseArrayIndex(obj, i, obj_ins, dslots_ins, idx_ins, BRANCH_EXIT)) {
         LIns* rt_ins = lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, runtime));
-        guard(true, 
+        guard(true,
               lir->ins_eq0(lir->insLoad(LIR_ldp, rt_ins,
                                         offsetof(JSRuntime, anyArrayProtoHasElement))),
               MISMATCH_EXIT);
         // Return undefined and indicate that we didn't actually read this (addr_ins).
         v_ins = lir->insImm(JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID));
-        addr_ins = NULL; 
+        addr_ins = NULL;
         return true;
     }
 
     // We can't "see through" a hole to a possible Array.prototype property, so
     // we abort here and guard below (after unboxing).
     vp = &obj->dslots[i];
     if (*vp == JSVAL_HOLE)
         ABORT_TRACE("can't see through hole in dense array");
@@ -8144,29 +8144,29 @@ TraceRecorder::record_JSOP_IN()
         x = lir->insCall(&js_HasNamedPropertyInt32_ci, args);
     } else if (JSVAL_IS_STRING(lval)) {
         if (!js_ValueToStringId(cx, lval, &id))
             ABORT_TRACE("left operand of JSOP_IN didn't convert to a string-id");
         LIns* args[] = { get(&lval), obj_ins, cx_ins };
         x = lir->insCall(&js_HasNamedProperty_ci, args);
     } else {
         ABORT_TRACE("string or integer expected");
-    }        
+    }
 
     guard(false, lir->ins2i(LIR_eq, x, JSVAL_TO_PSEUDO_BOOLEAN(JSVAL_VOID)), OOM_EXIT);
     x = lir->ins2i(LIR_eq, x, 1);
 
     JSObject* obj2;
     JSProperty* prop;
     if (!OBJ_LOOKUP_PROPERTY(cx, obj, id, &obj2, &prop))
         ABORT_TRACE("OBJ_LOOKUP_PROPERTY failed in JSOP_IN");
     bool cond = prop != NULL;
     if (prop)
         OBJ_DROP_PROPERTY(cx, obj2, prop);
-    
+
     /* The interpreter fuses comparisons and the following branch,
        so we have to do that here as well. */
     fuseIf(cx->fp->regs->pc + 1, cond, x);
 
     /* We update the stack after the guard. This is safe since
        the guard bails out at the comparison and the interpreter
        will therefore re-execute the comparison. This way the
        value of the condition doesn't have to be calculated and
@@ -8926,17 +8926,17 @@ TraceRecorder::record_JSOP_GENERATOR()
     return false;
 #if 0
     JSStackFrame* fp = cx->fp;
     if (fp->callobj || fp->argsobj || fp->varobj)
         ABORT_TRACE("can't trace hard-case generator");
 
     // Generate a type map for the outgoing frame and stash it in the LIR
     unsigned stackSlots = js_NativeStackSlots(cx, 0/*callDepth*/);
-    if (stackSlots > MAX_SKIP_BYTES) 
+    if (stackSlots > MAX_SKIP_BYTES)
         ABORT_TRACE("generator requires saving too much stack");
     LIns* data = lir->skip(stackSlots * sizeof(uint8));
     uint8* typemap = (uint8 *)data->payload();
     uint8* m = typemap;
     /* Determine the type of a store by looking at the current type of the actual value the
        interpreter is using. For numbers we have to check what kind of store we used last
        (integer or double) to figure out what the side exit should reflect in its typemap. */
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0/*callDepth*/,