Back out ee940e4debd0:7636c7036e2e (bug 659577) for asserting in testBug550743.js
author Phil Ringnalda <philringnalda@gmail.com>
date Fri, 01 Jun 2012 00:12:22 -0700
changeset 95446 727f3e801afb8d835e9bce67f26e56c8caaaff0f
parent 95445 b6609919552621ba64b704911543905e0246c56e
child 95447 11c6d420c23c96331c82281beba92fd51b12883f
push id 10121
push user philringnalda@gmail.com
push date Fri, 01 Jun 2012 07:14:12 +0000
treeherder mozilla-inbound@727f3e801afb
bugs 659577, 550743
milestone 15.0a1
backs out ee940e4debd006dee5a58f3d6a48934580c3eadd
js/src/frontend/BytecodeEmitter.cpp
js/src/frontend/BytecodeEmitter.h
js/src/frontend/TreeContext-inl.h
js/src/frontend/TreeContext.h
js/src/gc/Barrier.h
js/src/gc/Marking.h
js/src/jit-test/tests/basic/testAliasedLet.js
js/src/jit-test/tests/basic/testFunApplyOverflow.js
js/src/jit-test/tests/basic/testGeneratorDieButScopeAlive.js
js/src/jit-test/tests/debug/Frame-eval-14.js
js/src/js.msg
js/src/jsanalyze.cpp
js/src/jsanalyze.h
js/src/jsapi-tests/testArgumentsObject.cpp
js/src/jsarray.cpp
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jscntxtinlines.h
js/src/jsdbgapi.cpp
js/src/jsfun.cpp
js/src/jsfun.h
js/src/jsgc.cpp
js/src/jsinfer.cpp
js/src/jsinfer.h
js/src/jsinterp.cpp
js/src/jsinterp.h
js/src/jsinterpinlines.h
js/src/jsiter.cpp
js/src/jsiter.h
js/src/jsobj.cpp
js/src/jsopcode.cpp
js/src/jsopcode.h
js/src/jsopcode.tbl
js/src/jsscope.cpp
js/src/jsscope.h
js/src/jsscript.cpp
js/src/jsscript.h
js/src/jsscriptinlines.h
js/src/jsstr.cpp
js/src/jsval.h
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
js/src/methodjit/InvokeHelpers.cpp
js/src/methodjit/MethodJIT.cpp
js/src/methodjit/MethodJIT.h
js/src/methodjit/MonoIC.cpp
js/src/methodjit/PolyIC.cpp
js/src/methodjit/StubCalls.cpp
js/src/methodjit/StubCalls.h
js/src/shell/js.cpp
js/src/vm/ArgumentsObject-inl.h
js/src/vm/ArgumentsObject.cpp
js/src/vm/ArgumentsObject.h
js/src/vm/Debugger.cpp
js/src/vm/ScopeObject-inl.h
js/src/vm/ScopeObject.cpp
js/src/vm/ScopeObject.h
js/src/vm/Stack-inl.h
js/src/vm/Stack.cpp
js/src/vm/Stack.h
--- a/js/src/frontend/BytecodeEmitter.cpp
+++ b/js/src/frontend/BytecodeEmitter.cpp
@@ -800,17 +800,17 @@ EmitFunctionOp(JSContext *cx, JSOp op, u
 {
     return EmitIndex32(cx, op, index, bce);
 }
 
 static bool
 EmitObjectOp(JSContext *cx, ObjectBox *objbox, JSOp op, BytecodeEmitter *bce)
 {
     JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
-    return EmitIndex32(cx, op, bce->objectList.add(objbox), bce);
+    return EmitIndex32(cx, op, bce->objectList.index(objbox), bce);
 }
 
 static bool
 EmitRegExp(JSContext *cx, uint32_t index, BytecodeEmitter *bce)
 {
     return EmitIndex32(cx, JSOP_REGEXP, index, bce);
 }
 
@@ -828,91 +828,53 @@ EmitUnaliasedVarOp(JSContext *cx, JSOp o
     ptrdiff_t off = EmitN(cx, bce, op, sizeof(uint16_t));
     if (off < 0)
         return false;
     SET_UINT16(bce->code(off), slot);
     return true;
 }
 
 static bool
-EmitAliasedVarOp(JSContext *cx, JSOp op, ScopeCoordinate sc, BytecodeEmitter *bce)
+EmitAliasedVarOp(JSContext *cx, JSOp op, uint16_t binding, JSAtom *atom, BytecodeEmitter *bce)
 {
     JS_ASSERT(JOF_OPTYPE(op) == JOF_SCOPECOORD);
 
-    uint32_t maybeBlockIndex = UINT32_MAX;
-    if (bce->sc->blockChain)
-        maybeBlockIndex = bce->objectList.indexOf(bce->sc->blockChain);
+    /*
+     * XXX This is temporary: bug 659577 will need to compute the number of
+     * cloned block objects to hop over.
+     */
+    uint16_t hops = 0;
+
+    jsatomid atomIndex;
+    if (!bce->makeAtomIndex(atom, &atomIndex))
+        return false;
 
     bool decomposed = js_CodeSpec[op].format & JOF_DECOMPOSE;
     unsigned n = 2 * sizeof(uint16_t) + sizeof(uint32_t) + (decomposed ? 1 : 0);
-    JS_ASSERT(int(n) + 1 /* op */ == js_CodeSpec[op].length);
 
     ptrdiff_t off = EmitN(cx, bce, op, n);
     if (off < 0)
         return false;
 
     jsbytecode *pc = bce->code(off);
-    SET_UINT16(pc, sc.hops);
+    SET_UINT16(pc, hops);
     pc += sizeof(uint16_t);
-    SET_UINT16(pc, sc.slot);
+    SET_UINT16(pc, binding);
     pc += sizeof(uint16_t);
-    SET_UINT32_INDEX(pc, maybeBlockIndex);
+    SET_UINT32_INDEX(pc, atomIndex);
     return true;
 }
 
-static unsigned
-ClonedBlockDepth(BytecodeEmitter *bce)
-{
-    unsigned clonedBlockDepth = 0;
-    for (StaticBlockObject *b = bce->sc->blockChain; b; b = b->enclosingBlock()) {
-        if (b->needsClone())
-            ++clonedBlockDepth;
-    }
-
-    return clonedBlockDepth;
-}
-
 static bool
 EmitAliasedVarOp(JSContext *cx, JSOp op, ParseNode *pn, BytecodeEmitter *bce)
 {
-    /*
-     * The contents of the dynamic scope chain (fp->scopeChain) exactly reflect
-     * the needsClone-subset of the block chain. Use this to determine the
-     * number of ClonedBlockObjects on fp->scopeChain to skip to find the scope
-     * object containing the var to which pn is bound. ALIASEDVAR ops cannot
-     * reach across with scopes so ClonedBlockObjects is the only NestedScope
-     * on the scope chain.
-     */
-    ScopeCoordinate sc;
-    if (JOF_OPTYPE(pn->getOp()) == JOF_QARG) {
-        JS_ASSERT(bce->sc->funIsHeavyweight());
-        sc.hops = ClonedBlockDepth(bce);
-        sc.slot = bce->sc->bindings.argToSlot(pn->pn_cookie.slot());
-    } else {
-        JS_ASSERT(JOF_OPTYPE(pn->getOp()) == JOF_LOCAL || pn->isKind(PNK_FUNCTION));
-        unsigned local = pn->pn_cookie.slot();
-        if (local < bce->sc->bindings.numVars()) {
-            JS_ASSERT(bce->sc->funIsHeavyweight());
-            sc.hops = ClonedBlockDepth(bce);
-            sc.slot = bce->sc->bindings.localToSlot(local);
-        } else {
-            unsigned depth = local - bce->sc->bindings.numVars();
-            unsigned hops = 0;
-            StaticBlockObject *b = bce->sc->blockChain;
-            while (!b->containsVarAtDepth(depth)) {
-                if (b->needsClone())
-                    hops++;
-                b = b->enclosingBlock();
-            }
-            sc.hops = hops;
-            sc.slot = depth - b->stackDepth();
-        }
-    }
-
-    return EmitAliasedVarOp(cx, op, sc, bce);
+    uint16_t binding = JOF_OPTYPE(pn->getOp()) == JOF_QARG
+                       ? bce->sc->bindings.argToBinding(pn->pn_cookie.slot())
+                       : bce->sc->bindings.localToBinding(pn->pn_cookie.slot());
+    return EmitAliasedVarOp(cx, op, binding, pn->atom(), bce);
 }
 
 static bool
 EmitVarOp(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
 {
     JS_ASSERT(pn->isKind(PNK_FUNCTION) || pn->isKind(PNK_NAME));
     JS_ASSERT_IF(pn->isKind(PNK_NAME), JOF_OPTYPE(op) == JOF_QARG || JOF_OPTYPE(op) == JOF_LOCAL);
     JS_ASSERT(!pn->pn_cookie.isFree());
@@ -1065,16 +1027,17 @@ EmitEnterBlock(JSContext *cx, BytecodeEm
     if (depthPlusFixed < 0)
         return false;
 
     for (unsigned i = 0; i < blockObj->slotCount(); i++) {
         Definition *dn = blockObj->maybeDefinitionParseNode(i);
 
         /* Beware the empty destructuring dummy. */
         if (!dn) {
+            JS_ASSERT(i + 1 <= blockObj->slotCount());
             blockObj->setAliased(i, bce->sc->bindingsAccessedDynamically());
             continue;
         }
 
         JS_ASSERT(dn->isDefn());
         JS_ASSERT(unsigned(dn->frameSlot() + depthPlusFixed) < JS_BIT(16));
         dn->pn_cookie.set(dn->pn_cookie.level(), uint16_t(dn->frameSlot() + depthPlusFixed));
 #ifdef DEBUG
@@ -2623,23 +2586,22 @@ frontend::EmitFunctionScript(JSContext *
      */
 
     if (bce->sc->funArgumentsHasLocalBinding()) {
         JS_ASSERT(bce->next() == bce->base());  /* See JSScript::argumentsBytecode. */
         bce->switchToProlog();
         if (Emit1(cx, bce, JSOP_ARGUMENTS) < 0)
             return false;
         if (bce->sc->bindingsAccessedDynamically()) {
-            ScopeCoordinate sc;
-            sc.hops = 0;
-            sc.slot = bce->sc->bindings.localToSlot(bce->sc->argumentsLocal());
-            if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce))
+            JSAtom *atom = cx->runtime->atomState.argumentsAtom;
+            uint16_t binding = bce->sc->bindings.localToBinding(bce->sc->argumentsLocalSlot());
+            if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, binding, atom, bce))
                 return false;
         } else {
-            if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, bce->sc->argumentsLocal(), bce))
+            if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, bce->sc->argumentsLocalSlot(), bce))
                 return false;
         }
         if (Emit1(cx, bce, JSOP_POP) < 0)
             return false;
         bce->switchToMain();
     }
 
     if (bce->sc->funIsGenerator()) {
@@ -4865,17 +4827,17 @@ EmitFunc(JSContext *cx, BytecodeEmitter 
         bce2.sc->staticLevel = bce->sc->staticLevel + 1;
 
         /* We measured the max scope depth when we parsed the function. */
         if (!EmitFunctionScript(cx, &bce2, pn->pn_body))
             return false;
     }
 
     /* Make the function object a literal in the outer script's pool. */
-    unsigned index = bce->objectList.add(pn->pn_funbox);
+    unsigned index = bce->objectList.index(pn->pn_funbox);
 
     /* Emit a bytecode pointing to the closure object in its immediate. */
     if (pn->getOp() != JSOP_NOP) {
         if (pn->pn_funbox->inGenexpLambda && NewSrcNote(cx, bce, SRC_GENEXP) < 0)
             return false;
 
         return EmitFunctionOp(cx, pn->getOp(), index, bce);
     }
@@ -5798,17 +5760,17 @@ EmitObject(JSContext *cx, BytecodeEmitte
     if (obj) {
         /*
          * The object survived and has a predictable shape: update the original
          * bytecode.
          */
         ObjectBox *objbox = bce->parser->newObjectBox(obj);
         if (!objbox)
             return false;
-        unsigned index = bce->objectList.add(objbox);
+        unsigned index = bce->objectList.index(objbox);
         MOZ_STATIC_ASSERT(JSOP_NEWINIT_LENGTH == JSOP_NEWOBJECT_LENGTH,
                           "newinit and newobject must have equal length to edit in-place");
         EMIT_UINT32_IN_PLACE(offset, JSOP_NEWOBJECT, uint32_t(index));
     }
 
     return true;
 }
 
@@ -6430,17 +6392,17 @@ frontend::EmitTree(JSContext *cx, Byteco
         break;
 
       case PNK_NUMBER:
         ok = EmitNumberOp(cx, pn->pn_dval, bce);
         break;
 
       case PNK_REGEXP:
         JS_ASSERT(pn->isOp(JSOP_REGEXP));
-        ok = EmitRegExp(cx, bce->regexpList.add(pn->pn_objbox), bce);
+        ok = EmitRegExp(cx, bce->regexpList.index(pn->pn_objbox), bce);
         break;
 
 #if JS_HAS_XML_SUPPORT
       case PNK_ANYNAME:
 #endif
       case PNK_TRUE:
       case PNK_FALSE:
       case PNK_THIS:
@@ -6930,34 +6892,24 @@ frontend::FinishTakingTryNotes(BytecodeE
  * threads, or using multiple global objects, or both, for efficiency.
  *
  * In such cases, naively following ECMA leads to wrongful sharing of RegExp
  * objects, which makes for collisions on the lastIndex property (especially
  * for global regexps) and on any ad-hoc properties.  Also, __proto__ refers to
  * the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
  */
 unsigned
-CGObjectList::add(ObjectBox *objbox)
+CGObjectList::index(ObjectBox *objbox)
 {
     JS_ASSERT(!objbox->emitLink);
     objbox->emitLink = lastbox;
     lastbox = objbox;
     return length++;
 }
 
-unsigned
-CGObjectList::indexOf(JSObject *obj)
-{
-    JS_ASSERT(length > 0);
-    unsigned index = length - 1;
-    for (ObjectBox *box = lastbox; box->object != obj; box = box->emitLink)
-        index--;
-    return index;
-}
-
 void
 CGObjectList::finish(ObjectArray *array)
 {
     JS_ASSERT(length <= INDEX_LIMIT);
     JS_ASSERT(length == array->length);
 
     js::HeapPtrObject *cursor = array->vector + array->length;
     ObjectBox *objbox = lastbox;
--- a/js/src/frontend/BytecodeEmitter.h
+++ b/js/src/frontend/BytecodeEmitter.h
@@ -44,18 +44,17 @@ struct TryNode {
 };
 
 struct CGObjectList {
     uint32_t            length;     /* number of emitted so far objects */
     ObjectBox           *lastbox;   /* last emitted object */
 
     CGObjectList() : length(0), lastbox(NULL) {}
 
-    unsigned add(ObjectBox *objbox);
-    unsigned indexOf(JSObject *obj);
+    unsigned index(ObjectBox *objbox);
     void finish(ObjectArray *array);
 };
 
 class GCConstList {
     Vector<Value> list;
   public:
     GCConstList(JSContext *cx) : list(cx) {}
     bool append(Value v) { JS_ASSERT_IF(v.isString(), v.toString()->isAtom()); return list.append(v); }
--- a/js/src/frontend/TreeContext-inl.h
+++ b/js/src/frontend/TreeContext-inl.h
@@ -49,18 +49,17 @@ SharedContext::atBodyLevel()
 }
 
 inline bool
 SharedContext::needStrictChecks() {
     return context->hasStrictOption() || inStrictMode();
 }
 
 inline unsigned
-SharedContext::argumentsLocal() const
-{
+SharedContext::argumentsLocalSlot() const {
     PropertyName *arguments = context->runtime->atomState.argumentsAtom;
     unsigned slot;
     DebugOnly<BindingKind> kind = bindings.lookup(context, arguments, &slot);
     JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
     return slot;
 }
 
 inline
--- a/js/src/frontend/TreeContext.h
+++ b/js/src/frontend/TreeContext.h
@@ -135,17 +135,19 @@ struct SharedContext {
     JSContext       *context;
 
     uint32_t        bodyid;         /* block number of program/function body */
     uint32_t        blockidGen;     /* preincremented block number generator */
 
     StmtInfo        *topStmt;       /* top of statement info stack */
     StmtInfo        *topScopeStmt;  /* top lexical scope statement */
     Rooted<StaticBlockObject *> blockChain;
-                                    /* compile time block scope chain */
+                                    /* compile time block scope chain (NB: one
+                                       deeper than the topScopeStmt/downScope
+                                       chain when in head of let block/expr) */
 
   private:
     RootedFunction  fun_;           /* function to store argument and variable
                                        names when inFunction is set */
     RootedObject    scopeChain_;    /* scope chain object for the script */
 
   public:
     unsigned        staticLevel;    /* static compilation unit nesting level */
@@ -181,17 +183,17 @@ struct SharedContext {
     void setFunIsHeavyweight()              { cxFlags.funIsHeavyweight            = true; }
     void setFunIsGenerator()                { cxFlags.funIsGenerator              = true; }
     void setFunMightAliasLocals()           { cxFlags.funMightAliasLocals         = true; }
     void setFunHasExtensibleScope()         { cxFlags.funHasExtensibleScope       = true; }
     void setFunArgumentsHasLocalBinding()   { cxFlags.funArgumentsHasLocalBinding = true; }
     void setFunDefinitelyNeedsArgsObj()     { JS_ASSERT(cxFlags.funArgumentsHasLocalBinding);
                                               cxFlags.funDefinitelyNeedsArgsObj   = true; }
 
-    unsigned argumentsLocal() const;
+    unsigned argumentsLocalSlot() const;
 
     JSFunction *fun() const {
         JS_ASSERT(inFunction);
         return fun_;
     }
     void setFunction(JSFunction *fun) {
         JS_ASSERT(inFunction);
         fun_ = fun;
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -333,17 +333,16 @@ class EncapsulatedValue
     bool isBoolean() const { return value.isBoolean(); }
     bool isTrue() const { return value.isTrue(); }
     bool isFalse() const { return value.isFalse(); }
     bool isNumber() const { return value.isNumber(); }
     bool isInt32() const { return value.isInt32(); }
     bool isDouble() const { return value.isDouble(); }
     bool isString() const { return value.isString(); }
     bool isObject() const { return value.isObject(); }
-    bool isMagic() const { return value.isMagic(); }
     bool isMagic(JSWhyMagic why) const { return value.isMagic(why); }
     bool isGCThing() const { return value.isGCThing(); }
     bool isMarkable() const { return value.isMarkable(); }
 
     bool toBoolean() const { return value.toBoolean(); }
     double toNumber() const { return value.toNumber(); }
     int32_t toInt32() const { return value.toInt32(); }
     double toDouble() const { return value.toDouble(); }
@@ -459,24 +458,16 @@ SlotRangeWriteBarrierPost(JSCompartment 
 static inline const Value *
 Valueify(const EncapsulatedValue *array)
 {
     JS_STATIC_ASSERT(sizeof(HeapValue) == sizeof(Value));
     JS_STATIC_ASSERT(sizeof(HeapSlot) == sizeof(Value));
     return (const Value *)array;
 }
 
-static inline HeapValue *
-HeapValueify(Value *v)
-{
-    JS_STATIC_ASSERT(sizeof(HeapValue) == sizeof(Value));
-    JS_STATIC_ASSERT(sizeof(HeapSlot) == sizeof(Value));
-    return (HeapValue *)v;
-}
-
 class HeapSlotArray
 {
     HeapSlot *array;
 
   public:
     HeapSlotArray(HeapSlot *array) : array(array) {}
 
     operator const Value *() const { return Valueify(array); }
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -124,22 +124,16 @@ MarkIdRootRange(JSTracer *trc, size_t le
 /*** Value Marking ***/
 
 void
 MarkValue(JSTracer *trc, EncapsulatedValue *v, const char *name);
 
 void
 MarkValueRange(JSTracer *trc, size_t len, EncapsulatedValue *vec, const char *name);
 
-inline void
-MarkValueRange(JSTracer *trc, HeapValue *begin, HeapValue *end, const char *name)
-{
-    return MarkValueRange(trc, end - begin, begin, name);
-}
-
 void
 MarkValueRoot(JSTracer *trc, Value *v, const char *name);
 
 void
 MarkValueRootRange(JSTracer *trc, size_t len, Value *vec, const char *name);
 
 inline void
 MarkValueRootRange(JSTracer *trc, Value *begin, Value *end, const char *name)
deleted file mode 100644
--- a/js/src/jit-test/tests/basic/testAliasedLet.js
+++ /dev/null
@@ -1,11 +0,0 @@
-function f() {
-    let (x, y, z) {
-        eval('x = 1; y = 2; z = 3');
-        for (var i = 0; i < 10000; ++i) {
-            assertEq(x, 1);
-            assertEq(y, 2);
-            assertEq(z, 3);
-        }
-    }
-}
-f();
deleted file mode 100644
--- a/js/src/jit-test/tests/basic/testFunApplyOverflow.js
+++ /dev/null
@@ -1,10 +0,0 @@
-function g(x,y) {
-    return x + y;
-}
-
-function f(x) {
-    return g.apply(null, arguments);
-}
-
-for (var i = 0; i < 100; ++i)
-    assertEq(f(i, 1), i+1);
deleted file mode 100644
--- a/js/src/jit-test/tests/basic/testGeneratorDieButScopeAlive.js
+++ /dev/null
@@ -1,16 +0,0 @@
-var g = newGlobal('new-compartment');
-var dbg = new Debugger(g);
-
-
-var hits = 0;
-dbg.onDebuggerStatement = function(frame) {
-    ++hits;
-    frame.older.eval("escaped = function() { return y }");
-}
-
-g.escaped = undefined;
-g.eval("function h() { debugger }");
-g.eval("(function () { var y = 42; h(); yield })().next();");
-assertEq(g.eval("escaped()"), 42);
-gc();
-assertEq(g.eval("escaped()"), 42);
deleted file mode 100644
--- a/js/src/jit-test/tests/debug/Frame-eval-14.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// Test the corner case of accessing an unaliased variable of a block
-// while the block is not live.
-
-var g = newGlobal('new-compartment');
-g.eval("function h() { debugger }");
-g.eval("function f() { let (x = 1, y) { (function() { y = 0 })(); h() } }");
-g.eval("var surprise = null");
-
-var dbg = new Debugger(g);
-dbg.onDebuggerStatement = function(hFrame) {
-    var fFrame = hFrame.older;
-    assertEq(fFrame.environment.getVariable('x'), 1);
-    assertEq(fFrame.environment.getVariable('y'), 0);
-    fFrame.eval("surprise = function() { return ++x }");
-    assertEq(g.surprise(), 2);
-}
-g.f();
-assertEq(g.surprise !== null, true);
-
-// Either succeed or throw an error about 'x' not being live
-try {
-    assertEq(g.surprise(), 3);
-} catch (e) {
-    assertEq(e+'', 'Error: x is not live');
-}
--- a/js/src/js.msg
+++ b/js/src/js.msg
@@ -220,17 +220,17 @@ MSG_DEF(JSMSG_RESERVED_SLOT_RANGE,    16
 MSG_DEF(JSMSG_CANT_DECODE_PRINCIPALS, 167, 0, JSEXN_INTERNALERR, "can't decode JSPrincipals")
 MSG_DEF(JSMSG_CANT_SEAL_OBJECT,       168, 1, JSEXN_ERR, "can't seal {0} objects")
 MSG_DEF(JSMSG_TOO_MANY_CATCH_VARS,    169, 0, JSEXN_SYNTAXERR, "too many catch variables")
 MSG_DEF(JSMSG_BAD_XML_MARKUP,         170, 0, JSEXN_SYNTAXERR, "invalid XML markup")
 MSG_DEF(JSMSG_BAD_XML_CHARACTER,      171, 0, JSEXN_SYNTAXERR, "illegal XML character")
 MSG_DEF(JSMSG_BAD_DEFAULT_XML_NAMESPACE,172,0,JSEXN_SYNTAXERR, "invalid default XML namespace")
 MSG_DEF(JSMSG_BAD_XML_NAME_SYNTAX,    173, 0, JSEXN_SYNTAXERR, "invalid XML name")
 MSG_DEF(JSMSG_BRACKET_AFTER_ATTR_EXPR,174, 0, JSEXN_SYNTAXERR, "missing ] after attribute expression")
-MSG_DEF(JSMSG_NESTING_GENERATOR,      175, 0, JSEXN_TYPEERR, "already executing generator")
+MSG_DEF(JSMSG_NESTING_GENERATOR,      175, 1, JSEXN_TYPEERR, "already executing generator {0}")
 MSG_DEF(JSMSG_CURLY_IN_XML_EXPR,      176, 0, JSEXN_SYNTAXERR, "missing } in XML expression")
 MSG_DEF(JSMSG_BAD_XML_NAMESPACE,      177, 1, JSEXN_TYPEERR, "invalid XML namespace {0}")
 MSG_DEF(JSMSG_BAD_XML_ATTR_NAME,      178, 1, JSEXN_TYPEERR, "invalid XML attribute name {0}")
 MSG_DEF(JSMSG_BAD_XML_NAME,           179, 1, JSEXN_TYPEERR, "invalid XML name {0}")
 MSG_DEF(JSMSG_BAD_XML_CONVERSION,     180, 1, JSEXN_TYPEERR, "can't convert {0} to XML")
 MSG_DEF(JSMSG_BAD_XMLLIST_CONVERSION, 181, 1, JSEXN_TYPEERR, "can't convert {0} to XMLList")
 MSG_DEF(JSMSG_BAD_GENERATOR_SEND,     182, 1, JSEXN_TYPEERR, "attempt to send {0} to newborn generator")
 MSG_DEF(JSMSG_NO_ASSIGN_IN_XML_ATTR,  183, 0, JSEXN_SYNTAXERR, "missing = in XML attribute")
--- a/js/src/jsanalyze.cpp
+++ b/js/src/jsanalyze.cpp
@@ -315,21 +315,23 @@ ScriptAnalysis::analyzeBytecode(JSContex
           case JSOP_SETNAME:
           case JSOP_DELNAME:
             usesScopeChain_ = true;
             isInlineable = false;
             break;
 
           case JSOP_GETALIASEDVAR:
           case JSOP_CALLALIASEDVAR:
-          case JSOP_SETALIASEDVAR: {
+          case JSOP_SETALIASEDVAR:
             JS_ASSERT(!isInlineable);
             usesScopeChain_ = true;
+            /* XXX: this can be removed after bug 659577. */
+            if (ScopeCoordinate(pc).binding >= script->nfixed)
+                localsAliasStack_ = true;
             break;
-          }
 
           case JSOP_DEFFUN:
           case JSOP_DEFVAR:
           case JSOP_DEFCONST:
           case JSOP_SETCONST:
             extendsScope_ = true;
             isInlineable = canTrackVars = false;
             break;
@@ -1911,25 +1913,25 @@ ScriptAnalysis::needsArgsObj(NeedsArgsOb
         return needsArgsObj(state, SSAValue::PhiValue(use->offset, use->u.phi));
 
     jsbytecode *pc = script->code + use->offset;
     JSOp op = JSOp(*pc);
 
     if (op == JSOP_POP || op == JSOP_POPN)
         return false;
 
+#ifdef JS_METHODJIT
     /* SplatApplyArgs can read fp->canonicalActualArg(i) directly. */
     if (state.canOptimizeApply && op == JSOP_FUNAPPLY && GET_ARGC(pc) == 2 && use->u.which == 0) {
-#ifdef JS_METHODJIT
         JS_ASSERT(mjit::IsLowerableFunCallOrApply(pc));
-#endif
         state.haveOptimizedApply = true;
         state.canOptimizeApply = false;
         return false;
     }
+#endif
 
     /* arguments[i] can read fp->canonicalActualArg(i) directly. */
     if (!state.haveOptimizedApply && op == JSOP_GETELEM && use->u.which == 1) {
         state.canOptimizeApply = false;
         return false;
     }
 
     /* arguments.length length can read fp->numActualArgs() directly. */
@@ -1959,21 +1961,18 @@ ScriptAnalysis::needsArgsObj(JSContext *
 {
     JS_ASSERT(script->argumentsHasLocalBinding());
 
     /*
      * Since let variables and dynamic name access are not tracked, we cannot
      * soundly perform this analysis in their presence. Also, debuggers may
      * want to see 'arguments', so assume every arguments object escapes.
      */
-    if (script->bindingsAccessedDynamically || script->numClosedArgs() > 0 ||
-        localsAliasStack() || cx->compartment->debugMode())
-    {
+    if (script->bindingsAccessedDynamically || localsAliasStack() || cx->compartment->debugMode())
         return true;
-    }
 
     unsigned pcOff = script->argumentsBytecode() - script->code;
 
     NeedsArgsObjState state(cx);
     return needsArgsObj(state, SSAValue::PushedValue(pcOff, 0));
 }
 
 CrossSSAValue
--- a/js/src/jsanalyze.h
+++ b/js/src/jsanalyze.h
@@ -358,24 +358,23 @@ static inline uint32_t GetBytecodeSlot(J
       case JSOP_LOCALINC:
       case JSOP_LOCALDEC:
         return LocalSlot(script, GET_SLOTNO(pc));
 
       case JSOP_GETALIASEDVAR:
       case JSOP_CALLALIASEDVAR:
       case JSOP_SETALIASEDVAR:
       {
-        ScopeCoordinate sc(pc);
-        if (StaticBlockObject *block = ScopeCoordinateBlockChain(script, pc))
-            return LocalSlot(script, block->slotToFrameLocal(script, sc.slot));
-        if (script->bindings.slotIsArg(sc.slot))
-            return ArgSlot(script->bindings.slotToArg(sc.slot));
-        return LocalSlot(script, script->bindings.slotToLocal(sc.slot));
+          ScopeCoordinate sc = ScopeCoordinate(pc);
+          return script->bindings.bindingIsArg(sc.binding)
+                 ? ArgSlot(script->bindings.bindingToArg(sc.binding))
+                 : LocalSlot(script, script->bindings.bindingToLocal(sc.binding));
       }
 
+
       case JSOP_THIS:
         return ThisSlot();
 
       default:
         JS_NOT_REACHED("Bad slot opcode");
         return 0;
     }
 }
--- a/js/src/jsapi-tests/testArgumentsObject.cpp
+++ b/js/src/jsapi-tests/testArgumentsObject.cpp
@@ -92,17 +92,17 @@ ExhaustiveTest(const char funcode[])
     EVAL(CALL_CODES[ArgCount], &v);
     ArgumentsObject &argsobj = JSVAL_TO_OBJECT(v)->asArguments();
 
     Value elems[MAX_ELEMS];
 
     for (size_t i = 0; i <= ArgCount; i++) {
         for (size_t j = 0; j <= ArgCount - i; j++) {
             ClearElements(elems);
-            CHECK(argsobj.maybeGetElements(i, j, elems));
+            CHECK(argsobj.getElements(i, j, elems));
             for (size_t k = 0; k < j; k++)
                 CHECK_SAME(elems[k], INT_TO_JSVAL(i + k));
             for (size_t k = j; k < MAX_ELEMS - 1; k++)
                 CHECK_SAME(elems[k], JSVAL_NULL);
             CHECK_SAME(elems[MAX_ELEMS - 1], INT_TO_JSVAL(42));
         }
     }
 
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -394,17 +394,17 @@ GetElement(JSContext *cx, JSObject *obj,
 {
     AssertGreaterThanZero(index);
     if (obj->isDenseArray() && index < obj->getDenseArrayInitializedLength() &&
         !(*vp = obj->getDenseArrayElement(uint32_t(index))).isMagic(JS_ARRAY_HOLE)) {
         *hole = JS_FALSE;
         return JS_TRUE;
     }
     if (obj->isArguments()) {
-        if (obj->asArguments().maybeGetElement(uint32_t(index), vp)) {
+        if (obj->asArguments().getElement(uint32_t(index), vp)) {
             *hole = JS_FALSE;
             return true;
         }
     }
 
     return DoGetElement(cx, obj, index, hole, vp);
 }
 
@@ -433,17 +433,17 @@ GetElements(JSContext *cx, HandleObject 
         for (Value *dst = vp; src < srcend; ++dst, ++src)
             *dst = src->isMagic(JS_ARRAY_HOLE) ? UndefinedValue() : *src;
         return true;
     }
 
     if (aobj->isArguments()) {
         ArgumentsObject &argsobj = aobj->asArguments();
         if (!argsobj.hasOverriddenLength()) {
-            if (argsobj.maybeGetElements(0, length, vp))
+            if (argsobj.getElements(0, length, vp))
                 return true;
         }
     }
 
     return GetElementsSlow(cx, aobj, length, vp);
 }
 
 }
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -989,19 +989,19 @@ JSContext::JSContext(JSRuntime *rt)
 #ifdef JS_METHODJIT
     methodJitEnabled(false),
 #endif
     inferenceEnabled(false),
 #ifdef MOZ_TRACE_JSCALLS
     functionCallback(NULL),
 #endif
     enumerators(NULL),
-    innermostGenerator_(NULL),
 #ifdef DEBUG
     stackIterAssertionEnabled(true),
+    okToAccessUnaliasedBindings(0),
 #endif
     activeCompilations(0)
 {
     PodZero(&link);
 #ifdef JSGC_ROOT_ANALYSIS
     PodArrayZero(thingGCRooters);
 #ifdef DEBUG
     skipGCRooters = NULL;
@@ -1075,34 +1075,36 @@ void
 JSContext::wrapPendingException()
 {
     Value v = getPendingException();
     clearPendingException();
     if (compartment->wrap(this, &v))
         setPendingException(v);
 }
 
-
-void
-JSContext::enterGenerator(JSGenerator *gen)
+JSGenerator *
+JSContext::generatorFor(StackFrame *fp) const
 {
-    JS_ASSERT(!gen->prevGenerator);
-    gen->prevGenerator = innermostGenerator_;
-    innermostGenerator_ = gen;
-}
+    JS_ASSERT(stack.containsSlow(fp));
+    JS_ASSERT(fp->isGeneratorFrame());
+    JS_ASSERT(!fp->isFloatingGenerator());
+    JS_ASSERT(!genStack.empty());
 
-void
-JSContext::leaveGenerator(JSGenerator *gen)
-{
-    JS_ASSERT(innermostGenerator_ == gen);
-    innermostGenerator_ = innermostGenerator_->prevGenerator;
-    gen->prevGenerator = NULL;
+    if (JS_LIKELY(fp == genStack.back()->liveFrame()))
+        return genStack.back();
+
+    /* General case; should only be needed for debug APIs. */
+    for (size_t i = 0; i < genStack.length(); ++i) {
+        if (genStack[i]->liveFrame() == fp)
+            return genStack[i];
+    }
+    JS_NOT_REACHED("no matching generator");
+    return NULL;
 }
 
-
 bool
 JSContext::runningWithTrustedPrincipals() const
 {
     return !compartment || compartment->principals == runtime->trustedPrincipals();
 }
 
 void
 JSRuntime::setGCMaxMallocBytes(size_t value)
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -1237,22 +1237,39 @@ struct JSContext : js::ContextFriendFiel
 #endif
 
     DSTOffsetCache dstOffsetCache;
 
     /* List of currently active non-escaping enumerators (for-in). */
     JSObject *enumerators;
 
   private:
-    /* Innermost-executing generator or null if no generator are executing. */
-    JSGenerator *innermostGenerator_;
+    /*
+     * To go from a live generator frame (on the stack) to its generator object
+     * (see comment js_FloatingFrameIfGenerator), we maintain a stack of active
+     * generators, pushing and popping when entering and leaving generator
+     * frames, respectively.
+     */
+    js::Vector<JSGenerator *, 2, js::SystemAllocPolicy> genStack;
+
   public:
-    JSGenerator *innermostGenerator() const { return innermostGenerator_; }
-    void enterGenerator(JSGenerator *gen);
-    void leaveGenerator(JSGenerator *gen);
+    /* Return the generator object for the given generator frame. */
+    JSGenerator *generatorFor(js::StackFrame *fp) const;
+
+    /* Early OOM-check. */
+    inline bool ensureGeneratorStackSpace();
+
+    bool enterGenerator(JSGenerator *gen) {
+        return genStack.append(gen);
+    }
+
+    void leaveGenerator(JSGenerator *gen) {
+        JS_ASSERT(genStack.back() == gen);
+        genStack.popBack();
+    }
 
     inline void* malloc_(size_t bytes) {
         return runtime->malloc_(bytes, this);
     }
 
     inline void* mallocNoReport(size_t bytes) {
         JS_ASSERT(bytes != 0);
         return runtime->malloc_(bytes, NULL);
@@ -1274,16 +1291,19 @@ struct JSContext : js::ContextFriendFiel
         runtime->free_(p);
     }
 
     JS_DECLARE_NEW_METHODS(malloc_, inline)
     JS_DECLARE_DELETE_METHODS(free_, inline)
 
     void purge();
 
+    /* For DEBUG. */
+    inline void assertValidStackDepth(unsigned depth);
+
     bool isExceptionPending() {
         return throwing;
     }
 
     js::Value getPendingException() {
         JS_ASSERT(throwing);
         return exception;
     }
@@ -1296,16 +1316,22 @@ struct JSContext : js::ContextFriendFiel
     }
 
 #ifdef DEBUG
     /*
      * Controls whether a quadratic-complexity assertion is performed during
      * stack iteration; defaults to true.
      */
     bool stackIterAssertionEnabled;
+
+    /*
+     * When greater than zero, it is ok to access non-aliased fields of
+     * ScopeObjects because the accesses are coming from the DebugScopeProxy.
+     */
+    unsigned okToAccessUnaliasedBindings;
 #endif
 
     /*
      * Count of currently active compilations.
      * When there are compilations active for the context, the GC must not
      * purge the ParseMapPool.
      */
     unsigned activeCompilations;
@@ -1332,16 +1358,33 @@ struct JSContext : js::ContextFriendFiel
      * threshold when p is not null. The function takes the pointer and not
      * a boolean flag to minimize the amount of code in its inlined callers.
      */
     JS_FRIEND_API(void) checkMallocGCPressure(void *p);
 }; /* struct JSContext */
 
 namespace js {
 
+class AutoAllowUnaliasedVarAccess
+{
+    JSContext *cx;
+  public:
+    AutoAllowUnaliasedVarAccess(JSContext *cx) : cx(cx) {
+#ifdef DEBUG
+        cx->okToAccessUnaliasedBindings++;
+#endif
+    }
+    ~AutoAllowUnaliasedVarAccess() {
+#ifdef DEBUG
+        JS_ASSERT(cx->okToAccessUnaliasedBindings);
+        cx->okToAccessUnaliasedBindings--;
+#endif
+    }
+};
+
 struct AutoResolving {
   public:
     enum Kind {
         LOOKUP,
         WATCH
     };
 
     AutoResolving(JSContext *cx, JSObject *obj, jsid id, Kind kind = LOOKUP
--- a/js/src/jscntxtinlines.h
+++ b/js/src/jscntxtinlines.h
@@ -536,23 +536,40 @@ JSContext::setCompileOptions(unsigned ne
     JS_ASSERT((newcopts & JSCOMPILEOPTION_MASK) == newcopts);
     if (JS_LIKELY(getCompileOptions() == newcopts))
         return;
     JSVersion version = findVersion();
     JSVersion newVersion = js::OptionFlagsToVersion(newcopts, version);
     maybeOverrideVersion(newVersion);
 }
 
+inline void
+JSContext::assertValidStackDepth(unsigned depth)
+{
+#ifdef DEBUG
+    JS_ASSERT(0 <= regs().sp - fp()->base());
+    JS_ASSERT(depth <= uintptr_t(regs().sp - fp()->base()));
+#endif
+}
 
 inline js::LifoAlloc &
 JSContext::typeLifoAlloc()
 {
     return compartment->typeLifoAlloc;
 }
 
+inline bool
+JSContext::ensureGeneratorStackSpace()
+{
+    bool ok = genStack.reserve(genStack.length() + 1);
+    if (!ok)
+        js_ReportOutOfMemory(this);
+    return ok;
+}
+
 inline void
 JSContext::setPendingException(js::Value v) {
     JS_ASSERT(!IsPoisonedValue(v));
     this->throwing = true;
     this->exception = v;
     js::assertSameCompartment(this, v);
 }
 
--- a/js/src/jsdbgapi.cpp
+++ b/js/src/jsdbgapi.cpp
@@ -560,18 +560,21 @@ JS_GetFrameCallObject(JSContext *cx, JSS
      * in missing scopes, we can expect to find fp's CallObject on 'o'. Note:
      *  - GetDebugScopeForFrame wraps every ScopeObject (missing or not) with
      *    a DebugScopeObject proxy.
      *  - If fp is an eval-in-function, then fp has no callobj of its own and
      *    JS_GetFrameCallObject will return the innermost function's callobj.
      */
     while (o) {
         ScopeObject &scope = o->asDebugScope().scope();
-        if (scope.isCall())
+        if (scope.isCall()) {
+            JS_ASSERT_IF(cx->compartment->debugMode() && fp->isNonEvalFunctionFrame(),
+                         fp == scope.asCall().maybeStackFrame());
             return o;
+        }
         o = o->enclosingScope();
     }
     return NULL;
 }
 
 JS_PUBLIC_API(JSBool)
 JS_GetFrameThis(JSContext *cx, JSStackFrame *fpArg, jsval *thisv)
 {
@@ -797,20 +800,20 @@ GetPropertyDesc(JSContext *cx, JSObject 
 
     if (wasThrowing)
         cx->setPendingException(lastException);
 
     pd->flags |= (shape->enumerable() ? JSPD_ENUMERATE : 0)
               |  (!shape->writable()  ? JSPD_READONLY  : 0)
               |  (!shape->configurable() ? JSPD_PERMANENT : 0);
     pd->spare = 0;
-    if (shape->setter() == CallObject::setArgOp) {
+    if (shape->getter() == CallObject::getArgOp) {
         pd->slot = shape->shortid();
         pd->flags |= JSPD_ARGUMENT;
-    } else if (shape->setter() == CallObject::setVarOp) {
+    } else if (shape->getter() == CallObject::getVarOp) {
         pd->slot = shape->shortid();
         pd->flags |= JSPD_VARIABLE;
     } else {
         pd->slot = 0;
     }
     pd->alias = JSVAL_VOID;
 
     return JS_TRUE;
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -122,17 +122,17 @@ fun_getProperty(JSContext *cx, HandleObj
 
 #ifdef JS_METHODJIT
     if (JSID_IS_ATOM(id, cx->runtime->atomState.callerAtom) && fp && fp->prev()) {
         /*
          * If the frame was called from within an inlined frame, mark the
          * innermost function as uninlineable to expand its frame and allow us
          * to recover its callee object.
          */
-        InlinedSite *inlined;
+        JSInlinedSite *inlined;
         jsbytecode *prevpc = fp->prev()->pcQuadratic(cx->stack, fp, &inlined);
         if (inlined) {
             mjit::JITChunk *chunk = fp->prev()->jit()->chunk(prevpc);
             JSFunction *fun = chunk->inlineFrames()[inlined->inlineIndex].fun;
             fun->script()->uninlineable = true;
             MarkTypeObjectFlags(cx, fun, OBJECT_FLAG_UNINLINEABLE);
         }
     }
@@ -691,17 +691,17 @@ js_fun_apply(JSContext *cx, unsigned arg
         if (!cx->stack.pushInvokeArgs(cx, length, &args))
             return false;
 
         /* Push fval, obj, and aobj's elements as args. */
         args.calleev() = fval;
         args.thisv() = vp[2];
 
         /* Steps 7-8. */
-        cx->fp()->forEachUnaliasedActual(CopyTo(args.array()));
+        cx->fp()->forEachCanonicalActualArg(CopyTo(args.array()));
     } else {
         /* Step 3. */
         if (!vp[3].isObject()) {
             JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_APPLY_ARGS, js_apply_str);
             return false;
         }
 
         /*
--- a/js/src/jsfun.h
+++ b/js/src/jsfun.h
@@ -239,16 +239,19 @@ extern JSFunction *
 js_ValueToFunction(JSContext *cx, const js::Value *vp, unsigned flags);
 
 extern JSObject *
 js_ValueToCallableObject(JSContext *cx, js::Value *vp, unsigned flags);
 
 extern void
 js_ReportIsNotFunction(JSContext *cx, const js::Value *vp, unsigned flags);
 
+extern void
+js_PutCallObject(js::StackFrame *fp, js::CallObject &callobj);
+
 namespace js {
 
 /*
  * Function extended with reserved slots for use by various kinds of functions.
  * Most functions do not have these extensions, but enough are that efficient
  * storage is required (no malloc'ed reserved slots).
  */
 class FunctionExtended : public JSFunction
@@ -270,16 +273,19 @@ JSFunction::toExtended()
 
 inline const js::FunctionExtended *
 JSFunction::toExtended() const
 {
     JS_ASSERT(isExtended());
     return static_cast<const js::FunctionExtended *>(this);
 }
 
+extern void
+js_PutArgsObject(js::StackFrame *fp);
+
 inline bool
 js_IsNamedLambda(JSFunction *fun) { return (fun->flags & JSFUN_LAMBDA) && fun->atom; }
 
 namespace js {
 
 template<XDRMode mode>
 bool
 XDRInterpretedFunction(XDRState<mode> *xdr, JSObject **objp, JSScript *parentScript);
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -3236,17 +3236,17 @@ SweepPhase(JSRuntime *rt, JSGCInvocation
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_FINALIZE_START);
         if (rt->gcFinalizeCallback)
             rt->gcFinalizeCallback(&fop, JSFINALIZE_START, !isFull);
     }
 
     /* Finalize unreachable (key,value) pairs in all weak maps. */
     WeakMapBase::sweepAll(&rt->gcMarker);
-    rt->debugScopes->sweep(rt);
+    rt->debugScopes->sweep();
 
     SweepAtomState(rt);
 
     /* Collect watch points associated with unreachable objects. */
     WatchpointMap::sweepAll(rt);
 
     /* Detach unreachable debuggers and global objects from each other. */
     Debugger::sweepAll(&fop);
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -5187,18 +5187,18 @@ NestingPrologue(JSContext *cx, StackFram
          * functions or any of their transitive inner functions.
          */
         if (!ClearActiveNesting(script)) {
             script->reentrantOuterFunction = true;
             MarkTypeObjectFlags(cx, fp->fun(), OBJECT_FLAG_REENTRANT_FUNCTION);
         }
 
         nesting->activeCall = &fp->callObj();
-        nesting->argArray = Valueify(nesting->activeCall->argArray());
-        nesting->varArray = Valueify(nesting->activeCall->varArray());
+        nesting->argArray = fp->formalArgs();
+        nesting->varArray = fp->slots();
     }
 
     /* Maintain stack frame count for the function. */
     nesting->activeFrames++;
 }
 
 void
 NestingEpilogue(StackFrame *fp)
--- a/js/src/jsinfer.h
+++ b/js/src/jsinfer.h
@@ -18,19 +18,16 @@
 #include "gc/Heap.h"
 #include "js/HashTable.h"
 
 namespace JS {
 struct TypeInferenceSizes;
 }
 
 namespace js {
-
-class CallObject;
-
 namespace types {
 
 /* Type set entry for either a JSObject with singleton type or a non-singleton TypeObject. */
 struct TypeObjectKey {
     static intptr_t keyBits(TypeObjectKey *obj) { return (intptr_t) obj; }
     static TypeObjectKey *getKey(TypeObjectKey *obj) { return obj; }
 };
 
@@ -959,17 +956,17 @@ struct TypeScriptNesting
 
     /* If this is an outer function, list of inner functions. */
     JSScript *children;
 
     /* Link for children list of parent. */
     JSScript *next;
 
     /* If this is an outer function, the most recent activation. */
-    CallObject *activeCall;
+    JSObject *activeCall;
 
     /*
      * If this is an outer function, pointers to the most recent activation's
      * arguments and variables arrays. These could be referring either to stack
      * values in activeCall's frame (if it has not finished yet) or to the
      * internal slots of activeCall (if the frame has finished). Pointers to
      * these fields can be embedded directly in JIT code (though remember to
      * use 'addDependency == true' when calling resolveNameAccess).
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -218,17 +218,16 @@ NoSuchMethod(JSContext *cx, unsigned arg
 
 bool
 js::RunScript(JSContext *cx, JSScript *script, StackFrame *fp)
 {
     JS_ASSERT(script);
     JS_ASSERT(fp == cx->fp());
     JS_ASSERT(fp->script() == script);
     JS_ASSERT_IF(!fp->isGeneratorFrame(), cx->regs().pc == script->code);
-    JS_ASSERT_IF(fp->isEvalFrame(), script->isActiveEval);
 #ifdef JS_METHODJIT_SPEW
     JMCheckLogging();
 #endif
 
     JS_CHECK_RECURSION(cx, return false);
 
     /* FIXME: Once bug 470510 is fixed, make this an assert. */
     if (script->compileAndGo) {
@@ -313,21 +312,26 @@ js::InvokeKernel(JSContext *cx, CallArgs
     if (!TypeMonitorCall(cx, args, construct))
         return false;
 
     /* Get pointer to new frame/slots, prepare arguments. */
     InvokeFrameGuard ifg;
     if (!cx->stack.pushInvokeFrame(cx, args, initial, &ifg))
         return false;
 
+    /* Now that the new frame is rooted, maybe create a call object. */
+    StackFrame *fp = ifg.fp();
+    if (!fp->functionPrologue(cx))
+        return false;
+
     /* Run function until JSOP_STOP, JSOP_RETURN or error. */
-    JSBool ok = RunScript(cx, fun->script(), ifg.fp());
+    JSBool ok = RunScript(cx, fun->script(), fp);
 
     /* Propagate the return value out. */
-    args.rval() = ifg.fp()->returnValue();
+    args.rval() = fp->returnValue();
     JS_ASSERT_IF(ok && construct, !args.rval().isPrimitive());
     return ok;
 }
 
 bool
 js::Invoke(JSContext *cx, const Value &thisv, const Value &fval, unsigned argc, Value *argv,
            Value *rval)
 {
@@ -444,25 +448,36 @@ js::ExecuteKernel(JSContext *cx, JSScrip
     }
 
     ExecuteFrameGuard efg;
     if (!cx->stack.pushExecuteFrame(cx, script, thisv, scopeChain, type, evalInFrame, &efg))
         return false;
 
     if (!script->ensureRanAnalysis(cx, &scopeChain))
         return false;
-    TypeScript::SetThis(cx, script, efg.fp()->thisValue());
+
+    /* Give strict mode eval its own fresh lexical environment. */
+    StackFrame *fp = efg.fp();
+    if (fp->isStrictEvalFrame() && !CallObject::createForStrictEval(cx, fp))
+        return false;
 
     Probes::startExecution(cx, script);
-    bool ok = RunScript(cx, script, efg.fp());
+
+    TypeScript::SetThis(cx, script, fp->thisValue());
+
+    bool ok = RunScript(cx, script, fp);
+
+    if (fp->isStrictEvalFrame())
+        js_PutCallObject(fp, fp->callObj());
+
     Probes::stopExecution(cx, script);
 
     /* Propagate the return value out. */
     if (result)
-        *result = efg.fp()->returnValue();
+        *result = fp->returnValue();
     return ok;
 }
 
 bool
 js::Execute(JSContext *cx, JSScript *script, JSObject &scopeChainArg, Value *rval)
 {
     /* The scope chain could be anything, so innerize just in case. */
     RootedObject scopeChain(cx, &scopeChainArg);
@@ -675,43 +690,43 @@ js::TypeOfValue(JSContext *cx, const Val
  * of the with block with sp + stackIndex.
  */
 static bool
 EnterWith(JSContext *cx, int stackIndex)
 {
     StackFrame *fp = cx->fp();
     Value *sp = cx->regs().sp;
     JS_ASSERT(stackIndex < 0);
-    JS_ASSERT(int(cx->regs().stackDepth()) + stackIndex >= 0);
+    JS_ASSERT(fp->base() <= sp + stackIndex);
 
     RootedObject obj(cx);
     if (sp[-1].isObject()) {
         obj = &sp[-1].toObject();
     } else {
         obj = js_ValueToNonNullObject(cx, sp[-1]);
         if (!obj)
-            return false;
+            return JS_FALSE;
         sp[-1].setObject(*obj);
     }
 
-    WithObject *withobj = WithObject::create(cx, obj, fp->scopeChain(),
-                                             cx->regs().stackDepth() + stackIndex);
+    JSObject *withobj = WithObject::create(cx, obj, fp->scopeChain(),
+                                           sp + stackIndex - fp->base());
     if (!withobj)
-        return false;
-
-    fp->pushOnScopeChain(*withobj);
-    return true;
+        return JS_FALSE;
+
+    fp->setScopeChain(*withobj);
+    return JS_TRUE;
 }
 
 /* Unwind block and scope chains to match the given depth. */
 void
 js::UnwindScope(JSContext *cx, uint32_t stackDepth)
 {
     StackFrame *fp = cx->fp();
-    JS_ASSERT(stackDepth <= cx->regs().stackDepth());
+    JS_ASSERT(fp->base() + stackDepth <= cx->regs().sp);
 
     for (ScopeIter si(fp); !si.done(); si = si.enclosing()) {
         switch (si.type()) {
           case ScopeIter::Block:
             if (si.staticBlock().stackDepth() < stackDepth)
                 return;
             fp->popBlock(cx);
             break;
@@ -730,17 +745,17 @@ js::UnwindScope(JSContext *cx, uint32_t 
 void
 js::UnwindForUncatchableException(JSContext *cx, const FrameRegs &regs)
 {
 
     /* c.f. the regular (catchable) TryNoteIter loop in Interpret. */
     for (TryNoteIter tni(regs); !tni.done(); ++tni) {
         JSTryNote *tn = *tni;
         if (tn->kind == JSTRY_ITER) {
-            Value *sp = regs.spForStackDepth(tn->stackDepth);
+            Value *sp = regs.fp()->base() + tn->stackDepth;
             UnwindIteratorForUncatchableException(cx, &sp[-1].toObject());
         }
     }
 }
 
 TryNoteIter::TryNoteIter(const FrameRegs &regs)
   : regs(regs),
     script(regs.fp()->script()),
@@ -790,17 +805,17 @@ TryNoteIter::settle()
          * invoked the finally blocks.
          *
          * To address this, we make [enditer] always decrease the stack even
          * when its implementation throws an exception. Thus already executed
          * [enditer] and [gosub] opcodes will have try notes with the stack
          * depth exceeding the current one and this condition is what we use to
          * filter them out.
          */
-        if (tn->stackDepth <= regs.stackDepth())
+        if (tn->stackDepth <= regs.sp - regs.fp()->base())
             break;
     }
 }
 
 /*
  * Increment/decrement the value 'v'. The resulting value is stored in *slot.
  * The result of the expression (taking into account prefix/postfix) is stored
  * in *expr.
@@ -827,16 +842,66 @@ DoIncDec(JSContext *cx, JSScript *script
     double sum = d + (cs.format & JOF_INC ? 1 : -1);
     *slot = NumberValue(sum);
     *expr = (cs.format & JOF_POST) ? NumberValue(d) : *slot;
 
     TypeScript::MonitorOverflow(cx, script, pc);
     return true;
 }
 
+static inline void
+CheckLocalAccess(StackFrame *fp, unsigned index, bool aliased = false)
+{
+#ifdef DEBUG
+    if (index < fp->numFixed()) {
+        JS_ASSERT(fp->script()->varIsAliased(index) == aliased);
+    } else {
+        unsigned depth = index - fp->numFixed();
+        for (StaticBlockObject *b = fp->maybeBlockChain(); b; b = b->enclosingBlock()) {
+            if (b->containsVarAtDepth(depth)) {
+                JS_ASSERT(b->isAliased(depth - b->stackDepth()) == aliased);
+                return;
+            }
+        }
+        /*
+         * Unfortunately, strange uses of JSOP_GETLOCAL (e.g., comprehensions
+         * and group assignment) access slots above script->nfixed and not in
+         * any block so we cannot use JS_NOT_REACHED here.
+         */
+    }
+#endif
+}
+
+static inline void
+CheckArgAccess(StackFrame *fp, unsigned index)
+{
+    JS_ASSERT(fp->script()->formalLivesInArgumentsObject(index) ==
+              fp->script()->argsObjAliasesFormals());
+}
+
+/*
+ * This function is temporary. Bug 659577 will change all ALIASEDVAR
+ * access to use the scope chain instead.
+ */
+static inline Value &
+AliasedVar(StackFrame *fp, ScopeCoordinate sc)
+{
+    JSScript *script = fp->script();
+#ifdef DEBUG
+    JS_ASSERT(sc.hops == 0);  /* Temporary */
+    if (script->bindings.bindingIsArg(sc.binding))
+        JS_ASSERT(script->formalLivesInCallObject(script->bindings.bindingToArg(sc.binding)));
+    else
+        CheckLocalAccess(fp, script->bindings.bindingToLocal(sc.binding), true);
+#endif
+    return script->bindings.bindingIsArg(sc.binding)
+           ? fp->formalArg(script->bindings.bindingToArg(sc.binding))
+           : fp->localSlot(script->bindings.bindingToLocal(sc.binding));
+}
+
 #define PUSH_COPY(v)             do { *regs.sp++ = v; assertSameCompartment(cx, regs.sp[-1]); } while (0)
 #define PUSH_COPY_SKIP_CHECK(v)  *regs.sp++ = v
 #define PUSH_NULL()              regs.sp++->setNull()
 #define PUSH_UNDEFINED()         regs.sp++->setUndefined()
 #define PUSH_BOOLEAN(b)          regs.sp++->setBoolean(b)
 #define PUSH_DOUBLE(d)           regs.sp++->setDouble(d)
 #define PUSH_INT32(i)            regs.sp++->setInt32(i)
 #define PUSH_STRING(s)           do { regs.sp++->setString(s); assertSameCompartment(cx, regs.sp[-1]); } while (0)
@@ -1105,18 +1170,16 @@ js::Interpret(JSContext *cx, StackFrame 
 # define END_CASE_LEN3      len = 3; goto advance_pc;
 # define END_CASE_LEN4      len = 4; goto advance_pc;
 # define END_CASE_LEN5      len = 5; goto advance_pc;
 # define END_CASE_LEN6      len = 6; goto advance_pc;
 # define END_CASE_LEN7      len = 7; goto advance_pc;
 # define END_CASE_LEN8      len = 8; goto advance_pc;
 # define END_CASE_LEN9      len = 9; goto advance_pc;
 # define END_CASE_LEN10     len = 10; goto advance_pc;
-# define END_CASE_LEN11     len = 11; goto advance_pc;
-# define END_CASE_LEN12     len = 12; goto advance_pc;
 # define END_VARLEN_CASE    goto advance_pc;
 # define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP)
 # define END_EMPTY_CASES    goto advance_pc_by_one;
 
 #endif /* !JS_THREADED_INTERP */
 
 #define ENABLE_INTERRUPTS() (interruptEnabler.enableInterrupts())
 
@@ -1169,16 +1232,17 @@ js::Interpret(JSContext *cx, StackFrame 
 
 #define RESET_USE_METHODJIT() ((void) 0)
 
 #endif
 
 #define RESTORE_INTERP_VARS()                                                 \
     JS_BEGIN_MACRO                                                            \
         SET_SCRIPT(regs.fp()->script());                                      \
+        argv = regs.fp()->maybeFormalArgs();                                  \
         atoms = FrameAtomBase(cx, regs.fp());                                 \
         JS_ASSERT(&cx->regs() == &regs);                                      \
     JS_END_MACRO
 
 #define RESTORE_INTERP_VARS_CHECK_EXCEPTION()                                 \
     JS_BEGIN_MACRO                                                            \
         RESTORE_INTERP_VARS();                                                \
         if (cx->isExceptionPending())                                         \
@@ -1231,16 +1295,17 @@ js::Interpret(JSContext *cx, StackFrame 
      * single-step mode.
      */
     InterpreterFrames interpreterFrame(cx, &regs, interruptEnabler);
 
     /* Copy in hot values that change infrequently. */
     JSRuntime *const rt = cx->runtime;
     Rooted<JSScript*> script(cx);
     SET_SCRIPT(regs.fp()->script());
+    Value *argv = regs.fp()->maybeFormalArgs();
     CHECK_INTERRUPT_HANDLER();
 
     /*
      * Pool of rooters for use in this interpreter frame. References to these
      * are used for local variables within interpreter cases. This avoids
      * creating new rooters each time an interpreter case is entered, and also
      * correctness pitfalls due to incorrect compilation of destructor calls
      * around computed gotos.
@@ -1265,18 +1330,18 @@ js::Interpret(JSContext *cx, StackFrame 
      * the atom map to turn frequently executed LOAD_ATOM into simple array
      * access. For less frequent object loads we have to recover the segment
      * from atoms pointer first.
      */
     HeapPtrAtom *atoms = script->atoms;
 
 #if JS_HAS_GENERATORS
     if (JS_UNLIKELY(regs.fp()->isGeneratorFrame())) {
-        JS_ASSERT(size_t(regs.pc - script->code) <= script->length);
-        JS_ASSERT(regs.stackDepth() <= script->nslots);
+        JS_ASSERT((size_t) (regs.pc - script->code) <= script->length);
+        JS_ASSERT((size_t) (regs.sp - regs.fp()->base()) <= StackDepth(script));
 
         /*
          * To support generator_throw and to catch ignored exceptions,
          * fail if cx->isExceptionPending() is true.
          */
         if (cx->isExceptionPending())
             goto error;
     }
@@ -1284,17 +1349,17 @@ js::Interpret(JSContext *cx, StackFrame 
 
     /* State communicated between non-local jumps: */
     bool interpReturnOK;
 
     /* Don't call the script prologue if executing between Method and Trace JIT. */
     if (interpMode == JSINTERP_NORMAL) {
         StackFrame *fp = regs.fp();
         JS_ASSERT_IF(!fp->isGeneratorFrame(), regs.pc == script->code);
-        if (!fp->isGeneratorFrame() && !fp->prologue(cx, UseNewTypeAtEntry(cx, fp)))
+        if (!ScriptPrologueOrGeneratorResume(cx, fp, UseNewTypeAtEntry(cx, fp)))
             goto error;
         if (cx->compartment->debugMode()) {
             JSTrapStatus status = ScriptDebugPrologue(cx, fp);
             switch (status) {
               case JSTRAP_CONTINUE:
                 break;
               case JSTRAP_RETURN:
                 interpReturnOK = true;
@@ -1519,22 +1584,35 @@ BEGIN_CASE(JSOP_UNDEFINED)
     PUSH_UNDEFINED();
 END_CASE(JSOP_UNDEFINED)
 
 BEGIN_CASE(JSOP_POP)
     regs.sp--;
 END_CASE(JSOP_POP)
 
 BEGIN_CASE(JSOP_POPN)
-    JS_ASSERT(GET_UINT16(regs.pc) <= regs.stackDepth());
+{
     regs.sp -= GET_UINT16(regs.pc);
 #ifdef DEBUG
-    if (StaticBlockObject *block = regs.fp()->maybeBlockChain())
-        JS_ASSERT(regs.stackDepth() >= block->stackDepth() + block->slotCount());
+    JS_ASSERT(regs.fp()->base() <= regs.sp);
+    StaticBlockObject *block = regs.fp()->maybeBlockChain();
+    JS_ASSERT_IF(block,
+                 block->stackDepth() + block->slotCount()
+                 <= (size_t) (regs.sp - regs.fp()->base()));
+    for (JSObject *obj = regs.fp()->scopeChain(); obj; obj = obj->enclosingScope()) {
+        if (!obj->isBlock() || !obj->isWith())
+            continue;
+        if (obj->getPrivate() != js_FloatingFrameIfGenerator(cx, regs.fp()))
+            break;
+        JS_ASSERT(regs.fp()->base() + obj->asBlock().stackDepth()
+                  + (obj->isBlock() ? obj->asBlock().slotCount() : 1)
+                  <= regs.sp);
+    }
 #endif
+}
 END_CASE(JSOP_POPN)
 
 BEGIN_CASE(JSOP_SETRVAL)
 BEGIN_CASE(JSOP_POPV)
     POP_RETURN_VALUE();
 END_CASE(JSOP_POPV)
 
 BEGIN_CASE(JSOP_ENTERWITH)
@@ -1571,22 +1649,24 @@ BEGIN_CASE(JSOP_STOP)
      * false after the inline_return label.
      */
     CHECK_BRANCH();
 
     interpReturnOK = true;
     if (entryFrame != regs.fp())
   inline_return:
     {
+        AssertValidFunctionScopeChainAtExit(regs.fp());
+
         if (cx->compartment->debugMode())
             interpReturnOK = ScriptDebugEpilogue(cx, regs.fp(), interpReturnOK);
 
-        regs.fp()->epilogue(cx);
-
-        /* The JIT inlines the epilogue. */
+        interpReturnOK = ScriptEpilogue(cx, regs.fp(), interpReturnOK);
+
+        /* The JIT inlines ScriptEpilogue. */
 #ifdef JS_METHODJIT
   jit_return:
 #endif
 
         /* The results of lowered call/apply frames need to be shifted. */
         bool shiftResult = regs.fp()->loweredCallOrApply();
 
         cx->stack.popInlineFrame(regs);
@@ -1609,17 +1689,17 @@ BEGIN_CASE(JSOP_STOP)
             len = JSOP_CALL_LENGTH;
             DO_NEXT_OP(len);
         }
 
         /* Increment pc so that |sp - fp->slots == ReconstructStackDepth(pc)|. */
         regs.pc += JSOP_CALL_LENGTH;
         goto error;
     } else {
-        JS_ASSERT(regs.stackDepth() == 0);
+        JS_ASSERT(regs.sp == regs.fp()->base());
     }
     interpReturnOK = true;
     goto exit;
 }
 
 BEGIN_CASE(JSOP_DEFAULT)
     regs.sp--;
     /* FALL THROUGH */
@@ -1726,90 +1806,90 @@ BEGIN_CASE(JSOP_IN)
     TRY_BRANCH_AFTER_COND(cond, 2);
     regs.sp--;
     regs.sp[-1].setBoolean(cond);
 }
 END_CASE(JSOP_IN)
 
 BEGIN_CASE(JSOP_ITER)
 {
-    JS_ASSERT(regs.stackDepth() >= 1);
+    JS_ASSERT(regs.sp > regs.fp()->base());
     uint8_t flags = GET_UINT8(regs.pc);
     if (!ValueToIterator(cx, flags, &regs.sp[-1]))
         goto error;
     CHECK_INTERRUPT_HANDLER();
     JS_ASSERT(!regs.sp[-1].isPrimitive());
 }
 END_CASE(JSOP_ITER)
 
 BEGIN_CASE(JSOP_MOREITER)
 {
-    JS_ASSERT(regs.stackDepth() >= 1);
+    JS_ASSERT(regs.sp - 1 >= regs.fp()->base());
     JS_ASSERT(regs.sp[-1].isObject());
     PUSH_NULL();
     bool cond;
     if (!IteratorMore(cx, &regs.sp[-2].toObject(), &cond, &regs.sp[-1]))
         goto error;
     CHECK_INTERRUPT_HANDLER();
     regs.sp[-1].setBoolean(cond);
 }
 END_CASE(JSOP_MOREITER)
 
 BEGIN_CASE(JSOP_ITERNEXT)
 {
-    JS_ASSERT(regs.stackDepth() >= GET_INT8(regs.pc));
     Value *itervp = regs.sp - GET_INT8(regs.pc);
+    JS_ASSERT(itervp >= regs.fp()->base());
     JS_ASSERT(itervp->isObject());
     PUSH_NULL();
     if (!IteratorNext(cx, &itervp->toObject(), &regs.sp[-1]))
         goto error;
 }
 END_CASE(JSOP_ITERNEXT)
 
 BEGIN_CASE(JSOP_ENDITER)
 {
-    JS_ASSERT(regs.stackDepth() >= 1);
+    JS_ASSERT(regs.sp - 1 >= regs.fp()->base());
     bool ok = CloseIterator(cx, &regs.sp[-1].toObject());
     regs.sp--;
     if (!ok)
         goto error;
 }
 END_CASE(JSOP_ENDITER)
 
 BEGIN_CASE(JSOP_DUP)
 {
-    JS_ASSERT(regs.stackDepth() >= 1);
+    JS_ASSERT(regs.sp > regs.fp()->base());
     const Value &rref = regs.sp[-1];
     PUSH_COPY(rref);
 }
 END_CASE(JSOP_DUP)
 
 BEGIN_CASE(JSOP_DUP2)
 {
-    JS_ASSERT(regs.stackDepth() >= 2);
+    JS_ASSERT(regs.sp - 2 >= regs.fp()->base());
     const Value &lref = regs.sp[-2];
     const Value &rref = regs.sp[-1];
     PUSH_COPY(lref);
     PUSH_COPY(rref);
 }
 END_CASE(JSOP_DUP2)
 
 BEGIN_CASE(JSOP_SWAP)
 {
-    JS_ASSERT(regs.stackDepth() >= 2);
+    JS_ASSERT(regs.sp - 2 >= regs.fp()->base());
     Value &lref = regs.sp[-2];
     Value &rref = regs.sp[-1];
     lref.swap(rref);
 }
 END_CASE(JSOP_SWAP)
 
 BEGIN_CASE(JSOP_PICK)
 {
     unsigned i = GET_UINT8(regs.pc);
-    JS_ASSERT(regs.stackDepth() >= i + 1);
+    JS_ASSERT(regs.sp - (i + 1) >= regs.fp()->base());
     Value lval = regs.sp[-int(i + 1)];
     memmove(regs.sp - (i + 1), regs.sp - i, sizeof(Value) * i);
     regs.sp[-1] = lval;
 }
 END_CASE(JSOP_PICK)
 
 BEGIN_CASE(JSOP_SETCONST)
 {
@@ -2285,38 +2365,32 @@ BEGIN_CASE(JSOP_ALIASEDVARINC)
 END_CASE(JSOP_ALIASEDVARINC)
 
 BEGIN_CASE(JSOP_DECARG)
 BEGIN_CASE(JSOP_ARGDEC)
 BEGIN_CASE(JSOP_INCARG)
 BEGIN_CASE(JSOP_ARGINC)
 {
     unsigned i = GET_ARGNO(regs.pc);
-    if (script->argsObjAliasesFormals()) {
-        const Value &arg = regs.fp()->argsObj().arg(i);
-        Value v;
-        if (!DoIncDec(cx, script, regs.pc, arg, &v, &regs.sp[0]))
-            goto error;
-        regs.fp()->argsObj().setArg(i, v);
-    } else {
-        Value &arg = regs.fp()->unaliasedFormal(i);
-        if (!DoIncDec(cx, script, regs.pc, arg, &arg, &regs.sp[0]))
-            goto error;
-    }
+    CheckArgAccess(regs.fp(), i);
+    Value &arg = regs.fp()->formalArg(i);
+    if (!DoIncDec(cx, script, regs.pc, arg, &arg, &regs.sp[0]))
+        goto error;
     regs.sp++;
 }
 END_CASE(JSOP_ARGINC);
 
 BEGIN_CASE(JSOP_DECLOCAL)
 BEGIN_CASE(JSOP_LOCALDEC)
 BEGIN_CASE(JSOP_INCLOCAL)
 BEGIN_CASE(JSOP_LOCALINC)
 {
     unsigned i = GET_SLOTNO(regs.pc);
-    Value &local = regs.fp()->unaliasedLocal(i);
+    CheckLocalAccess(regs.fp(), i);
+    Value &local = regs.fp()->localSlot(i);
     if (!DoIncDec(cx, script, regs.pc, local, &local, &regs.sp[0]))
         goto error;
     regs.sp++;
 }
 END_CASE(JSOP_LOCALINC)
 
 BEGIN_CASE(JSOP_THIS)
     if (!ComputeThis(cx, regs.fp()))
@@ -2417,22 +2491,23 @@ BEGIN_CASE(JSOP_FUNAPPLY)
     if (!GuardFunApplySpeculation(cx, regs))
         goto error;
     /* FALL THROUGH */
 
 BEGIN_CASE(JSOP_NEW)
 BEGIN_CASE(JSOP_CALL)
 BEGIN_CASE(JSOP_FUNCALL)
 {
-    JS_ASSERT(regs.stackDepth() >= 2 + GET_ARGC(regs.pc));
     CallArgs args = CallArgsFromSp(GET_ARGC(regs.pc), regs.sp);
+    JS_ASSERT(args.base() >= regs.fp()->base());
 
     bool construct = (*regs.pc == JSOP_NEW);
 
     RootedFunction &fun = rootFunction0;
+
     /* Don't bother trying to fast-path calls to scripted non-constructors. */
     if (!IsFunctionObject(args.calleev(), fun.address()) || !fun->isInterpretedConstructor()) {
         if (construct) {
             if (!InvokeConstructorKernel(cx, args))
                 goto error;
         } else {
             if (!InvokeKernel(cx, args))
                 goto error;
@@ -2456,16 +2531,20 @@ BEGIN_CASE(JSOP_FUNCALL)
         JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_CLEARED_SCOPE);
         goto error;
     }
 
     if (!cx->stack.pushInlineFrame(cx, regs, args, *fun, newScript, initial))
         goto error;
 
     RESTORE_INTERP_VARS();
+
+    if (!regs.fp()->functionPrologue(cx))
+        goto error;
+
     RESET_USE_METHODJIT();
 
     bool newType = cx->typeInferenceEnabled() && UseNewType(cx, script, regs.pc);
 
 #ifdef JS_METHODJIT
     if (!newType) {
         /* Try to ensure methods are method JIT'd.  */
         mjit::CompileStatus status = mjit::CanMethodJIT(cx, script, script->code,
@@ -2478,17 +2557,17 @@ BEGIN_CASE(JSOP_FUNCALL)
             CHECK_PARTIAL_METHODJIT(status);
             interpReturnOK = mjit::JaegerStatusToSuccess(status);
             CHECK_INTERRUPT_HANDLER();
             goto jit_return;
         }
     }
 #endif
 
-    if (!regs.fp()->prologue(cx, newType))
+    if (!ScriptPrologue(cx, regs.fp(), newType))
         goto error;
 
     if (cx->compartment->debugMode()) {
         switch (ScriptDebugPrologue(cx, regs.fp())) {
           case JSTRAP_CONTINUE:
             break;
           case JSTRAP_RETURN:
             interpReturnOK = true;
@@ -2735,17 +2814,17 @@ BEGIN_CASE(JSOP_ACTUALSFILLED)
 {
     PUSH_INT32(JS_MAX(regs.fp()->numActualArgs(), GET_UINT16(regs.pc)));
 }
 END_CASE(JSOP_ACTUALSFILLED)
 
 BEGIN_CASE(JSOP_ARGUMENTS)
     JS_ASSERT(!regs.fp()->fun()->hasRest());
     if (script->needsArgsObj()) {
-        ArgumentsObject *obj = ArgumentsObject::createExpected(cx, regs.fp());
+        ArgumentsObject *obj = ArgumentsObject::create(cx, regs.fp());
         if (!obj)
             goto error;
         PUSH_COPY(ObjectValue(*obj));
     } else {
         PUSH_COPY(MagicValue(JS_OPTIMIZED_ARGUMENTS));
     }
 END_CASE(JSOP_ARGUMENTS)
 
@@ -2759,69 +2838,69 @@ BEGIN_CASE(JSOP_REST)
         goto error;
 }
 END_CASE(JSOP_REST)
 
 BEGIN_CASE(JSOP_CALLALIASEDVAR)
 BEGIN_CASE(JSOP_GETALIASEDVAR)
 {
     ScopeCoordinate sc = ScopeCoordinate(regs.pc);
-    PUSH_COPY(regs.fp()->aliasedVarScope(sc).aliasedVar(sc));
+    Value &var = AliasedVar(regs.fp(), sc);
+    PUSH_COPY(var);
 }
 END_CASE(JSOP_GETALIASEDVAR)
 
 BEGIN_CASE(JSOP_SETALIASEDVAR)
 {
     ScopeCoordinate sc = ScopeCoordinate(regs.pc);
-    regs.fp()->aliasedVarScope(sc).setAliasedVar(sc, regs.sp[-1]);
+    Value &var = AliasedVar(regs.fp(), sc);
+    var = regs.sp[-1];
 }
 END_CASE(JSOP_SETALIASEDVAR)
 
 BEGIN_CASE(JSOP_GETARG)
 BEGIN_CASE(JSOP_CALLARG)
 {
     unsigned i = GET_ARGNO(regs.pc);
-    if (script->argsObjAliasesFormals())
-        PUSH_COPY(regs.fp()->argsObj().arg(i));
-    else
-        PUSH_COPY(regs.fp()->unaliasedFormal(i));
+    CheckArgAccess(regs.fp(), i);
+    PUSH_COPY(regs.fp()->formalArg(i));
 }
 END_CASE(JSOP_GETARG)
 
 BEGIN_CASE(JSOP_SETARG)
 {
     unsigned i = GET_ARGNO(regs.pc);
-    if (script->argsObjAliasesFormals())
-        regs.fp()->argsObj().setArg(i, regs.sp[-1]);
-    else
-        regs.fp()->unaliasedFormal(i) = regs.sp[-1];
+    CheckArgAccess(regs.fp(), i);
+    regs.fp()->formalArg(i) = regs.sp[-1];
 }
 END_CASE(JSOP_SETARG)
 
 BEGIN_CASE(JSOP_GETLOCAL)
 BEGIN_CASE(JSOP_CALLLOCAL)
 {
     unsigned i = GET_SLOTNO(regs.pc);
-    PUSH_COPY_SKIP_CHECK(regs.fp()->unaliasedLocal(i));
+    CheckLocalAccess(regs.fp(), i);
+    PUSH_COPY_SKIP_CHECK(regs.fp()->localSlot(i));
 
     /*
      * Skip the same-compartment assertion if the local will be immediately
      * popped. We do not guarantee sync for dead locals when coming in from the
      * method JIT, and a GETLOCAL followed by POP is not considered to be
      * a use of the variable.
      */
     if (regs.pc[JSOP_GETLOCAL_LENGTH] != JSOP_POP)
         assertSameCompartment(cx, regs.sp[-1]);
 }
 END_CASE(JSOP_GETLOCAL)
 
 BEGIN_CASE(JSOP_SETLOCAL)
 {
     unsigned i = GET_SLOTNO(regs.pc);
-    regs.fp()->unaliasedLocal(i) = regs.sp[-1];
+    CheckLocalAccess(regs.fp(), i);
+    regs.fp()->localSlot(i) = regs.sp[-1];
 }
 END_CASE(JSOP_SETLOCAL)
 
 BEGIN_CASE(JSOP_DEFCONST)
 BEGIN_CASE(JSOP_DEFVAR)
 {
     PropertyName *dn = atoms[GET_UINT32_INDEX(regs.pc)]->asPropertyName();
 
@@ -2958,17 +3037,17 @@ BEGIN_CASE(JSOP_LAMBDA)
 
     JS_ASSERT(obj->getProto());
     PUSH_OBJECT(*obj);
 }
 END_CASE(JSOP_LAMBDA)
 
 BEGIN_CASE(JSOP_CALLEE)
     JS_ASSERT(regs.fp()->isNonEvalFunctionFrame());
-    PUSH_COPY(regs.fp()->calleev());
+    PUSH_COPY(argv[-2]);
 END_CASE(JSOP_CALLEE)
 
 BEGIN_CASE(JSOP_GETTER)
 BEGIN_CASE(JSOP_SETTER)
 {
     JSOp op2 = JSOp(*++regs.pc);
     RootedId &id = rootId0;
     Value rval;
@@ -2990,27 +3069,28 @@ BEGIN_CASE(JSOP_SETTER)
         id = JSID_VOID;
         i = -2;
       gs_pop_lval:
         FETCH_OBJECT(cx, i - 1, obj);
         break;
 
       case JSOP_INITPROP:
       {
-        JS_ASSERT(regs.stackDepth() >= 2);
+        JS_ASSERT(regs.sp - regs.fp()->base() >= 2);
         rval = regs.sp[-1];
         i = -1;
         PropertyName *name;
         LOAD_NAME(0, name);
         id = NameToId(name);
         goto gs_get_lval;
       }
       default:
         JS_ASSERT(op2 == JSOP_INITELEM);
-        JS_ASSERT(regs.stackDepth() >= 3);
+
+        JS_ASSERT(regs.sp - regs.fp()->base() >= 3);
         rval = regs.sp[-1];
         id = JSID_VOID;
         i = -2;
       gs_get_lval:
       {
         const Value &lref = regs.sp[i-1];
         JS_ASSERT(lref.isObject());
         obj = &lref.toObject();
@@ -3118,25 +3198,25 @@ BEGIN_CASE(JSOP_NEWOBJECT)
 
     CHECK_INTERRUPT_HANDLER();
 }
 END_CASE(JSOP_NEWOBJECT)
 
 BEGIN_CASE(JSOP_ENDINIT)
 {
     /* FIXME remove JSOP_ENDINIT bug 588522 */
-    JS_ASSERT(regs.stackDepth() >= 1);
+    JS_ASSERT(regs.sp - regs.fp()->base() >= 1);
     JS_ASSERT(regs.sp[-1].isObject());
 }
 END_CASE(JSOP_ENDINIT)
 
 BEGIN_CASE(JSOP_INITPROP)
 {
     /* Load the property's initial value into rval. */
-    JS_ASSERT(regs.stackDepth() >= 2);
+    JS_ASSERT(regs.sp - regs.fp()->base() >= 2);
     Value rval = regs.sp[-1];
 
     /* Load the object being initialized into lval/obj. */
     RootedObject &obj = rootObject0;
     obj = &regs.sp[-2].toObject();
     JS_ASSERT(obj->isObject());
 
     RootedId &id = rootId0;
@@ -3154,17 +3234,17 @@ BEGIN_CASE(JSOP_INITPROP)
 
     regs.sp--;
 }
 END_CASE(JSOP_INITPROP);
 
 BEGIN_CASE(JSOP_INITELEM)
 {
     /* Pop the element's value into rval. */
-    JS_ASSERT(regs.stackDepth() >= 3);
+    JS_ASSERT(regs.sp - regs.fp()->base() >= 3);
     const Value &rref = regs.sp[-1];
 
     RootedObject &obj = rootObject0;
 
     /* Find the object being initialized at top of stack. */
     const Value &lref = regs.sp[-3];
     JS_ASSERT(lref.isObject());
     obj = &lref.toObject();
@@ -3640,93 +3720,100 @@ END_CASE(JSOP_GETFUNNS)
 #endif /* JS_HAS_XML_SUPPORT */
 
 BEGIN_CASE(JSOP_ENTERBLOCK)
 BEGIN_CASE(JSOP_ENTERLET0)
 BEGIN_CASE(JSOP_ENTERLET1)
 {
     StaticBlockObject &blockObj = script->getObject(GET_UINT32_INDEX(regs.pc))->asStaticBlock();
 
-    if (op == JSOP_ENTERBLOCK) {
-        JS_ASSERT(regs.stackDepth() == blockObj.stackDepth());
-        JS_ASSERT(regs.stackDepth() + blockObj.slotCount() <= script->nslots);
-        Value *vp = regs.sp + blockObj.slotCount();
-        SetValueRangeToUndefined(regs.sp, vp);
-        regs.sp = vp;
-    }
-
     /* Clone block iff there are any closed-over variables. */
     if (!regs.fp()->pushBlock(cx, blockObj))
         goto error;
+
+    if (op == JSOP_ENTERBLOCK) {
+        JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() == regs.sp);
+        Value *vp = regs.sp + blockObj.slotCount();
+        JS_ASSERT(regs.sp < vp);
+        JS_ASSERT(vp <= regs.fp()->slots() + script->nslots);
+        SetValueRangeToUndefined(regs.sp, vp);
+        regs.sp = vp;
+    } else if (op == JSOP_ENTERLET0) {
+        JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
+                  == regs.sp);
+    } else if (op == JSOP_ENTERLET1) {
+        JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
+                  == regs.sp - 1);
+    }
 }
 END_CASE(JSOP_ENTERBLOCK)
 
 BEGIN_CASE(JSOP_LEAVEBLOCK)
 BEGIN_CASE(JSOP_LEAVEFORLETIN)
 BEGIN_CASE(JSOP_LEAVEBLOCKEXPR)
 {
     DebugOnly<uint32_t> blockDepth = regs.fp()->blockChain().stackDepth();
 
     regs.fp()->popBlock(cx);
 
     if (op == JSOP_LEAVEBLOCK) {
         /* Pop the block's slots. */
         regs.sp -= GET_UINT16(regs.pc);
-        JS_ASSERT(regs.stackDepth() == blockDepth);
+        JS_ASSERT(regs.fp()->base() + blockDepth == regs.sp);
     } else if (op == JSOP_LEAVEBLOCKEXPR) {
         /* Pop the block's slots maintaining the topmost expr. */
         Value *vp = &regs.sp[-1];
         regs.sp -= GET_UINT16(regs.pc);
-        JS_ASSERT(regs.stackDepth() == blockDepth + 1);
+        JS_ASSERT(regs.fp()->base() + blockDepth == regs.sp - 1);
         regs.sp[-1] = *vp;
     } else {
         /* Another op will pop; nothing to do here. */
         len = JSOP_LEAVEFORLETIN_LENGTH;
         DO_NEXT_OP(len);
     }
 }
 END_CASE(JSOP_LEAVEBLOCK)
 
 #if JS_HAS_GENERATORS
 BEGIN_CASE(JSOP_GENERATOR)
 {
     JS_ASSERT(!cx->isExceptionPending());
-    regs.fp()->initGeneratorFrame();
     regs.pc += JSOP_GENERATOR_LENGTH;
     JSObject *obj = js_NewGenerator(cx);
     if (!obj)
         goto error;
     regs.fp()->setReturnValue(ObjectValue(*obj));
     interpReturnOK = true;
     if (entryFrame != regs.fp())
         goto inline_return;
     goto exit;
 }
 
 BEGIN_CASE(JSOP_YIELD)
     JS_ASSERT(!cx->isExceptionPending());
     JS_ASSERT(regs.fp()->isNonEvalFunctionFrame());
-    if (cx->innermostGenerator()->state == JSGEN_CLOSING) {
-        js_ReportValueError(cx, JSMSG_BAD_GENERATOR_YIELD, JSDVG_SEARCH_STACK,
-                            ObjectValue(regs.fp()->callee()), NULL);
+    if (cx->generatorFor(regs.fp())->state == JSGEN_CLOSING) {
+        js_ReportValueError(cx, JSMSG_BAD_GENERATOR_YIELD,
+                            JSDVG_SEARCH_STACK, argv[-2], NULL);
         goto error;
     }
     regs.fp()->setReturnValue(regs.sp[-1]);
     regs.fp()->setYielding();
     regs.pc += JSOP_YIELD_LENGTH;
     interpReturnOK = true;
     goto exit;
 
 BEGIN_CASE(JSOP_ARRAYPUSH)
 {
     uint32_t slot = GET_UINT16(regs.pc);
     JS_ASSERT(script->nfixed <= slot);
     JS_ASSERT(slot < script->nslots);
+    CheckLocalAccess(regs.fp(), slot);
     RootedObject &obj = rootObject0;
-    obj = &regs.fp()->unaliasedLocal(slot).toObject();
+    obj = &regs.fp()->slots()[slot].toObject();
     if (!js_NewbornArrayPush(cx, obj, regs.sp[-1]))
         goto error;
     regs.sp--;
 }
 END_CASE(JSOP_ARRAYPUSH)
 #endif /* JS_HAS_GENERATORS */
 
 #if JS_THREADED_INTERP
@@ -3832,17 +3919,17 @@ END_CASE(JSOP_ARRAYPUSH)
             UnwindScope(cx, tn->stackDepth);
 
             /*
             * Set pc to the first bytecode after the try note to point
              * to the beginning of catch or finally or to [enditer] closing
              * the for-in loop.
              */
             regs.pc = (script)->main() + tn->start + tn->length;
-            regs.sp = regs.spForStackDepth(tn->stackDepth);
+            regs.sp = regs.fp()->base() + tn->stackDepth;
 
             switch (tn->kind) {
               case JSTRY_CATCH:
                   JS_ASSERT(*regs.pc == JSOP_ENTERBLOCK);
 
 #if JS_HAS_GENERATORS
                 /* Catch cannot intercept the closing of a generator. */
                   if (JS_UNLIKELY(cx->getPendingException().isMagic(JS_GENERATOR_CLOSING)))
@@ -3902,20 +3989,29 @@ END_CASE(JSOP_ARRAYPUSH)
     regs.setToEndOfScript();
 
     if (entryFrame != regs.fp())
         goto inline_return;
 
   exit:
     if (cx->compartment->debugMode())
         interpReturnOK = ScriptDebugEpilogue(cx, regs.fp(), interpReturnOK);
-    if (!regs.fp()->isGeneratorFrame())
-        regs.fp()->epilogue(cx);
+    interpReturnOK = ScriptEpilogueOrGeneratorYield(cx, regs.fp(), interpReturnOK);
     regs.fp()->setFinishedInInterpreter();
 
+#ifdef DEBUG
+    JS_ASSERT(entryFrame == regs.fp());
+    if (regs.fp()->isFunctionFrame())
+        AssertValidFunctionScopeChainAtExit(regs.fp());
+    else if (regs.fp()->isEvalFrame())
+        AssertValidEvalFrameScopeChainAtExit(regs.fp());
+    else if (!regs.fp()->isGeneratorFrame())
+        JS_ASSERT(!regs.fp()->scopeChain()->isScope());
+#endif
+
 #ifdef JS_METHODJIT
     /*
      * This path is used when it's guaranteed the method can be finished
      * inside the JIT.
      */
   leave_on_safe_point:
 #endif
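
A note on the exception path restored above: TryNoteIter finds the innermost try note covering the faulting pc, after which the interpreter resets pc to the first op past the guarded range and truncates the operand stack to the note's recorded depth. A minimal standalone sketch of that recovery step, with illustrative field names rather than the real JSTryNote layout:

    #include <cstdint>
    #include <vector>

    // Sketch only: find a handler whose guarded bytecode range covers pcOffset,
    // then rewind the registers the way the interpreter's error path does.
    struct TryNoteSketch {
        uint32_t start, length;   // guarded bytecode range, relative to main()
        uint32_t stackDepth;      // operand-stack depth expected at the handler
    };

    struct RegsSketch { uint32_t pcOffset, spDepth; };

    static bool RecoverAtHandler(const std::vector<TryNoteSketch> &notes, RegsSketch &regs) {
        for (const TryNoteSketch &tn : notes) {
            if (regs.pcOffset - tn.start < tn.length) {   // unsigned trick: start <= pc < start+length
                regs.pcOffset = tn.start + tn.length;     // first bytecode after the try note
                regs.spDepth  = tn.stackDepth;            // discard values pushed inside the try
                return true;
            }
        }
        return false;                                     // no handler here; keep unwinding
    }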
 
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -13,16 +13,41 @@
 #include "jsprvtd.h"
 #include "jspubtd.h"
 #include "jsopcode.h"
 
 #include "vm/Stack.h"
 
 namespace js {
 
+/*
+ * ScriptPrologue/ScriptEpilogue must be called in pairs. ScriptPrologue
+ * must be called before the script executes. ScriptEpilogue must be called
+ * after the script returns or exits via exception.
+ */
+
+inline bool
+ScriptPrologue(JSContext *cx, StackFrame *fp, JSScript *script);
+
+inline bool
+ScriptEpilogue(JSContext *cx, StackFrame *fp, bool ok);
+
+/*
+ * It is not valid to call ScriptPrologue when a generator is resumed or to
+ * call ScriptEpilogue when a generator yields. However, the debugger still
+ * needs LIFO notification of generator start/stop. This pair of functions does
+ * the right thing based on the state of 'fp'.
+ */
+
+inline bool
+ScriptPrologueOrGeneratorResume(JSContext *cx, StackFrame *fp);
+
+inline bool
+ScriptEpilogueOrGeneratorYield(JSContext *cx, StackFrame *fp, bool ok);
+
 /* Implemented in jsdbgapi: */
 
 /*
  * Announce to the debugger that the thread has entered a new JavaScript frame,
  * |fp|. Call whatever hooks have been registered to observe new frames, and
  * return a JSTrapStatus code indicating how execution should proceed:
  *
  * - JSTRAP_CONTINUE: Continue execution normally.
@@ -244,16 +269,19 @@ UnwindScope(JSContext *cx, uint32_t stac
  * just preserving the basic engine stack invariants.
  */
 extern void
 UnwindForUncatchableException(JSContext *cx, const FrameRegs &regs);
 
 extern bool
 OnUnknownMethod(JSContext *cx, HandleObject obj, Value idval, Value *vp);
 
+inline void
+AssertValidFunctionScopeChainAtExit(StackFrame *fp);
+
 class TryNoteIter
 {
     const FrameRegs &regs;
     JSScript *script;
     uint32_t pcOffset;
     JSTryNote *tn, *tnEnd;
     void settle();
   public:
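
The pairing contract described in the comment block above is the part this backout restores: one prologue/epilogue pair per script run, with generator resume and yield each opting out of one side. A minimal sketch of how the four calls compose, using made-up frame fields rather than StackFrame's real flags:

    // Hypothetical stand-ins; the real StackFrame tracks much more state.
    struct FrameSketch {
        bool generatorResume;   // resuming a generator: prologue must be skipped
        bool yielding;          // yielding from a generator: epilogue must be skipped
    };

    static bool Prologue(FrameSketch &)          { return true; }  // create |this|, probes, ...
    static bool Epilogue(FrameSketch &, bool ok) { return ok; }    // fix up constructor rval, probes, ...

    static bool PrologueOrGeneratorResume(FrameSketch &fp) {
        return fp.generatorResume ? true : Prologue(fp);
    }

    static bool EpilogueOrGeneratorYield(FrameSketch &fp, bool ok) {
        return fp.yielding ? ok : Epilogue(fp, ok);
    }

    // Each successful prologue is matched by exactly one epilogue, whether the
    // script returns normally or exits via an exception.
    static bool RunSketch(FrameSketch &fp) {
        if (!PrologueOrGeneratorResume(fp))
            return false;
        bool ok = true;   // ... interpret bytecode ...
        return EpilogueOrGeneratorYield(fp, ok);
    }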
--- a/js/src/jsinterpinlines.h
+++ b/js/src/jsinterpinlines.h
@@ -416,23 +416,130 @@ DefVarOrConstOperation(JSContext *cx, Ha
             }
             return false;
         }
     }
 
     return true;
 }
 
+inline bool
+FunctionNeedsPrologue(JSContext *cx, JSFunction *fun)
+{
+    /* Heavyweight functions need call objects created. */
+    if (fun->isHeavyweight())
+        return true;
+
+    /* Outer and inner functions need to preserve nesting invariants. */
+    if (cx->typeInferenceEnabled() && fun->script()->nesting())
+        return true;
+
+    return false;
+}
+
+inline bool
+ScriptPrologue(JSContext *cx, StackFrame *fp, bool newType)
+{
+    JS_ASSERT_IF(fp->isNonEvalFunctionFrame() && fp->fun()->isHeavyweight(), fp->hasCallObj());
+
+    if (fp->isConstructing()) {
+        JSObject *obj = js_CreateThisForFunction(cx, RootedObject(cx, &fp->callee()), newType);
+        if (!obj)
+            return false;
+        fp->functionThis().setObject(*obj);
+    }
+
+    Probes::enterJSFun(cx, fp->maybeFun(), fp->script());
+
+    return true;
+}
+
+inline bool
+ScriptEpilogue(JSContext *cx, StackFrame *fp, bool ok)
+{
+    Probes::exitJSFun(cx, fp->maybeFun(), fp->script());
+
+    /*
+     * If inline-constructing, replace primitive rval with the new object
+     * passed in via |this|, and instrument this constructor invocation.
+     */
+    if (fp->isConstructing() && ok) {
+        if (fp->returnValue().isPrimitive())
+            fp->setReturnValue(ObjectValue(fp->constructorThis()));
+    }
+
+    return ok;
+}
+
+inline bool
+ScriptPrologueOrGeneratorResume(JSContext *cx, StackFrame *fp, bool newType)
+{
+    if (!fp->isGeneratorFrame())
+        return ScriptPrologue(cx, fp, newType);
+    return true;
+}
+
+inline bool
+ScriptEpilogueOrGeneratorYield(JSContext *cx, StackFrame *fp, bool ok)
+{
+    if (!fp->isYielding())
+        return ScriptEpilogue(cx, fp, ok);
+    return ok;
+}
+
 inline void
 InterpreterFrames::enableInterruptsIfRunning(JSScript *script)
 {
     if (script == regs->fp()->script())
         enabler.enableInterrupts();
 }
 
+inline void
+AssertValidEvalFrameScopeChainAtExit(StackFrame *fp)
+{
+#ifdef DEBUG
+    JS_ASSERT(fp->isEvalFrame());
+
+    JS_ASSERT(!fp->hasBlockChain());
+    JSObject &scope = *fp->scopeChain();
+
+    if (fp->isStrictEvalFrame())
+        JS_ASSERT(scope.asCall().maybeStackFrame() == fp);
+    else if (fp->isDebuggerFrame())
+        JS_ASSERT(!scope.isScope());
+    else if (fp->isDirectEvalFrame())
+        JS_ASSERT(scope == *fp->prev()->scopeChain());
+    else
+        JS_ASSERT(scope.isGlobal());
+#endif
+}
+
+inline void
+AssertValidFunctionScopeChainAtExit(StackFrame *fp)
+{
+#ifdef DEBUG
+    JS_ASSERT(fp->isFunctionFrame());
+    if (fp->isGeneratorFrame() || fp->isYielding())
+        return;
+
+    if (fp->isEvalFrame()) {
+        AssertValidEvalFrameScopeChainAtExit(fp);
+        return;
+    }
+
+    JS_ASSERT(!fp->hasBlockChain());
+    JSObject &scope = *fp->scopeChain();
+
+    if (fp->fun()->isHeavyweight() && fp->hasCallObj())
+        JS_ASSERT(scope.asCall().maybeStackFrame() == fp);
+    else if (scope.isCall() || scope.isBlock())
+        JS_ASSERT(scope.asScope().maybeStackFrame() != fp);
+#endif
+}
+
 static JS_ALWAYS_INLINE bool
 AddOperation(JSContext *cx, const Value &lhs, const Value &rhs, Value *res)
 {
     if (lhs.isInt32() && rhs.isInt32()) {
         int32_t l = lhs.toInt32(), r = rhs.toInt32();
         int32_t sum = l + r;
         if (JS_UNLIKELY(bool((l ^ sum) & (r ^ sum) & 0x80000000))) {
             res->setDouble(double(l) + double(r));
@@ -610,17 +717,17 @@ GetObjectElementOperation(JSContext *cx,
         do {
             if (obj->isDenseArray()) {
                 if (index < obj->getDenseArrayInitializedLength()) {
                     *res = obj->getDenseArrayElement(index);
                     if (!res->isMagic())
                         break;
                 }
             } else if (obj->isArguments()) {
-                if (obj->asArguments().maybeGetElement(index, res))
+                if (obj->asArguments().getElement(index, res))
                     break;
             }
             if (!obj->getElement(cx, index, res))
                 return false;
         } while(0);
     } else {
         JSScript *script;
         jsbytecode *pc;
@@ -776,17 +883,17 @@ GreaterThanOrEqualOperation(JSContext *c
 static inline bool
 GuardFunApplySpeculation(JSContext *cx, FrameRegs &regs)
 {
     if (regs.sp[-1].isMagic(JS_OPTIMIZED_ARGUMENTS)) {
         CallArgs args = CallArgsFromSp(GET_ARGC(regs.pc), regs.sp);
         if (!IsNativeFunction(args.calleev(), js_fun_apply)) {
             if (!JSScript::applySpeculationFailed(cx, regs.fp()->script()))
                 return false;
-            regs.sp[-1] = ObjectValue(regs.fp()->argsObj());
+            args[1] = ObjectValue(regs.fp()->argsObj());
         }
     }
     return true;
 }
 
 }  /* namespace js */
 
 #endif /* jsinterpinlines_h__ */
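
One detail worth calling out from the AddOperation context above: the (l ^ sum) & (r ^ sum) & 0x80000000 test detects signed 32-bit overflow without branching on the operand signs, and the interpreter falls back to a double result when it fires. The same check in isolation (plain C++, no SpiderMonkey types; the unsigned casts just keep the addition well-defined):

    #include <cstdint>
    #include <cstdio>

    // Writes l + r into *out unless the signed 32-bit add would overflow.
    // The sum's sign bit can only differ from the sign bits of *both* inputs
    // when the addition overflowed.
    static bool AddNoOverflow(int32_t l, int32_t r, int32_t *out) {
        uint32_t sum = uint32_t(l) + uint32_t(r);
        if ((uint32_t(l) ^ sum) & (uint32_t(r) ^ sum) & 0x80000000u)
            return false;            // the VM switches to a double result here
        *out = int32_t(sum);
        return true;
    }

    int main() {
        int32_t v;
        std::printf("%d\n", AddNoOverflow(1, 2, &v) ? v : -1);                // 3
        std::printf("%s\n", AddNoOverflow(INT32_MAX, 1, &v) ? "ok" : "overflow");
    }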
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -1319,33 +1319,42 @@ generator_finalize(FreeOp *fop, JSObject
 
     /*
      * gen is open when a script has not called its close method while
      * explicitly manipulating it.
      */
     JS_ASSERT(gen->state == JSGEN_NEWBORN ||
               gen->state == JSGEN_CLOSED ||
               gen->state == JSGEN_OPEN);
-    JS_POISON(gen->fp, JS_FREE_PATTERN, sizeof(StackFrame));
-    JS_POISON(gen, JS_FREE_PATTERN, sizeof(JSGenerator));
     fop->free_(gen);
 }
 
 static void
 MarkGenerator(JSTracer *trc, JSGenerator *gen)
 {
-    MarkValueRange(trc,
-                   HeapValueify(gen->fp->generatorArgsSnapshotBegin()),
-                   HeapValueify(gen->fp->generatorArgsSnapshotEnd()),
-                   "Generator Floating Args");
-    gen->fp->mark(trc);
-    MarkValueRange(trc,
-                   HeapValueify(gen->fp->generatorSlotsSnapshotBegin()),
-                   HeapValueify(gen->regs.sp),
-                   "Generator Floating Stack");
+    StackFrame *fp = gen->floatingFrame();
+
+    /*
+     * MarkGenerator should only be called when regs is based on the floating frame.
+     * See calls to RebaseRegsFromTo.
+     */
+    JS_ASSERT(size_t(gen->regs.sp - fp->slots()) <= fp->numSlots());
+
+    /*
+     * Currently, generators are not mjitted. Still, (overflow) args can be
+     * pushed by the mjit and need to be conservatively marked. Technically, the
+     * formal args and generator slots are safe for exact marking, but since the
+     * plan is to eventually mjit generators, it makes sense to future-proof
+     * this code and save someone an hour later.
+     */
+    MarkValueRange(trc, (HeapValue *)fp->formalArgsEnd() - gen->floatingStack,
+                   gen->floatingStack, "Generator Floating Args");
+    fp->mark(trc);
+    MarkValueRange(trc, gen->regs.sp - fp->slots(),
+                   (HeapValue *)fp->slots(), "Generator Floating Stack");
 }
 
 static void
 GeneratorWriteBarrierPre(JSContext *cx, JSGenerator *gen)
 {
     JSCompartment *comp = cx->compartment;
     if (comp->needsBarrier())
         MarkGenerator(comp->barrierTracer(), gen);
@@ -1360,16 +1369,17 @@ generator_trace(JSTracer *trc, JSObject 
 
     /*
      * Do not mark if the generator is running; the contents may be trash and
      * will be replaced when the generator stops.
      */
     if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING)
         return;
 
+    JS_ASSERT(gen->liveFrame() == gen->floatingFrame());
     MarkGenerator(trc, gen);
 }
 
 Class js::GeneratorClass = {
     "Generator",
     JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
@@ -1400,86 +1410,103 @@ Class js::GeneratorClass = {
  * from *fp.  We know that upon return, the JSOP_GENERATOR opcode will return
  * from the activation in fp, so we can steal away fp->callobj and fp->argsobj
  * if they are non-null.
  */
 JSObject *
 js_NewGenerator(JSContext *cx)
 {
     FrameRegs &stackRegs = cx->regs();
-    JS_ASSERT(stackRegs.stackDepth() == 0);
     StackFrame *stackfp = stackRegs.fp();
+    JS_ASSERT(stackfp->base() == cx->regs().sp);
+    JS_ASSERT(stackfp->actualArgs() <= stackfp->formalArgs());
 
     Rooted<GlobalObject*> global(cx, &stackfp->global());
     JSObject *proto = global->getOrCreateGeneratorPrototype(cx);
     if (!proto)
         return NULL;
     JSObject *obj = NewObjectWithGivenProto(cx, &GeneratorClass, proto, global);
     if (!obj)
         return NULL;
 
     /* Load and compute stack slot counts. */
-    Value *stackvp = stackfp->generatorArgsSnapshotBegin();
-    unsigned vplen = stackfp->generatorArgsSnapshotEnd() - stackvp;
+    Value *stackvp = stackfp->actualArgs() - 2;
+    unsigned vplen = stackfp->formalArgsEnd() - stackvp;
 
     /* Compute JSGenerator size. */
     unsigned nbytes = sizeof(JSGenerator) +
                    (-1 + /* one Value included in JSGenerator */
                     vplen +
                     VALUES_PER_STACK_FRAME +
-                    stackfp->script()->nslots) * sizeof(HeapValue);
+                    stackfp->numSlots()) * sizeof(HeapValue);
 
     JS_ASSERT(nbytes % sizeof(Value) == 0);
     JS_STATIC_ASSERT(sizeof(StackFrame) % sizeof(HeapValue) == 0);
 
     JSGenerator *gen = (JSGenerator *) cx->malloc_(nbytes);
     if (!gen)
         return NULL;
     SetValueRangeToUndefined((Value *)gen, nbytes / sizeof(Value));
 
     /* Cut up floatingStack space. */
-    HeapValue *genvp = gen->stackSnapshot;
+    HeapValue *genvp = gen->floatingStack;
     StackFrame *genfp = reinterpret_cast<StackFrame *>(genvp + vplen);
 
     /* Initialize JSGenerator. */
     gen->obj.init(obj);
     gen->state = JSGEN_NEWBORN;
     gen->enumerators = NULL;
-    gen->fp = genfp;
-    gen->prevGenerator = NULL;
+    gen->floating = genfp;
 
     /* Copy from the stack to the generator's floating frame. */
     gen->regs.rebaseFromTo(stackRegs, *genfp);
-    genfp->copyFrameAndValues<HeapValue, Value, StackFrame::DoPostBarrier>(
+    genfp->stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
                               cx, genfp, genvp, stackfp, stackvp, stackRegs.sp);
+    genfp->initFloatingGenerator();
+    stackfp->setYielding();  /* XXX: to be removed */
 
     obj->setPrivate(gen);
     return obj;
 }
 
+JSGenerator *
+js_FloatingFrameToGenerator(StackFrame *fp)
+{
+    JS_ASSERT(fp->isGeneratorFrame() && fp->isFloatingGenerator());
+    char *floatingStackp = (char *)(fp->actualArgs() - 2);
+    char *p = floatingStackp - offsetof(JSGenerator, floatingStack);
+    return reinterpret_cast<JSGenerator *>(p);
+}
+
 typedef enum JSGeneratorOp {
     JSGENOP_NEXT,
     JSGENOP_SEND,
     JSGENOP_THROW,
     JSGENOP_CLOSE
 } JSGeneratorOp;
 
 /*
  * Start newborn or restart yielding generator and perform the requested
  * operation inside its frame.
  */
 static JSBool
 SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
                 JSGenerator *gen, const Value &arg)
 {
     if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING) {
-        JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NESTING_GENERATOR);
+        js_ReportValueError(cx, JSMSG_NESTING_GENERATOR,
+                            JSDVG_SEARCH_STACK, ObjectOrNullValue(obj),
+                            JS_GetFunctionId(gen->floatingFrame()->fun()));
         return JS_FALSE;
     }
 
+    /* Check for OOM errors here, where we can fail easily. */
+    if (!cx->ensureGeneratorStackSpace())
+        return JS_FALSE;
+
     /*
      * Write barrier is needed since the generator stack can be updated,
      * and it's not barriered in any other way. We need to do it before
      * gen->state changes, which can cause us to trace the generator
      * differently.
      *
      * We could optimize this by setting a bit on the generator to signify
      * that it has been marked. If this bit has already been set, there is no
@@ -1509,50 +1536,53 @@ SendToGenerator(JSContext *cx, JSGenerat
 
       default:
         JS_ASSERT(op == JSGENOP_CLOSE);
         cx->setPendingException(MagicValue(JS_GENERATOR_CLOSING));
         gen->state = JSGEN_CLOSING;
         break;
     }
 
+    StackFrame *genfp = gen->floatingFrame();
+
     JSBool ok;
     {
         GeneratorFrameGuard gfg;
         if (!cx->stack.pushGeneratorFrame(cx, gen, &gfg)) {
             gen->state = JSGEN_CLOSED;
             return JS_FALSE;
         }
 
         StackFrame *fp = gfg.fp();
         gen->regs = cx->regs();
+        JS_ASSERT(gen->liveFrame() == fp);
 
         cx->enterGenerator(gen);   /* OOM check above. */
         JSObject *enumerators = cx->enumerators;
         cx->enumerators = gen->enumerators;
 
         ok = RunScript(cx, fp->script(), fp);
 
         gen->enumerators = cx->enumerators;
         cx->enumerators = enumerators;
         cx->leaveGenerator(gen);
     }
 
-    if (gen->fp->isYielding()) {
+    if (gen->floatingFrame()->isYielding()) {
         /* Yield cannot fail, throw or be called on closing. */
         JS_ASSERT(ok);
         JS_ASSERT(!cx->isExceptionPending());
         JS_ASSERT(gen->state == JSGEN_RUNNING);
         JS_ASSERT(op != JSGENOP_CLOSE);
-        gen->fp->clearYielding();
+        genfp->clearYielding();
         gen->state = JSGEN_OPEN;
         return JS_TRUE;
     }
 
-    gen->fp->clearReturnValue();
+    genfp->clearReturnValue();
     gen->state = JSGEN_CLOSED;
     if (ok) {
         /* Returned, explicitly or by falling off the end. */
         if (op == JSGENOP_CLOSE)
             return JS_TRUE;
         return js_ThrowStopIteration(cx);
     }
 
@@ -1634,17 +1664,17 @@ generator_op(JSContext *cx, Native nativ
             return true;
         }
     }
 
     bool undef = ((op == JSGENOP_SEND || op == JSGENOP_THROW) && args.length() != 0);
     if (!SendToGenerator(cx, op, obj, gen, undef ? args[0] : UndefinedValue()))
         return false;
 
-    args.rval() = gen->fp->returnValue();
+    args.rval() = gen->floatingFrame()->returnValue();
     return true;
 }
 
 static JSBool
 generator_send(JSContext *cx, unsigned argc, Value *vp)
 {
     return generator_op(cx, generator_send, JSGENOP_SEND, vp, argc);
 }
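
The js_NewGenerator hunk above sizes one allocation to hold the JSGenerator header, the copied callee/this/actual args, the StackFrame, and the script's slots, with floatingStack[1] acting as the trailing storage. A rough sketch of that header-plus-trailing-values sizing pattern, using made-up types in place of the real ones:

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>

    // Hypothetical stand-ins; the real JSGenerator/StackFrame carry much more.
    struct ValueSketch { uint64_t bits; };
    struct FrameSnapshot { void *regs[8]; };

    struct GeneratorSketch {
        int         state;
        ValueSketch floatingStack[1];   // args, then the frame, then stack slots
    };

    // vplen counts callee + this + args (so vplen >= 2); nslots is the script's slots.
    static GeneratorSketch *NewGeneratorSketch(size_t vplen, size_t nslots) {
        size_t valuesPerFrame = (sizeof(FrameSnapshot) + sizeof(ValueSketch) - 1)
                                / sizeof(ValueSketch);
        size_t nbytes = sizeof(GeneratorSketch) +
                        (vplen - 1 + valuesPerFrame + nslots) * sizeof(ValueSketch);
        return static_cast<GeneratorSketch *>(std::calloc(1, nbytes));
    }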
--- a/js/src/jsiter.h
+++ b/js/src/jsiter.h
@@ -257,36 +257,73 @@ ForOf(JSContext *cx, const Value &iterab
 
 } /* namespace js */
 
 #if JS_HAS_GENERATORS
 
 /*
  * Generator state codes.
  */
-enum JSGeneratorState
-{
+typedef enum JSGeneratorState {
     JSGEN_NEWBORN,  /* not yet started */
     JSGEN_OPEN,     /* started by a .next() or .send(undefined) call */
     JSGEN_RUNNING,  /* currently executing via .next(), etc., call */
     JSGEN_CLOSING,  /* close method is doing asynchronous return */
     JSGEN_CLOSED    /* closed, cannot be started or closed again */
-};
+} JSGeneratorState;
 
-struct JSGenerator
-{
+struct JSGenerator {
     js::HeapPtrObject   obj;
     JSGeneratorState    state;
     js::FrameRegs       regs;
     JSObject            *enumerators;
-    JSGenerator         *prevGenerator;
-    js::StackFrame      *fp;
-    js::HeapValue       stackSnapshot[1];
+    js::StackFrame      *floating;
+    js::HeapValue       floatingStack[1];
+
+    js::StackFrame *floatingFrame() {
+        return floating;
+    }
+
+    js::StackFrame *liveFrame() {
+        JS_ASSERT((state == JSGEN_RUNNING || state == JSGEN_CLOSING) ==
+                  (regs.fp() != floatingFrame()));
+        return regs.fp();
+    }
 };
 
 extern JSObject *
 js_NewGenerator(JSContext *cx);
+
+/*
+ * Generator stack frames do not have stable pointers since they get copied to
+ * and from the generator object and the stack (see SendToGenerator). This is a
+ * problem for Block and With objects, which need to store a pointer to the
+ * enclosing stack frame. The solution is for Block and With objects to store
+ * a pointer to the "floating" stack frame stored in the generator object,
+ * since it is stable, and maintain, in the generator object, a pointer to the
+ * "live" stack frame (either a copy on the stack or the floating frame). Thus,
+ * Block and With objects must "normalize" to and from the floating/live frames
+ * in the case of generators using the following functions.
+ */
+inline js::StackFrame *
+js_FloatingFrameIfGenerator(JSContext *cx, js::StackFrame *fp)
+{
+    if (JS_UNLIKELY(fp->isGeneratorFrame()))
+        return cx->generatorFor(fp)->floatingFrame();
+    return fp;
+}
+
+/* Given a floating frame, find the JSGenerator containing it. */
+extern JSGenerator *
+js_FloatingFrameToGenerator(js::StackFrame *fp);
+
+inline js::StackFrame *
+js_LiveFrameIfGenerator(js::StackFrame *fp)
+{
+    return fp->isGeneratorFrame() ? js_FloatingFrameToGenerator(fp)->liveFrame() : fp;
+}
+
 #endif
 
 extern JSObject *
 js_InitIteratorClasses(JSContext *cx, JSObject *obj);
 
 #endif /* jsiter_h___ */
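
js_FloatingFrameToGenerator in the jsiter.cpp hunk above recovers the owning JSGenerator from a pointer into its embedded floating stack by subtracting offsetof(JSGenerator, floatingStack). That container-of pattern in isolation, with hypothetical names:

    #include <cstddef>

    struct Embedded { int payload[4]; };

    struct Owner {
        int      state;
        Embedded member;   // lives inside Owner, like floatingStack in JSGenerator
    };

    // Given a pointer to the embedded member, step back to its containing object.
    static Owner *OwnerFromMember(Embedded *m) {
        char *p = reinterpret_cast<char *>(m) - offsetof(Owner, member);
        return reinterpret_cast<Owner *>(p);
    }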
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -2844,17 +2844,16 @@ js::NewObjectWithClassProto(JSContext *c
 }
 
 JSObject *
 js::NewObjectWithType(JSContext *cx, HandleTypeObject type, JSObject *parent, gc::AllocKind kind)
 {
     JS_ASSERT(type->proto->hasNewType(type));
     JS_ASSERT(parent);
 
-    JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
     if (CanBeFinalizedInBackground(kind, &ObjectClass))
         kind = GetBackgroundAllocKind(kind);
 
     NewObjectCache &cache = cx->runtime->newObjectCache;
 
     NewObjectCache::EntryIndex entry = -1;
     if (parent == type->proto->getParent()) {
         if (cache.lookupType(&ObjectClass, type, kind, &entry))
@@ -3868,16 +3867,24 @@ JSObject::setSlotSpan(JSContext *cx, uin
 bool
 JSObject::growSlots(JSContext *cx, uint32_t oldCount, uint32_t newCount)
 {
     JS_ASSERT(newCount > oldCount);
     JS_ASSERT(newCount >= SLOT_CAPACITY_MIN);
     JS_ASSERT(!isDenseArray());
 
     /*
+     * Slots are only allocated for call objects when new properties are
+     * added to them, which can only happen while the call is still on the
+     * stack (and an eval, DEFFUN, etc. happens). We thus do not need to
+     * worry about updating any active outer function args/vars.
+     */
+    JS_ASSERT_IF(isCall(), asCall().maybeStackFrame() != NULL);
+
+    /*
      * Slot capacities are determined by the span of allocated objects. Due to
      * the limited number of bits to store shape slots, object growth is
      * throttled well before the slot capacity can overflow.
      */
     JS_ASSERT(newCount < NELEMENTS_LIMIT);
 
     size_t oldSize = Probes::objectResizeActive() ? computedSizeOfThisSlotsElements() : 0;
     size_t newSize = oldSize + (newCount - oldCount) * sizeof(Value);
@@ -6233,19 +6240,25 @@ js_DumpStackFrame(JSContext *cx, StackFr
         if (sp - fp->slots() < 10000) { // sanity
             for (Value *p = fp->slots(); p < sp; p++) {
                 fprintf(stderr, "    %p: ", (void *) p);
                 dumpValue(*p);
                 fputc('\n', stderr);
             }
         }
         if (fp->hasArgs()) {
-            fprintf(stderr, "  actuals: %p (%u) ", (void *) fp->actuals(), (unsigned) fp->numActualArgs());
-            fprintf(stderr, "  formals: %p (%u)\n", (void *) fp->formals(), (unsigned) fp->numFormalArgs());
+            fprintf(stderr, "  actuals: %p (%u) ", (void *) fp->actualArgs(), (unsigned) fp->numActualArgs());
+            fprintf(stderr, "  formals: %p (%u)\n", (void *) fp->formalArgs(), (unsigned) fp->numFormalArgs());
         }
+        if (fp->hasCallObj()) {
+            fprintf(stderr, "  has call obj: ");
+            dumpValue(ObjectValue(fp->callObj()));
+            fprintf(stderr, "\n");
+        }
+        MaybeDumpObject("argsobj", fp->maybeArgsObj());
         MaybeDumpObject("blockChain", fp->maybeBlockChain());
         if (!fp->isDummyFrame()) {
             MaybeDumpValue("this", fp->thisValue());
             fprintf(stderr, "  rval: ");
             dumpValue(fp->returnValue());
         } else {
             fprintf(stderr, "dummy frame");
         }
--- a/js/src/jsopcode.cpp
+++ b/js/src/jsopcode.cpp
@@ -509,25 +509,24 @@ js_Disassemble1(JSContext *cx, JSScript 
 
       case JOF_JUMP: {
         ptrdiff_t off = GET_JUMP_OFFSET(pc);
         Sprint(sp, " %u (%+d)", loc + (int) off, (int) off);
         break;
       }
 
       case JOF_SCOPECOORD: {
-        Value v = StringValue(ScopeCoordinateName(script, pc));
-        JSAutoByteString bytes;
-        if (!ToDisassemblySource(cx, v, &bytes))
-            return 0;
-        ScopeCoordinate sc(pc);
-        Sprint(sp, " %s (hops = %u, slot = %u)", bytes.ptr(), sc.hops, sc.slot);
-        break;
+        unsigned i = GET_UINT16(pc);
+        Sprint(sp, " %u", i);
+        pc += sizeof(uint16_t);
+        i = GET_UINT16(pc);
+        Sprint(sp, " %u", i);
+        pc += sizeof(uint16_t);
+        /* FALL THROUGH */
       }
-
       case JOF_ATOM: {
         Value v = StringValue(script->getAtom(GET_UINT32_INDEX(pc)));
         JSAutoByteString bytes;
         if (!ToDisassemblySource(cx, v, &bytes))
             return 0;
         Sprint(sp, " %s", bytes.ptr());
         break;
       }
@@ -1404,22 +1403,16 @@ GetStr(SprintStack *ss, unsigned i)
 JS_STATIC_ASSERT(JSOP_FAKE_LIMIT <= 255);
 
 static inline void
 AddParenSlop(SprintStack *ss)
 {
     ss->sprinter.reserveAndClear(PAREN_SLOP);
 }
 
-static unsigned
-StackDepth(JSScript *script)
-{
-    return script->nslots - script->nfixed;
-}
-
 static JSBool
 PushOff(SprintStack *ss, ptrdiff_t off, JSOp op, jsbytecode *pc = NULL)
 {
     unsigned top;
 
     /* ss->top points to the next free slot; be paranoid about overflow. */
     top = ss->top;
     JS_ASSERT(top < StackDepth(ss->printer->script));
@@ -1845,17 +1838,17 @@ GetLocal(SprintStack *ss, int i)
  * If IsVarSlot returns true, the var's atom is returned in *varAtom.
  * If IsVarSlot returns false (indicating that this is a get of a let binding),
  * the stack depth of the associated slot is returned in *localSlot.
  */
 static bool
 IsVarSlot(JSPrinter *jp, jsbytecode *pc, JSAtom **varAtom, int *localSlot)
 {
     if (JOF_OPTYPE(*pc) == JOF_SCOPECOORD) {
-        *varAtom = ScopeCoordinateName(jp->script, pc);
+        *varAtom = ScopeCoordinateAtom(jp->script, pc);
         LOCAL_ASSERT_RV(*varAtom, NULL);
         return true;
     }
 
     unsigned slot = GET_SLOTNO(pc);
     if (slot < jp->script->nfixed) {
         *varAtom = GetArgOrVarAtom(jp, jp->fun->nargs + slot);
         LOCAL_ASSERT_RV(*varAtom, NULL);
@@ -5708,17 +5701,17 @@ js_DecompileValueGenerator(JSContext *cx
                 goto release_pcstack;
             pc = pcstack[pcdepth];
         } else {
             /*
              * We search from fp->sp to base to find the most recently
             * calculated value matching v, under the assumption that it is
             * the value that caused the exception; see bug 328664.
              */
-            Value *stackBase = cx->regs().spForStackDepth(0);
+            Value *stackBase = fp->base();
             Value *sp = cx->regs().sp;
             do {
                 if (sp == stackBase) {
                     pcdepth = -1;
                     goto release_pcstack;
                 }
             } while (*--sp != v);
 
--- a/js/src/jsopcode.h
+++ b/js/src/jsopcode.h
@@ -55,17 +55,17 @@ typedef enum JSOp {
                                      atom index */
 #define JOF_INT32         14      /* int32_t immediate operand */
 #define JOF_OBJECT        15      /* unsigned 16-bit object index */
 #define JOF_SLOTOBJECT    16      /* uint16_t slot index + object index */
 #define JOF_REGEXP        17      /* unsigned 32-bit regexp index */
 #define JOF_INT8          18      /* int8_t immediate operand */
 #define JOF_ATOMOBJECT    19      /* uint16_t constant index + object index */
 #define JOF_UINT16PAIR    20      /* pair of uint16_t immediates */
-#define JOF_SCOPECOORD    21      /* pair of uint16_t immediates followed by block index */
+#define JOF_SCOPECOORD    21      /* pair of uint16_t immediates followed by atom index */
 #define JOF_TYPEMASK      0x001f  /* mask for above immediate types */
 
 #define JOF_NAME          (1U<<5) /* name operation */
 #define JOF_PROP          (2U<<5) /* obj.prop operation */
 #define JOF_ELEM          (3U<<5) /* obj[index] operation */
 #define JOF_XMLNAME       (4U<<5) /* XML name: *, a::b, @a, @a::b, etc. */
 #define JOF_MODEMASK      (7U<<5) /* mask for above addressing modes */
 #define JOF_SET           (1U<<8) /* set (i.e., assignment) operation */
--- a/js/src/jsopcode.tbl
+++ b/js/src/jsopcode.tbl
@@ -328,24 +328,16 @@ OPDEF(JSOP_TRY,         134,"try",      
 OPDEF(JSOP_FINALLY,     135,"finally",    NULL,       1,  0,  2,  0,  JOF_BYTE)
 
 /*
  * An "aliased variable" is a var, let, or formal arg that is aliased. Sources
  * of aliasing include: nested functions accessing the vars of an enclosing
  * function, function statements that are conditionally executed, 'eval',
  * 'with', 'arguments' and E4X filters. All of these cases require creating a
  * CallObject to own the aliased variable.
- *
- * An ALIASEDVAR opcode contains the following immediates:
- *  uint16 hops:  the number of scope objects to skip to find the ScopeObject
- *                containing the variable being accessed
- *  uint16 slot:  the slot containing the variable in the ScopeObject (this
- *                'slot' does not include RESERVED_SLOTS).
- *  uint32 block: the index (into the script object table) of the block chain
- *                at the point of the variable access.
  */
 OPDEF(JSOP_GETALIASEDVAR, 136,"getaliasedvar",NULL,   9,  0,  1, 19,  JOF_SCOPECOORD|JOF_NAME)
 OPDEF(JSOP_CALLALIASEDVAR,137,"callaliasedvar",NULL,  9,  0,  1, 19,  JOF_SCOPECOORD|JOF_NAME)
 OPDEF(JSOP_SETALIASEDVAR, 138,"setaliasedvar",NULL,   9,  1,  1,  3,  JOF_SCOPECOORD|JOF_NAME|JOF_SET|JOF_DETECTING)
 OPDEF(JSOP_INCALIASEDVAR, 139,"incaliasedvar",NULL,   10, 0,  1, 15,  JOF_SCOPECOORD|JOF_NAME|JOF_INC|JOF_TMPSLOT3|JOF_DECOMPOSE)
 OPDEF(JSOP_DECALIASEDVAR, 140,"decaliasedvar",NULL,   10, 0,  1, 15,  JOF_SCOPECOORD|JOF_NAME|JOF_DEC|JOF_TMPSLOT3|JOF_DECOMPOSE)
 OPDEF(JSOP_ALIASEDVARINC, 141,"aliasedvarinc",NULL,   10, 0,  1, 15,  JOF_SCOPECOORD|JOF_NAME|JOF_INC|JOF_POST|JOF_TMPSLOT3|JOF_DECOMPOSE)
 OPDEF(JSOP_ALIASEDVARDEC, 142,"aliasedvardec",NULL,   10, 0,  1, 15,  JOF_SCOPECOORD|JOF_NAME|JOF_DEC|JOF_POST|JOF_TMPSLOT3|JOF_DECOMPOSE)
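
The comment removed above documented the ALIASEDVAR immediates as a uint16 hop count, a uint16 slot, and a uint32 block index, and the restored jsopcode.cpp disassembler prints the two uint16 halves. A hedged decode sketch over a raw bytecode buffer (the byte order is an assumption here; the real GET_UINT16 macro in jsopcode.h is authoritative):

    #include <cstdint>

    struct ScopeCoordSketch { uint16_t hops, slot; };

    static uint16_t ReadU16(const uint8_t *p) {
        return uint16_t((p[0] << 8) | p[1]);
    }

    // pc points at the opcode byte; the two 16-bit immediates follow it.
    static ScopeCoordSketch DecodeScopeCoord(const uint8_t *pc) {
        ScopeCoordSketch sc;
        sc.hops = ReadU16(pc + 1);
        sc.slot = ReadU16(pc + 1 + sizeof(uint16_t));
        return sc;
    }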
--- a/js/src/jsscope.cpp
+++ b/js/src/jsscope.cpp
@@ -284,32 +284,32 @@ Shape::getChildBinding(JSContext *cx, co
 
         /*
          * Update the number of fixed slots which bindings of this shape will
          * have. Bindings are constructed as new properties come in, so the
          * call object allocation class is not known ahead of time. Compute
          * the fixed slot count here, which will feed into call objects created
          * off of the bindings.
          */
-        uint32_t slots = child.slotSpan();
+        uint32_t slots = child.slotSpan() + 1;  /* Add one for private data. */
         gc::AllocKind kind = gc::GetGCObjectKind(slots);
 
         /*
          * Make sure that the arguments and variables in the call object all
          * end up in a contiguous range of slots. We need this to be able to
          * embed the args/vars arrays in the TypeScriptNesting for the function
          * after the call object's frame has finished.
          */
         uint32_t nfixed = gc::GetGCKindSlots(kind);
         if (nfixed < slots) {
-            nfixed = CallObject::RESERVED_SLOTS;
-            JS_ASSERT(gc::GetGCKindSlots(gc::GetGCObjectKind(nfixed)) == CallObject::RESERVED_SLOTS);
+            nfixed = CallObject::RESERVED_SLOTS + 1;
+            JS_ASSERT(gc::GetGCKindSlots(gc::GetGCObjectKind(nfixed)) == CallObject::RESERVED_SLOTS + 1);
         }
 
-        shape->setNumFixedSlots(nfixed);
+        shape->setNumFixedSlots(nfixed - 1);
     }
     return shape;
 }
 
 /* static */ Shape *
 Shape::replaceLastProperty(JSContext *cx, const StackBaseShape &base, JSObject *proto, Shape *shape_)
 {
     RootedShape shape(cx, shape_);
--- a/js/src/jsscope.h
+++ b/js/src/jsscope.h
@@ -877,21 +877,16 @@ struct Shape : public js::gc::Cell
      * from the compartment.
      */
     static inline void readBarrier(const Shape *shape);
 
     static inline ThingRootKind rootKind() { return THING_ROOT_SHAPE; }
 
     inline void markChildren(JSTracer *trc);
 
-    inline Shape *search(JSContext *cx, jsid id) {
-        Shape **_;
-        return search(cx, this, id, &_);
-    }
-
     /* For JIT usage */
     static inline size_t offsetOfBase() { return offsetof(Shape, base_); }
 
   private:
     static void staticAsserts() {
         JS_STATIC_ASSERT(offsetof(Shape, base_) == offsetof(js::shadow::Shape, base));
         JS_STATIC_ASSERT(offsetof(Shape, slotInfo) == offsetof(js::shadow::Shape, slotInfo));
         JS_STATIC_ASSERT(FIXED_SLOTS_SHIFT == js::shadow::Shape::FIXED_SLOTS_SHIFT);
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -62,17 +62,17 @@ Bindings::lookup(JSContext *cx, JSAtom *
     Shape **spp;
     Shape *shape = Shape::search(cx, lastBinding, AtomToId(name), &spp);
     if (!shape)
         return NONE;
 
     if (indexp)
         *indexp = shape->shortid();
 
-    if (shape->setter() == CallObject::setArgOp)
+    if (shape->getter() == CallObject::getArgOp)
         return ARGUMENT;
 
     return shape->writable() ? VARIABLE : CONSTANT;
 }
 
 bool
 Bindings::add(JSContext *cx, HandleAtom name, BindingKind kind)
 {
@@ -97,24 +97,24 @@ Bindings::add(JSContext *cx, HandleAtom 
     uint16_t *indexp;
     PropertyOp getter;
     StrictPropertyOp setter;
     uint32_t slot = CallObject::RESERVED_SLOTS;
 
     if (kind == ARGUMENT) {
         JS_ASSERT(nvars == 0);
         indexp = &nargs;
-        getter = NULL;
+        getter = CallObject::getArgOp;
         setter = CallObject::setArgOp;
         slot += nargs;
     } else {
         JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
 
         indexp = &nvars;
-        getter = NULL;
+        getter = CallObject::getVarOp;
         setter = CallObject::setVarOp;
         if (kind == CONSTANT)
             attrs |= JSPROP_READONLY;
         slot += nargs + nvars;
     }
 
     RootedId id(cx);
     if (!name) {
@@ -203,30 +203,30 @@ Bindings::getLocalNameArray(JSContext *c
     for (unsigned i = 0; i < n; i++)
         names[i].maybeAtom = POISON;
 #endif
 
     for (Shape::Range r = lastBinding->all(); !r.empty(); r.popFront()) {
         const Shape &shape = r.front();
         unsigned index = uint16_t(shape.shortid());
 
-        if (shape.setter() == CallObject::setArgOp) {
+        if (shape.getter() == CallObject::getArgOp) {
             JS_ASSERT(index < nargs);
             names[index].kind = ARGUMENT;
         } else {
             JS_ASSERT(index < nvars);
             index += nargs;
             names[index].kind = shape.writable() ? VARIABLE : CONSTANT;
         }
 
         if (JSID_IS_ATOM(shape.propid())) {
             names[index].maybeAtom = JSID_TO_ATOM(shape.propid());
         } else {
             JS_ASSERT(JSID_IS_INT(shape.propid()));
-            JS_ASSERT(shape.setter() == CallObject::setArgOp);
+            JS_ASSERT(shape.getter() == CallObject::getArgOp);
             names[index].maybeAtom = NULL;
         }
     }
 
 #ifdef DEBUG
     for (unsigned i = 0; i < n; i++)
         JS_ASSERT(names[i].maybeAtom != POISON);
 #endif
@@ -236,17 +236,17 @@ Bindings::getLocalNameArray(JSContext *c
 
 const Shape *
 Bindings::lastArgument() const
 {
     JS_ASSERT(lastBinding);
 
     const js::Shape *shape = lastVariable();
     if (nvars > 0) {
-        while (shape->previous() && shape->setter() != CallObject::setArgOp)
+        while (shape->previous() && shape->getter() != CallObject::getArgOp)
             shape = shape->previous();
     }
     return shape;
 }
 
 const Shape *
 Bindings::lastVariable() const
 {
@@ -599,20 +599,20 @@ js::XDRScript(XDRState<mode> *xdr, JSScr
         if (scriptBits & (1 << SavedCallerFun))
             script->savedCallerFun = true;
         if (scriptBits & (1 << StrictModeCode))
             script->strictModeCode = true;
         if (scriptBits & (1 << ContainsDynamicNameAccess))
             script->bindingsAccessedDynamically = true;
         if (scriptBits & (1 << ArgumentsHasLocalBinding)) {
             PropertyName *arguments = cx->runtime->atomState.argumentsAtom;
-            unsigned local;
-            DebugOnly<BindingKind> kind = script->bindings.lookup(cx, arguments, &local);
+            unsigned slot;
+            DebugOnly<BindingKind> kind = script->bindings.lookup(cx, arguments, &slot);
             JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
-            script->setArgumentsHasLocalBinding(local);
+            script->setArgumentsHasLocalBinding(slot);
         }
         if (scriptBits & (1 << NeedsArgsObj))
             script->setNeedsArgsObj(true);
         if (scriptBits & (1 << IsGenerator))
             script->isGenerator = true;
     }
 
     JS_STATIC_ASSERT(sizeof(jsbytecode) == 1);
@@ -1304,22 +1304,27 @@ JSScript::NewScriptFromEmitter(JSContext
     }
     script->bindingsAccessedDynamically = bce->sc->bindingsAccessedDynamically();
     script->hasSingletons = bce->hasSingletons;
 #ifdef JS_METHODJIT
     if (cx->compartment->debugMode())
         script->debugMode = true;
 #endif
 
-    if (bce->sc->funArgumentsHasLocalBinding()) {
-        // This must precede the script->bindings.transfer() call below
-        script->setArgumentsHasLocalBinding(bce->sc->argumentsLocal());
-        if (bce->sc->funDefinitelyNeedsArgsObj())        
-            script->setNeedsArgsObj(true);
+    if (bce->sc->inFunction) {
+        if (bce->sc->funArgumentsHasLocalBinding()) {
+            // This must precede the script->bindings.transfer() call below.
+            script->setArgumentsHasLocalBinding(bce->sc->argumentsLocalSlot());
+            if (bce->sc->funDefinitelyNeedsArgsObj())
+                script->setNeedsArgsObj(true);
+        } else {
+            JS_ASSERT(!bce->sc->funDefinitelyNeedsArgsObj());
+        }
     } else {
+        JS_ASSERT(!bce->sc->funArgumentsHasLocalBinding());
         JS_ASSERT(!bce->sc->funDefinitelyNeedsArgsObj());
     }
 
     if (nClosedArgs)
         PodCopy<uint32_t>(script->closedArgs()->vector, &bce->closedArgs[0], nClosedArgs);
     if (nClosedVars)
         PodCopy<uint32_t>(script->closedVars()->vector, &bce->closedVars[0], nClosedVars);
 
@@ -1796,17 +1801,17 @@ js::CloneScript(JSContext *cx, JSScript 
     dst->mainOffset = src->mainOffset;
     dst->natoms = src->natoms;
     dst->setVersion(src->getVersion());
     dst->nfixed = src->nfixed;
     dst->nTypeSets = src->nTypeSets;
     dst->nslots = src->nslots;
     dst->staticLevel = src->staticLevel;
     if (src->argumentsHasLocalBinding()) {
-        dst->setArgumentsHasLocalBinding(src->argumentsLocal());
+        dst->setArgumentsHasLocalBinding(src->argumentsLocalSlot());
         if (src->analyzedArgsUsage())
             dst->setNeedsArgsObj(src->needsArgsObj());
     }
     dst->cloneHasArray(src);
     dst->noScriptRval = src->noScriptRval;
     dst->savedCallerFun = src->savedCallerFun;
     dst->strictModeCode = src->strictModeCode;
     dst->compileAndGo = src->compileAndGo;
@@ -2124,20 +2129,20 @@ JSScript::markChildren(JSTracer *trc)
             BreakpointSite *site = debugScript()->breakpoints[i];
             if (site && site->trapHandler)
                 MarkValue(trc, &site->trapClosure, "trap closure");
         }
     }
 }
 
 void
-JSScript::setArgumentsHasLocalBinding(uint16_t local)
+JSScript::setArgumentsHasLocalBinding(uint16_t slot)
 {
     argsHasLocalBinding_ = true;
-    argsLocal_ = local;
+    argsSlot_ = slot;
     needsArgsAnalysis_ = true;
 }
 
 void
 JSScript::setNeedsArgsObj(bool needsArgsObj)
 {
     JS_ASSERT(!analyzedArgsUsage());
     JS_ASSERT_IF(needsArgsObj, argumentsHasLocalBinding());
@@ -2159,47 +2164,49 @@ JSScript::applySpeculationFailed(JSConte
      * stack that has just now flowed into an apply. In this case, there is
      * nothing to do; GuardFunApplySpeculation will patch in the real argsobj.
      */
     if (script->needsArgsObj())
         return true;
 
     script->needsArgsObj_ = true;
 
-    const unsigned local = script->argumentsLocal();
+    const unsigned slot = script->argumentsLocalSlot();
 
     /*
      * By design, the apply-arguments optimization is only made when there
      * are no outstanding cases of MagicValue(JS_OPTIMIZED_ARGUMENTS) other
      * than this particular invocation of 'f.apply(x, arguments)'. Thus, there
      * are no outstanding values of MagicValue(JS_OPTIMIZED_ARGUMENTS) on the
      * stack. However, there are three things that need fixup:
      *  - there may be any number of activations of this script that don't have
      *    an argsObj that now need one.
      *  - jit code compiled (and possibly active on the stack) with the static
      *    assumption of !script->needsArgsObj();
      *  - type inference data for the script assuming script->needsArgsObj.
      */
     for (AllFramesIter i(cx->stack.space()); !i.done(); ++i) {
         StackFrame *fp = i.fp();
         if (fp->isFunctionFrame() && fp->script() == script) {
-            ArgumentsObject *argsobj = ArgumentsObject::createExpected(cx, fp);
-            if (!argsobj) {
-                /*
-                 * We can't leave stack frames with script->needsArgsObj but no
-                 * arguments object. It is, however, safe to leave frames with
-                 * an arguments object but !script->needsArgsObj.
-                 */
-                script->needsArgsObj_ = false;
-                return false;
+            if (!fp->hasArgsObj()) {
+                ArgumentsObject *obj = ArgumentsObject::create(cx, fp);
+                if (!obj) {
+                    /*
+                     * We can't leave stack frames where script->needsArgsObj
+                     * and !fp->hasArgsObj. It is, however, safe to leave frames
+                     * where fp->hasArgsObj and !fp->script->needsArgsObj.
+                     */
+                    script->needsArgsObj_ = false;
+                    return false;
+                }
+
+                /* Note: 'arguments' may have already been overwritten. */
+                if (fp->localSlot(slot).isMagic(JS_OPTIMIZED_ARGUMENTS))
+                    fp->localSlot(slot) = ObjectValue(*obj);
             }
-
-            /* Note: 'arguments' may have already been overwritten. */
-            if (fp->unaliasedLocal(local).isMagic(JS_OPTIMIZED_ARGUMENTS))
-                fp->unaliasedLocal(local) = ObjectValue(*argsobj);
         }
     }
 
 #ifdef JS_METHODJIT
     if (script->hasJITInfo()) {
         mjit::Recompiler::clearStackReferences(cx->runtime->defaultFreeOp(), script);
         mjit::ReleaseScriptCode(cx->runtime->defaultFreeOp(), script);
     }
@@ -2208,16 +2215,17 @@ JSScript::applySpeculationFailed(JSConte
     if (script->hasAnalysis() && script->analysis()->ranInference()) {
         types::AutoEnterTypeInference enter(cx);
         types::TypeScript::MonitorUnknown(cx, script, script->argumentsBytecode());
     }
 
     return true;
 }
 
+#ifdef DEBUG
 bool
 JSScript::varIsAliased(unsigned varSlot)
 {
     if (bindingsAccessedDynamically)
         return true;
 
     for (uint32_t i = 0; i < numClosedVars(); ++i) {
         if (closedVars()->vector[i] == varSlot) {
@@ -2251,8 +2259,9 @@ JSScript::formalLivesInCallObject(unsign
         if (closedArgs()->vector[i] == argSlot) {
             JS_ASSERT(function()->isHeavyweight());
             return true;
         }
     }
 
     return false;
 }
+#endif
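
The frame-fixup loop restored in JSScript::applySpeculationFailed above is easy to lose in the diff noise. The sketch below is a minimal standalone model, using hypothetical MiniFrame/MiniValue/MiniArgsObject stand-ins rather than the real StackFrame, Value, and ArgumentsObject types: walk every activation of the script, lazily create an arguments object for frames that lack one, and replace the optimized-arguments sentinel in the 'arguments' slot only where the script has not already overwritten it.

    // Standalone model (not the real SpiderMonkey types) of the fixup loop above.
    #include <memory>
    #include <vector>

    struct MiniArgsObject {};                        // stand-in for ArgumentsObject

    struct MiniValue {
        enum Kind { Undefined, OptimizedArgsMagic, Object } kind = Undefined;
        MiniArgsObject *obj = nullptr;
    };

    struct MiniFrame {
        std::vector<MiniValue> locals;
        std::unique_ptr<MiniArgsObject> argsObj;     // null until created
        bool hasArgsObj() const { return argsObj != nullptr; }
    };

    // Give every frame of the script a real arguments object and patch the
    // 'arguments' local, but only where the magic sentinel is still in place.
    void FixupFramesAfterSpeculationFailure(std::vector<MiniFrame> &frames, unsigned argsSlot)
    {
        for (MiniFrame &fp : frames) {
            if (fp.hasArgsObj())
                continue;
            fp.argsObj.reset(new MiniArgsObject());
            MiniValue &slot = fp.locals[argsSlot];
            if (slot.kind == MiniValue::OptimizedArgsMagic) {
                slot.kind = MiniValue::Object;
                slot.obj = fp.argsObj.get();
            }
        }
    }
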
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -109,22 +109,22 @@ class Bindings
     uint16_t numArgs() const { return nargs; }
     uint16_t numVars() const { return nvars; }
     unsigned count() const { return nargs + nvars; }
 
     /*
      * These functions map between argument/var indices [0, nargs/nvars) and
      * Bindings indices [0, nargs + nvars).
      */
-    bool slotIsArg(uint16_t i) const { return i < nargs; }
-    bool slotIsLocal(uint16_t i) const { return i >= nargs; }
-    uint16_t argToSlot(uint16_t i) { JS_ASSERT(i < nargs); return i; }
-    uint16_t localToSlot(uint16_t i) { return i + nargs; }
-    uint16_t slotToArg(uint16_t i) { JS_ASSERT(slotIsArg(i)); return i; }
-    uint16_t slotToLocal(uint16_t i) { JS_ASSERT(slotIsLocal(i)); return i - nargs; }
+    bool bindingIsArg(uint16_t i) const { return i < nargs; }
+    bool bindingIsLocal(uint16_t i) const { return i >= nargs; }
+    uint16_t argToBinding(uint16_t i) { JS_ASSERT(i < nargs); return i; }
+    uint16_t localToBinding(uint16_t i) { return i + nargs; }
+    uint16_t bindingToArg(uint16_t i) { JS_ASSERT(bindingIsArg(i)); return i; }
+    uint16_t bindingToLocal(uint16_t i) { JS_ASSERT(bindingIsLocal(i)); return i - nargs; }
 
     /* Ensure these bindings have a shape lineage. */
     inline bool ensureShape(JSContext *cx);
 
     /* Return the shape lineage generated for these bindings. */
     inline Shape *lastShape() const;
 
     /*
@@ -488,17 +488,17 @@ struct JSScript : public js::gc::Cell
 
     uint16_t        nTypeSets;  /* number of type sets used in this script for
                                    dynamic type monitoring */
 
     uint16_t        nslots;     /* vars plus maximum stack depth */
     uint16_t        staticLevel;/* static level for display maintenance */
 
   private:
-    uint16_t        argsLocal_; /* local holding 'arguments' (if argumentsHasLocalBindings) */
+    uint16_t        argsSlot_;  /* slot holding 'arguments' (if argumentsHasLocalBindings) */
 
     // 8-bit fields.
 
   public:
     // The kinds of the optional arrays.
     enum ArrayKind {
         CONSTS,
         OBJECTS,
@@ -583,18 +583,18 @@ struct JSScript : public js::gc::Cell
                                JSVersion version);
     static JSScript *NewScriptFromEmitter(JSContext *cx, js::BytecodeEmitter *bce);
 
     void setVersion(JSVersion v) { version = v; }
 
     /* See ContextFlags::funArgumentsHasLocalBinding comment. */
     bool argumentsHasLocalBinding() const { return argsHasLocalBinding_; }
     jsbytecode *argumentsBytecode() const { JS_ASSERT(code[0] == JSOP_ARGUMENTS); return code; }
-    unsigned argumentsLocal() const { JS_ASSERT(argsHasLocalBinding_); return argsLocal_; }
-    void setArgumentsHasLocalBinding(uint16_t local);
+    unsigned argumentsLocalSlot() const { JS_ASSERT(argsHasLocalBinding_); return argsSlot_; }
+    void setArgumentsHasLocalBinding(uint16_t slot);
 
     /*
      * As an optimization, even when argsHasLocalBinding, the function prologue
      * may not need to create an arguments object. This is determined by
      * needsArgsObj which is set by ScriptAnalysis::analyzeSSA before running
      * the script the first time. When !needsArgsObj, the prologue may simply
      * write MagicValue(JS_OPTIMIZED_ARGUMENTS) to 'arguments's slot and any
      * uses of 'arguments' will be guaranteed to handle this magic value.
@@ -856,21 +856,22 @@ struct JSScript : public js::gc::Cell
 
     uint32_t getClosedVar(uint32_t index) {
         js::ClosedSlotArray *arr = closedVars();
         JS_ASSERT(index < arr->length);
         return arr->vector[index];
     }
 
 
+#ifdef DEBUG
     bool varIsAliased(unsigned varSlot);
     bool formalIsAliased(unsigned argSlot);
     bool formalLivesInArgumentsObject(unsigned argSlot);
     bool formalLivesInCallObject(unsigned argSlot);
-
+#endif
   private:
     /*
      * Recompile with or without single-stepping support, as directed
      * by stepModeEnabled().
      */
     void recompileForStepMode(js::FreeOp *fop);
 
     /* Attempt to change this->stepMode to |newValue|. */
@@ -938,16 +939,22 @@ struct JSScript : public js::gc::Cell
     void markChildren(JSTracer *trc);
 };
 
 JS_STATIC_ASSERT(sizeof(JSScript::ArrayBitsT) * 8 >= JSScript::LIMIT);
 
 /* If this fails, add/remove padding within JSScript. */
 JS_STATIC_ASSERT(sizeof(JSScript) % js::gc::Cell::CellSize == 0);
 
+static JS_INLINE unsigned
+StackDepth(JSScript *script)
+{
+    return script->nslots - script->nfixed;
+}
+
 /*
  * New-script-hook calling is factored from NewScriptFromEmitter so that it
  * and callers of XDRScript can share this code.  In the case of callers
  * of XDRScript, the hook should be invoked only after successful decode
  * of any owning function (the fun parameter) or script object (null fun).
  */
 extern JS_FRIEND_API(void)
 js_CallNewScriptHook(JSContext *cx, JSScript *script, JSFunction *fun);
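
The slot-to-binding renaming in Bindings above, together with the re-added StackDepth helper, is plain index arithmetic: formals occupy binding indices [0, nargs) and locals occupy [nargs, nargs + nvars), while StackDepth(script) is nslots - nfixed. A minimal sketch, assuming a hypothetical MiniBindings type rather than the real class:

    #include <cassert>
    #include <cstdint>

    struct MiniBindings {
        uint16_t nargs, nvars;

        bool bindingIsArg(uint16_t i) const       { return i < nargs; }
        bool bindingIsLocal(uint16_t i) const     { return i >= nargs; }
        uint16_t argToBinding(uint16_t i) const   { assert(i < nargs); return i; }
        uint16_t localToBinding(uint16_t i) const { assert(i < nvars); return uint16_t(i + nargs); }
        uint16_t bindingToArg(uint16_t i) const   { assert(bindingIsArg(i)); return i; }
        uint16_t bindingToLocal(uint16_t i) const { assert(bindingIsLocal(i)); return uint16_t(i - nargs); }
    };

    // StackDepth as restored above: operand-stack room beyond the fixed slots.
    inline unsigned StackDepthOf(unsigned nslots, unsigned nfixed) { return nslots - nfixed; }

    int main() {
        MiniBindings b = { 2, 3 };                    // 2 formals, 3 locals
        assert(b.localToBinding(0) == 2);             // first local follows the formals
        assert(b.bindingToLocal(b.localToBinding(2)) == 2);
        assert(b.bindingIsArg(1) && !b.bindingIsArg(2));
        assert(StackDepthOf(10, 4) == 6);
        return 0;
    }
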
--- a/js/src/jsscriptinlines.h
+++ b/js/src/jsscriptinlines.h
@@ -56,18 +56,18 @@ Bindings::lastShape() const
     JS_ASSERT(!lastBinding->inDictionary());
     return lastBinding;
 }
 
 Shape *
 Bindings::initialShape(JSContext *cx) const
 {
     /* Get an allocation kind to match an empty call object. */
-    gc::AllocKind kind = gc::FINALIZE_OBJECT2_BACKGROUND;
-    JS_ASSERT(gc::GetGCKindSlots(kind) == CallObject::RESERVED_SLOTS);
+    gc::AllocKind kind = gc::FINALIZE_OBJECT4;
+    JS_ASSERT(gc::GetGCKindSlots(kind) == CallObject::RESERVED_SLOTS + 1);
 
     return EmptyShape::getInitialShape(cx, &CallClass, NULL, NULL, kind,
                                        BaseShape::VAROBJ);
 }
 
 bool
 Bindings::ensureShape(JSContext *cx)
 {
--- a/js/src/jsstr.cpp
+++ b/js/src/jsstr.cpp
@@ -2210,23 +2210,27 @@ LambdaIsGetElem(JSObject &lambda, JSCont
     GET_NAME_FROM_BYTECODE(script, pc, 0, bname);
     pc += JSOP_NAME_LENGTH;
 
     /*
      * Do a conservative search for 'b' in the enclosing scope. Avoid using a
      * real name lookup since this can trigger observable effects.
      */
     Value b;
-    RootedObject scope(cx);
-    scope = cx->stack.currentScriptedScopeChain();
+    JSObject *scope = cx->stack.currentScriptedScopeChain();
     while (true) {
-        if (!scope->isCall() && !scope->isBlock())
+        if (scope->isCall()) {
+            if (scope->asCall().containsVarOrArg(bname, &b, cx))
+                break;
+        } else if (scope->isBlock()) {
+            if (scope->asClonedBlock().containsVar(bname, &b, cx))
+                break;
+        } else {
             return NULL;
-        if (HasDataProperty(cx, scope, bname, &b))
-            break;
+        }
         scope = &scope->asScope().enclosingScope();
     }
 
     /* Look for 'a' to be the lambda's first argument. */
     if (JSOp(*pc) != JSOP_GETARG || GET_SLOTNO(pc) != 0)
         return NULL;
     pc += JSOP_GETARG_LENGTH;
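
The rewritten loop in LambdaIsGetElem above walks the enclosing scopes by kind instead of using a generic property lookup. A simplified standalone model of that conservative walk, using a hypothetical Scope type rather than the real ScopeObject hierarchy:

    #include <map>
    #include <string>

    struct Scope {
        enum Kind { Call, Block, Other } kind;
        std::map<std::string, int> bindings;   // name -> value (stand-in for Values)
        Scope *enclosing = nullptr;
    };

    // Returns true and fills *out if 'name' is found in a Call or Block scope;
    // returns false as soon as an unsupported scope kind is reached, because a
    // real lookup there could have observable side effects.
    bool ConservativeLookup(Scope *scope, const std::string &name, int *out)
    {
        for (; scope; scope = scope->enclosing) {
            if (scope->kind == Scope::Other)
                return false;
            auto it = scope->bindings.find(name);
            if (it != scope->bindings.end()) {
                *out = it->second;
                return true;
            }
        }
        return false;
    }
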
 
--- a/js/src/jsval.h
+++ b/js/src/jsval.h
@@ -210,21 +210,20 @@ typedef enum JSWhyMagic
                                   * enumerated like a native object. */
     JS_NO_ITER_VALUE,            /* there is not a pending iterator value */
     JS_GENERATOR_CLOSING,        /* exception value thrown when closing a generator */
     JS_NO_CONSTANT,              /* compiler sentinel value */
     JS_THIS_POISON,              /* used in debug builds to catch tracing errors */
     JS_ARG_POISON,               /* used in debug builds to catch tracing errors */
     JS_SERIALIZE_NO_NODE,        /* an empty subnode in the AST serializer */
     JS_LAZY_ARGUMENTS,           /* lazy arguments value on the stack */
+    JS_UNASSIGNED_ARGUMENTS,     /* the initial value of callobj.arguments */
     JS_OPTIMIZED_ARGUMENTS,      /* optimized-away 'arguments' value */
     JS_IS_CONSTRUCTING,          /* magic value passed to natives to indicate construction */
     JS_OVERWRITTEN_CALLEE,       /* arguments.callee has been overwritten */
-    JS_FORWARD_TO_CALL_OBJECT,   /* args object element stored in call object */
-    JS_BLOCK_NEEDS_CLONE,        /* value of static block object slot */
     JS_GENERIC_MAGIC             /* for local use */
 } JSWhyMagic;
 
 #if defined(IS_LITTLE_ENDIAN)
 # if JS_BITS_PER_WORD == 32
 typedef union jsval_layout
 {
     uint64_t asBits;
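
The JSWhyMagic changes above add and remove sentinel "why" codes; engine code then tests values with isMagic(JS_OPTIMIZED_ARGUMENTS) and the like before replacing them, as in applySpeculationFailed earlier in this patch. A toy illustration of the idea (not the real jsval_layout encoding):

    #include <cstdint>

    enum WhyMagic : uint32_t { LAZY_ARGUMENTS, OPTIMIZED_ARGUMENTS, IS_CONSTRUCTING };

    struct ToyValue {
        enum Tag : uint8_t { Int32, Magic } tag;
        union { int32_t i32; WhyMagic why; } payload;

        static ToyValue magic(WhyMagic why) {
            ToyValue v; v.tag = Magic; v.payload.why = why; return v;
        }
        bool isMagic(WhyMagic why) const { return tag == Magic && payload.why == why; }
    };

    // Usage mirroring the engine's check before installing a real object:
    //   ToyValue v = ToyValue::magic(OPTIMIZED_ARGUMENTS);
    //   if (v.isMagic(OPTIMIZED_ARGUMENTS)) { /* replace the sentinel */ }
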
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -1072,32 +1072,70 @@ mjit::Compiler::generatePrologue()
         {
             stubcc.linkExitDirect(stackCheck, stubcc.masm.label());
             OOL_STUBCALL(stubs::HitStackQuota, REJOIN_NONE);
             stubcc.crossJump(stubcc.masm.jump(), masm.label());
         }
 
         markUndefinedLocals();
 
+        types::TypeScriptNesting *nesting = script->nesting();
+
         /*
-         * Load the scope chain into the frame if it will be needed by NAME
-         * opcodes or by the nesting prologue below. The scope chain is always
-         * set for global and eval frames, and will have been set by
-         * HeavyweightFunctionPrologue for heavyweight function frames.
+         * Run the function prologue if necessary. This is always done in a
+         * stub for heavyweight functions (including nesting outer functions).
          */
-        if (!script->function()->isHeavyweight() &&
-            (analysis->usesScopeChain() || script->nesting()))
-        {
-            RegisterID t0 = Registers::ReturnReg;
-            Jump hasScope = masm.branchTest32(Assembler::NonZero,
-                                              FrameFlagsAddress(), Imm32(StackFrame::HAS_SCOPECHAIN));
-            masm.loadPayload(Address(JSFrameReg, StackFrame::offsetOfCallee(script->function())), t0);
-            masm.loadPtr(Address(t0, JSFunction::offsetOfEnvironment()), t0);
-            masm.storePtr(t0, Address(JSFrameReg, StackFrame::offsetOfScopeChain()));
-            hasScope.linkTo(masm.label(), &masm);
+        JS_ASSERT_IF(nesting && nesting->children, script->function()->isHeavyweight());
+        if (script->function()->isHeavyweight()) {
+            prepareStubCall(Uses(0));
+            INLINE_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
+        } else {
+            /*
+             * Load the scope chain into the frame if it will be needed by NAME
+             * opcodes or by the nesting prologue below. The scope chain is
+             * always set for global and eval frames, and will have been set by
+             * CreateFunCallObject for heavyweight function frames.
+             */
+            if (analysis->usesScopeChain() || nesting) {
+                RegisterID t0 = Registers::ReturnReg;
+                Jump hasScope = masm.branchTest32(Assembler::NonZero,
+                                                  FrameFlagsAddress(), Imm32(StackFrame::HAS_SCOPECHAIN));
+                masm.loadPayload(Address(JSFrameReg, StackFrame::offsetOfCallee(script->function())), t0);
+                masm.loadPtr(Address(t0, JSFunction::offsetOfEnvironment()), t0);
+                masm.storePtr(t0, Address(JSFrameReg, StackFrame::offsetOfScopeChain()));
+                hasScope.linkTo(masm.label(), &masm);
+            }
+
+            if (nesting) {
+                /*
+                 * Inline the common case for the nesting prologue: the
+                 * function is a non-heavyweight inner function with no
+                 * children of its own. We ensure during inference that the
+                 * outer function does not add scope objects for 'let' or
+                 * 'with', so that the frame's scope chain will be
+                 * the parent's call object, and if it differs from the
+                 * parent's current activation then the parent is reentrant.
+                 */
+                JSScript *parent = nesting->parent;
+                JS_ASSERT(parent);
+                JS_ASSERT_IF(parent->hasAnalysis() && parent->analysis()->ranBytecode(),
+                             !parent->analysis()->addsScopeObjects());
+
+                RegisterID t0 = Registers::ReturnReg;
+                masm.move(ImmPtr(&parent->nesting()->activeCall), t0);
+                masm.loadPtr(Address(t0), t0);
+
+                Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
+                Jump mismatch = masm.branchPtr(Assembler::NotEqual, t0, scopeChain);
+                masm.add32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
+
+                stubcc.linkExitDirect(mismatch, stubcc.masm.label());
+                OOL_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
+                stubcc.crossJump(stubcc.masm.jump(), masm.label());
+            }
         }
 
         /*
          * When 'arguments' is used in the script, it may be optimized away
          * which involves reading from the stack frame directly, including
          * fp->u.nactual. fp->u.nactual is only set when numActual != numFormal,
          * so store 'fp->u.nactual = numFormal' when there is no over/underflow.
          */
@@ -1118,56 +1156,19 @@ mjit::Compiler::generatePrologue()
         if (script->function()) {
             prepareStubCall(Uses(0));
             INLINE_STUBCALL(stubs::AssertArgumentTypes, REJOIN_NONE);
         }
 #endif
         ensureDoubleArguments();
     }
 
-    /* Inline StackFrame::prologue. */
-    if (script->isActiveEval && script->strictModeCode) {
-        prepareStubCall(Uses(0));
-        INLINE_STUBCALL(stubs::StrictEvalPrologue, REJOIN_EVAL_PROLOGUE);
-    } else if (script->function()) {
-        if (script->function()->isHeavyweight()) {
-            prepareStubCall(Uses(0));
-            INLINE_STUBCALL(stubs::HeavyweightFunctionPrologue, REJOIN_FUNCTION_PROLOGUE);
-        } else if (types::TypeScriptNesting *nesting = script->nesting()) {
-            /*
-             * Inline the common case for the nesting prologue: the
-             * function is a non-heavyweight inner function with no
-             * children of its own. We ensure during inference that the
-             * outer function does not add scope objects for 'let' or
-             * 'with', so that the frame's scope chain will be
-             * the parent's call object, and if it differs from the
-             * parent's current activation then the parent is reentrant.
-             */
-            JSScript *parent = nesting->parent;
-            JS_ASSERT(parent);
-            JS_ASSERT_IF(parent->hasAnalysis() && parent->analysis()->ranBytecode(),
-                         !parent->analysis()->addsScopeObjects());
-
-            RegisterID t0 = Registers::ReturnReg;
-            masm.move(ImmPtr(&parent->nesting()->activeCall), t0);
-            masm.loadPtr(Address(t0), t0);
-
-            Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
-            Jump mismatch = masm.branchPtr(Assembler::NotEqual, t0, scopeChain);
-            masm.add32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
-
-            stubcc.linkExitDirect(mismatch, stubcc.masm.label());
-            OOL_STUBCALL(stubs::TypeNestingPrologue, REJOIN_FUNCTION_PROLOGUE);
-            stubcc.crossJump(stubcc.masm.jump(), masm.label());
-        }
-
-        if (isConstructing) {
-            if (!constructThis())
-                return Compile_Error;
-        }
+    if (isConstructing) {
+        if (!constructThis())
+            return Compile_Error;
     }
 
     if (debugMode()) {
         prepareStubCall(Uses(0));
         INLINE_STUBCALL(stubs::ScriptDebugPrologue, REJOIN_RESUME);
     } else if (Probes::callTrackingActive(cx)) {
         prepareStubCall(Uses(0));
         INLINE_STUBCALL(stubs::ScriptProbeOnlyPrologue, REJOIN_RESUME);
@@ -1203,18 +1204,18 @@ mjit::Compiler::markUndefinedLocal(uint3
             masm.storeValue(UndefinedValue(), local);
     }
 }
 
 void
 mjit::Compiler::markUndefinedLocals()
 {
     /*
-     * Set locals to undefined. Skip locals which aren't closed and are known
-     * to be defined before used,
+     * Set locals to undefined, as in initCallFrameLatePrologue.
+     * Skip locals which aren't closed and are known to be defined before used.
      */
     for (uint32_t i = 0; i < script->nfixed; i++)
         markUndefinedLocal(0, i);
 }
 
 CompileStatus
 mjit::Compiler::generateEpilogue()
 {
@@ -2778,92 +2779,108 @@ mjit::Compiler::generateMethod()
 
           BEGIN_CASE(JSOP_GETARG)
           BEGIN_CASE(JSOP_CALLARG)
           {
             restoreVarType();
             uint32_t arg = GET_SLOTNO(PC);
             if (JSObject *singleton = pushedSingleton(0))
                 frame.push(ObjectValue(*singleton));
-            else if (script->argsObjAliasesFormals())
-                jsop_aliasedArg(arg, /* get = */ true);
             else
                 frame.pushArg(arg);
           }
           END_CASE(JSOP_GETARG)
 
           BEGIN_CASE(JSOP_BINDGNAME)
             jsop_bindgname();
           END_CASE(JSOP_BINDGNAME)
 
           BEGIN_CASE(JSOP_SETARG)
           {
             jsbytecode *next = &PC[JSOP_SETARG_LENGTH];
             bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
-
-            uint32_t arg = GET_SLOTNO(PC);
-            if (script->argsObjAliasesFormals())
-                jsop_aliasedArg(arg, /* get = */ false, pop);
-            else
-                frame.storeArg(arg, pop);
-
+            frame.storeArg(GET_SLOTNO(PC), pop);
             updateVarType();
 
             if (pop) {
                 frame.pop();
                 PC += JSOP_SETARG_LENGTH + JSOP_POP_LENGTH;
                 break;
             }
           }
           END_CASE(JSOP_SETARG)
 
-          BEGIN_CASE(JSOP_GETLOCAL)
-          BEGIN_CASE(JSOP_CALLLOCAL)
           BEGIN_CASE(JSOP_GETALIASEDVAR)
           BEGIN_CASE(JSOP_CALLALIASEDVAR)
           {
+            /* This is all temporary until bug 659577. */
+            if (JSObject *singleton = pushedSingleton(0)) {
+                frame.push(ObjectValue(*singleton));
+            } else {
+                ScopeCoordinate sc = ScopeCoordinate(PC);
+                if (script->bindings.bindingIsArg(sc.binding))
+                    frame.pushArg(script->bindings.bindingToArg(sc.binding));
+                else
+                    frame.pushLocal(script->bindings.bindingToLocal(sc.binding));
+            }
+          }
+          END_CASE(JSOP_GETALIASEDVAR)
+
+          BEGIN_CASE(JSOP_GETLOCAL)
+          BEGIN_CASE(JSOP_CALLLOCAL)
+          {
+
             /*
              * Update the var type unless we are about to pop the variable.
              * Sync is not guaranteed for types of dead locals, and GETLOCAL
              * followed by POP is not regarded as a use of the variable.
              */
             jsbytecode *next = &PC[JSOP_GETLOCAL_LENGTH];
             if (JSOp(*next) != JSOP_POP || analysis->jumpTarget(next))
                 restoreVarType();
+            uint32_t slot = GET_SLOTNO(PC);
             if (JSObject *singleton = pushedSingleton(0))
                 frame.push(ObjectValue(*singleton));
-            else if (JOF_OPTYPE(*PC) == JOF_SCOPECOORD)
-                jsop_aliasedVar(ScopeCoordinate(PC), /* get = */ true);
             else
-                frame.pushLocal(GET_SLOTNO(PC));
-
-            PC += GetBytecodeLength(PC);
-            break;
+                frame.pushLocal(slot);
           }
           END_CASE(JSOP_GETLOCAL)
 
-          BEGIN_CASE(JSOP_SETLOCAL)
           BEGIN_CASE(JSOP_SETALIASEDVAR)
           {
-            jsbytecode *next = &PC[GetBytecodeLength(PC)];
+            /* This is all temporary until bug 659577. */
+            jsbytecode *next = &PC[JSOP_SETALIASEDVAR_LENGTH];
             bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
-            if (JOF_OPTYPE(*PC) == JOF_SCOPECOORD)
-                jsop_aliasedVar(ScopeCoordinate(PC), /* get = */ false, pop);
+            ScopeCoordinate sc = ScopeCoordinate(PC);
+            if (script->bindings.bindingIsArg(sc.binding))
+                frame.storeArg(script->bindings.bindingToArg(sc.binding), pop);
             else
-                frame.storeLocal(GET_SLOTNO(PC), pop);
+                frame.storeLocal(script->bindings.bindingToLocal(sc.binding), pop);
             updateVarType();
 
             if (pop) {
                 frame.pop();
-                PC = next + JSOP_POP_LENGTH;
+                PC += JSOP_SETALIASEDVAR_LENGTH + JSOP_POP_LENGTH;
                 break;
             }
-
-            PC = next;
-            break;
+          }
+          END_CASE(JSOP_SETALIASEDVAR)
+
+          BEGIN_CASE(JSOP_SETLOCAL)
+          {
+            jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
+            bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
+            frame.storeLocal(GET_SLOTNO(PC), pop);
+            updateVarType();
+
+            if (pop) {
+                frame.pop();
+                PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
+                break;
+            }
           }
           END_CASE(JSOP_SETLOCAL)
 
           BEGIN_CASE(JSOP_UINT16)
             frame.push(Value(Int32Value((int32_t) GET_UINT16(PC))));
           END_CASE(JSOP_UINT16)
 
           BEGIN_CASE(JSOP_NEWINIT)
@@ -3751,22 +3768,57 @@ mjit::Compiler::emitReturn(FrameEntry *f
         if (!endOfScript)
             a->returnJumps->append(masm.jump());
 
         if (a->returnSet)
             frame.freeReg(a->returnRegister);
         return;
     }
 
-    /* Inline StackFrame::epilogue. */
-    if (debugMode()) {
-        prepareStubCall(Uses(0));
-        INLINE_STUBCALL(stubs::Epilogue, REJOIN_NONE);
-    } else if (script->function() && script->nesting()) {
-        masm.sub32(Imm32(1), AbsoluteAddress(&script->nesting()->activeFrames));
+    /*
+     * Outside the mjit, activation objects (call objects and arguments objects) are put
+     * by ContextStack::pop* members. For JSOP_RETURN, the interpreter only calls
+     * popInlineFrame if fp != entryFrame since the VM protocol is that Invoke/Execute are
+     * responsible for pushing/popping the initial frame. However, an mjit function
+     * epilogue doesn't treat the initial StackFrame of its VMFrame specially: it always
+     * puts activation objects. And furthermore, if the last mjit frame throws, the mjit
+     * does *not* put the activation objects. So we can't assume any particular state of
+     * puttedness upon exit from the mjit.
+     *
+     * To avoid double-putting, EnterMethodJIT calls updateEpilogueFlags to clear the
+     * entry frame's hasArgsObj() and hasCallObj() flags if the given objects have already
+     * been put.
+     */
+    if (script->function()) {
+        types::TypeScriptNesting *nesting = script->nesting();
+        if (script->function()->isHeavyweight() || script->needsArgsObj() ||
+            (nesting && nesting->children) || debugMode())
+        {
+            prepareStubCall(Uses(fe ? 1 : 0));
+            INLINE_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
+        } else {
+            /* if hasCallObj() */
+            Jump putObjs = masm.branchTest32(Assembler::NonZero,
+                                             Address(JSFrameReg, StackFrame::offsetOfFlags()),
+                                             Imm32(StackFrame::HAS_CALL_OBJ));
+            stubcc.linkExit(putObjs, Uses(frame.frameSlots()));
+
+            stubcc.leave();
+            OOL_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
+
+            emitReturnValue(&stubcc.masm, fe);
+            emitFinalReturn(stubcc.masm);
+
+            /*
+             * Do frame count balancing inline for inner functions in a nesting
+             * with no children of their own.
+             */
+            if (nesting)
+                masm.sub32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
+        }
     }
 
     emitReturnValue(&masm, fe);
     emitFinalReturn(masm);
 
     /*
      * After we've placed the call object, all tracked state can be
      * thrown away. This will happen anyway because the next live opcode (if
@@ -5642,17 +5694,17 @@ mjit::Compiler::jsop_bindname(PropertyNa
      * If this is a BINDNAME for a variable of a non-reentrant outer function,
      * the object is definitely the outer function's active call object.
      */
     if (cx->typeInferenceEnabled()) {
         ScriptAnalysis::NameAccess access =
             analysis->resolveNameAccess(cx, NameToId(name), true);
         if (access.nesting) {
             RegisterID reg = frame.allocReg();
-            CallObject **pobj = &access.nesting->activeCall;
+            JSObject **pobj = &access.nesting->activeCall;
             masm.move(ImmPtr(pobj), reg);
             masm.loadPtr(Address(reg), reg);
             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
             return;
         }
     }
 
     PICGenInfo pic(ic::PICInfo::BIND, JSOp(*PC));
@@ -5754,90 +5806,16 @@ mjit::Compiler::jsop_bindname(PropertyNa
 
     frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
 
     stubcc.rejoin(Changes(1));
 }
 #endif
 
 void
-mjit::Compiler::jsop_aliasedArg(unsigned arg, bool get, bool poppedAfter)
-{
-    RegisterID reg = frame.allocReg();
-    masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfArgsObj()), reg);
-    size_t dataOff = ArgumentsObject::getDataSlotOffset();
-    masm.loadPrivate(Address(reg, dataOff), reg);
-    int32_t argsOff = ArgumentsData::offsetOfArgs() + arg * sizeof(Value);
-    masm.addPtr(Imm32(argsOff), reg, reg);
-    if (get) {
-        FrameEntry *fe = frame.getArg(arg);
-        JSValueType type = fe->isTypeKnown() ? fe->getKnownType() : JSVAL_TYPE_UNKNOWN;
-        frame.push(Address(reg), type, true /* = reuseBase */);
-    } else {
-        frame.storeTo(frame.peek(-1), Address(reg), poppedAfter);
-        frame.freeReg(reg);
-    }
-}
-
-void
-mjit::Compiler::jsop_aliasedVar(ScopeCoordinate sc, bool get, bool poppedAfter)
-{
-    RegisterID reg = frame.allocReg();
-    masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), reg);
-    for (unsigned i = 0; i < sc.hops; i++)
-        masm.loadPayload(Address(reg, ScopeObject::offsetOfEnclosingScope()), reg);
-
-    unsigned slot = ScopeObject::CALL_BLOCK_RESERVED_SLOTS + sc.slot;
-
-    /*
-     * TODO bug 753158: Call and Block objects should use the same layout
-     * strategy: up to the maximum numFixedSlots and overflow (if any) in
-     * dynamic slots. For now, we special case for different layouts:
-     */
-    Address addr;
-    if (ScopeCoordinateBlockChain(script, PC)) {
-        /*
-         * Block objects use a fixed AllocKind which means an invariant number
-         * of fixed slots. Any slot below the fixed slot count is inline, any
-         * slot over is in the dynamic slots.
-         */
-        uint32_t nfixed = gc::GetGCKindSlots(BlockObject::FINALIZE_KIND);
-        if (nfixed <= slot) {
-            masm.loadPtr(Address(reg, JSObject::offsetOfSlots()), reg);
-            addr = Address(reg, (slot - nfixed) * sizeof(Value));
-        } else {
-            addr = Address(reg, JSObject::getFixedSlotOffset(slot));
-        }
-    } else {
-        /*
-         * Using special-case hackery in Shape::getChildBinding, CallObject
-         * slots are either altogether in fixed slots or altogether in dynamic
-         * slots (by having numFixed == RESERVED_SLOTS).
-         */
-        if (script->bindings.lastShape()->numFixedSlots() <= slot) {
-            masm.loadPtr(Address(reg, JSObject::offsetOfSlots()), reg);
-            addr = Address(reg, sc.slot * sizeof(Value));
-        } else {
-            addr = Address(reg, JSObject::getFixedSlotOffset(slot));
-        }
-    }
-
-    if (get) {
-        FrameEntry *fe = script->bindings.slotIsLocal(sc.slot)
-                         ? frame.getLocal(script->bindings.slotToLocal(sc.slot))
-                         : frame.getArg(script->bindings.slotToArg(sc.slot));
-        JSValueType type = fe->isTypeKnown() ? fe->getKnownType() : JSVAL_TYPE_UNKNOWN;
-        frame.push(addr, type, true /* = reuseBase */);
-    } else {
-        frame.storeTo(frame.peek(-1), addr, poppedAfter);
-        frame.freeReg(reg);
-    }
-}
-
-void
 mjit::Compiler::jsop_this()
 {
     frame.pushThis();
 
     /*
      * In strict mode code, we don't wrap 'this'.
      * In direct-call eval code, we wrapped 'this' before entering the eval.
      * In global code, 'this' is always an object.
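
The prologue restructuring in generatePrologue above boils down to a three-way choice: heavyweight functions always run the FunctionFramePrologue stub, light functions just load the scope chain inline, and light nested functions additionally compare the frame's scope chain against the parent's active call object, bumping activeFrames inline on a match and falling back to the stub on a mismatch. A rough sketch of that decision, with stand-in types (the real code emits these branches as JIT code rather than executing them directly):

    struct Nesting { void *activeCall; unsigned activeFrames; Nesting *children; };

    enum ProloguePath { StubPrologue, InlineFastPath, InlineThenStubOnMismatch };

    ProloguePath ChooseProloguePath(bool isHeavyweight, const Nesting *nesting,
                                    const void *frameScopeChain)
    {
        if (isHeavyweight)
            return StubPrologue;                    // FunctionFramePrologue stub call
        if (!nesting)
            return InlineFastPath;                  // only the scope-chain load is needed
        // The inline fast path bumps nesting->activeFrames when the frame's scope
        // chain is the parent's current call object; otherwise take the stub.
        return frameScopeChain == nesting->activeCall ? InlineFastPath
                                                      : InlineThenStubOnMismatch;
    }
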
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -621,18 +621,18 @@ private:
                     Jump *slow = NULL, bool *trampoline = NULL,
                     bool fallthrough = false);
     bool startLoop(jsbytecode *head, Jump entry, jsbytecode *entryTarget);
     bool finishLoop(jsbytecode *head);
     inline bool shouldStartLoop(jsbytecode *head);
     void jsop_bindname(PropertyName *name);
     void jsop_setglobal(uint32_t index);
     void jsop_getprop_slow(PropertyName *name, bool forPrototype = false);
-    void jsop_aliasedArg(unsigned i, bool get, bool poppedAfter = false);
-    void jsop_aliasedVar(ScopeCoordinate sc, bool get, bool poppedAfter = false);
+    void jsop_getarg(uint32_t slot);
+    void jsop_setarg(uint32_t slot, bool popped);
     void jsop_this();
     void emitReturn(FrameEntry *fe);
     void emitFinalReturn(Assembler &masm);
     void loadReturnValue(Assembler *masm, FrameEntry *fe);
     void emitReturnValue(Assembler *masm, FrameEntry *fe);
     void emitInlineReturnValue(FrameEntry *fe);
     void dispatchCall(VoidPtrStubUInt32 stub, uint32_t argc);
     void interruptCheckHelper();
--- a/js/src/methodjit/InvokeHelpers.cpp
+++ b/js/src/methodjit/InvokeHelpers.cpp
@@ -57,17 +57,17 @@ FindExceptionHandler(JSContext *cx)
 
             /*
              * Set pc to the first bytecode after the try note to point
              * to the beginning of catch or finally or to [enditer] closing
              * the for-in loop.
              */
             jsbytecode *pc = script->main() + tn->start + tn->length;
             cx->regs().pc = pc;
-            cx->regs().sp = cx->regs().spForStackDepth(tn->stackDepth);
+            cx->regs().sp = fp->base() + tn->stackDepth;
 
             switch (tn->kind) {
                 case JSTRY_CATCH:
                   JS_ASSERT(JSOp(*pc) == JSOP_ENTERBLOCK);
 
 #if JS_HAS_GENERATORS
                   /* Catch cannot intercept the closing of a generator. */
                   if (JS_UNLIKELY(cx->getPendingException().isMagic(JS_GENERATOR_CLOSING)))
@@ -114,16 +114,32 @@ FindExceptionHandler(JSContext *cx)
     }
 
     return NULL;
 }
 
 /*
  * Clean up a frame and return.
  */
+static void
+InlineReturn(VMFrame &f)
+{
+    JS_ASSERT(f.fp() != f.entryfp);
+    AssertValidFunctionScopeChainAtExit(f.fp());
+
+    f.cx->stack.popInlineFrame(f.regs);
+
+    DebugOnly<JSOp> op = JSOp(*f.regs.pc);
+    JS_ASSERT(op == JSOP_CALL ||
+              op == JSOP_NEW ||
+              op == JSOP_EVAL ||
+              op == JSOP_FUNCALL ||
+              op == JSOP_FUNAPPLY);
+    f.regs.pc += JSOP_CALL_LENGTH;
+}
 
 void JS_FASTCALL
 stubs::SlowCall(VMFrame &f, uint32_t argc)
 {
     if (*f.regs.pc == JSOP_FUNAPPLY && !GuardFunApplySpeculation(f.cx, f.regs))
         THROW();
 
     CallArgs args = CallArgsFromSp(argc, f.regs.sp);
@@ -141,17 +157,17 @@ stubs::SlowNew(VMFrame &f, uint32_t argc
         THROW();
 
     types::TypeScript::Monitor(f.cx, f.script(), f.pc(), args.rval());
 }
 
 static inline bool
 CheckStackQuota(VMFrame &f)
 {
-    JS_ASSERT(f.regs.stackDepth() == 0);
+    JS_ASSERT(f.regs.sp == f.fp()->base());
 
     f.stackLimit = f.cx->stack.space().getStackLimit(f.cx, DONT_REPORT_ERROR);
     if (f.stackLimit)
         return true;
 
     /* Remove the current partially-constructed frame before throwing. */
     f.cx->stack.popFrameAfterOverflow();
     js_ReportOverRecursed(f.cx);
@@ -285,16 +301,20 @@ UncachedInlineCall(VMFrame &f, InitialFr
 
     /* Get pointer to new frame/slots, prepare arguments. */
     if (!cx->stack.pushInlineFrame(cx, regs, args, *newfun, newscript, initial, &f.stackLimit))
         return false;
 
     /* Finish the handoff to the new frame regs. */
     PreserveRegsGuard regsGuard(cx, regs);
 
+    /* Scope with a call object parented by callee's parent. */
+    if (!regs.fp()->functionPrologue(cx))
+        return false;
+
     /*
      * If newscript was successfully compiled, run it. Skip for calls which
      * will be constructing a new type object for 'this'.
      */
     if (!newType) {
         if (JITScript *jit = newscript->getJIT(regs.fp()->isConstructing(), cx->compartment->needsBarrier())) {
             if (jit->invokeEntry) {
                 *pret = jit->invokeEntry;
@@ -518,32 +538,26 @@ js_InternalThrow(VMFrame &f)
             // This can turn a throw or error into a healthy return. Note that
             // we will run ScriptDebugEpilogue again (from AnyFrameEpilogue);
             // ScriptDebugEpilogue is prepared for this eventuality.
             if (js::ScriptDebugEpilogue(cx, f.fp(), false))
                 return cx->jaegerRuntime().forceReturnFromExternC();
         }
 
 
-        f.fp()->epilogue(f.cx);
+        ScriptEpilogue(f.cx, f.fp(), false);
 
         // Don't remove the last frame, this is the responsibility of
         // JaegerShot()'s caller. We only guarantee that ScriptEpilogue()
         // has been run.
         if (f.entryfp == f.fp())
             break;
 
-        f.cx->stack.popInlineFrame(f.regs);
-        DebugOnly<JSOp> op = JSOp(*f.regs.pc);
-        JS_ASSERT(op == JSOP_CALL ||
-                  op == JSOP_NEW ||
-                  op == JSOP_EVAL ||
-                  op == JSOP_FUNCALL ||
-                  op == JSOP_FUNAPPLY);
-        f.regs.pc += JSOP_CALL_LENGTH;
+        JS_ASSERT(&cx->regs() == &f.regs);
+        InlineReturn(f);
     }
 
     JS_ASSERT(&cx->regs() == &f.regs);
 
     if (!pc)
         return NULL;
 
     StackFrame *fp = cx->fp();
@@ -596,17 +610,17 @@ void JS_FASTCALL
 stubs::CreateThis(VMFrame &f, JSObject *proto)
 {
     JSContext *cx = f.cx;
     StackFrame *fp = f.fp();
     RootedObject callee(cx, &fp->callee());
     JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto);
     if (!obj)
         THROW();
-    fp->thisValue() = ObjectValue(*obj);
+    fp->formalArgs()[-1].setObject(*obj);
 }
 
 void JS_FASTCALL
 stubs::ScriptDebugPrologue(VMFrame &f)
 {
     Probes::enterJSFun(f.cx, f.fp()->maybeFun(), f.fp()->script());
     JSTrapStatus status = js::ScriptDebugPrologue(f.cx, f.fp());
     switch (status) {
@@ -687,35 +701,33 @@ FinishVarIncOp(VMFrame &f, RejoinState r
     JSOp op = JSOp(*f.pc());
     JS_ASSERT(op == JSOP_LOCALINC || op == JSOP_INCLOCAL ||
               op == JSOP_LOCALDEC || op == JSOP_DECLOCAL ||
               op == JSOP_ARGINC || op == JSOP_INCARG ||
               op == JSOP_ARGDEC || op == JSOP_DECARG);
     const JSCodeSpec *cs = &js_CodeSpec[op];
 
     unsigned i = GET_SLOTNO(f.pc());
-    Value *var = (JOF_TYPE(cs->format) == JOF_LOCAL)
-                 ? &f.fp()->unaliasedLocal(i)
-                 : &f.fp()->unaliasedFormal(i);
+    Value *var = (JOF_TYPE(cs->format) == JOF_LOCAL) ? f.fp()->slots() + i : &f.fp()->formalArg(i);
 
     if (rejoin == REJOIN_POS) {
         double d = ov.toNumber();
         double N = (cs->format & JOF_INC) ? 1 : -1;
         if (!nv.setNumber(d + N))
             types::TypeScript::MonitorOverflow(cx, f.script(), f.pc());
     }
 
     *var = nv;
     *vp = (cs->format & JOF_POST) ? ov : nv;
 }
 
 extern "C" void *
 js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VMFrame &f)
 {
-    FrameRejoinState jsrejoin = f.fp()->rejoin();
+    JSRejoinState jsrejoin = f.fp()->rejoin();
     RejoinState rejoin;
     if (jsrejoin & 0x1) {
         /* Rejoin after a scripted call finished. Restore f.regs.pc and f.regs.inlined (NULL) */
         uint32_t pcOffset = jsrejoin >> 1;
         f.regs.pc = f.fp()->script()->code + pcOffset;
         f.regs.clearInlined();
         rejoin = REJOIN_SCRIPTED;
     } else {
@@ -740,22 +752,22 @@ js_InternalInterpret(void *returnData, v
     analyze::ScriptAnalysis *analysis = script->analysis();
 
     /*
      * f.regs.sp is not normally maintained by stubs (except for call prologues
      * where it indicates the new frame), so is not expected to be coherent
      * here. Update it to its value at the start of the opcode.
      */
     Value *oldsp = f.regs.sp;
-    f.regs.sp = f.regs.spForStackDepth(analysis->getCode(pc).stackDepth);
+    f.regs.sp = fp->base() + analysis->getCode(pc).stackDepth;
 
     jsbytecode *nextpc = pc + GetBytecodeLength(pc);
     Value *nextsp = NULL;
     if (nextpc != script->code + script->length && analysis->maybeCode(nextpc))
-        nextsp = f.regs.spForStackDepth(analysis->getCode(nextpc).stackDepth);
+        nextsp = fp->base() + analysis->getCode(nextpc).stackDepth;
 
     JS_ASSERT(&cx->regs() == &f.regs);
 
 #ifdef JS_METHODJIT_SPEW
     JaegerSpew(JSpew_Recompile, "interpreter rejoin (file \"%s\") (line \"%d\") (op %s) (opline \"%d\")\n",
                script->filename, script->lineno, OpcodeNames[op], PCToLineNumber(script, pc));
 #endif
 
@@ -850,23 +862,28 @@ js_InternalInterpret(void *returnData, v
         f.regs.pc = nextpc;
         break;
 
       case REJOIN_PUSH_OBJECT:
         nextsp[-1].setObject(* (JSObject *) returnReg);
         f.regs.pc = nextpc;
         break;
 
+      case REJOIN_DEFLOCALFUN:
+        fp->slots()[GET_SLOTNO(pc)].setObject(* (JSObject *) returnReg);
+        f.regs.pc = nextpc;
+        break;
+
       case REJOIN_THIS_PROTOTYPE: {
         RootedObject callee(cx, &fp->callee());
         JSObject *proto = f.regs.sp[0].isObject() ? &f.regs.sp[0].toObject() : NULL;
         JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto);
         if (!obj)
             return js_InternalThrow(f);
-        fp->thisValue() = ObjectValue(*obj);
+        fp->formalArgs()[-1].setObject(*obj);
 
         if (Probes::callTrackingActive(cx))
             Probes::enterJSFun(f.cx, f.fp()->maybeFun(), f.fp()->script());
 
         if (script->debugMode) {
             JSTrapStatus status = js::ScriptDebugPrologue(f.cx, f.fp());
             switch (status) {
               case JSTRAP_CONTINUE:
@@ -880,67 +897,52 @@ js_InternalInterpret(void *returnData, v
               default:
                 JS_NOT_REACHED("bad ScriptDebugPrologue status");
             }
         }
 
         break;
       }
 
-      /*
-       * Each of these cases indicates a point of progress through
-       * generatePrologue. Execute the rest of the prologue here.
-       */
       case REJOIN_CHECK_ARGUMENTS:
+        /*
+         * Do all the work needed in arity check JIT prologues after the
+         * arguments check occurs (FixupArity has been called if needed, but
+         * the stack check and late prologue have not been performed).
+         */
         if (!CheckStackQuota(f))
             return js_InternalThrow(f);
-        fp->initVarsToUndefined();
+
+        SetValueRangeToUndefined(fp->slots(), script->nfixed);
+
+        if (!fp->functionPrologue(cx))
+            return js_InternalThrow(f);
+        /* FALLTHROUGH */
+
+      case REJOIN_FUNCTION_PROLOGUE:
         fp->scopeChain();
-        if (!fp->prologue(cx, types::UseNewTypeAtEntry(cx, fp)))
+
+        /* Construct the 'this' object for the frame if necessary. */
+        if (!ScriptPrologueOrGeneratorResume(cx, fp, types::UseNewTypeAtEntry(cx, fp)))
             return js_InternalThrow(f);
 
         /*
-         * We would normally call ScriptDebugPrologue here. But in debug mode,
-         * we only use JITted functions' invokeEntry entry point, whereas
-         * CheckArgumentTypes (REJOIN_CHECK_ARGUMENTS) is only reachable via
-         * the other entry points.
+         * Having called ScriptPrologueOrGeneratorResume, we would normally call
+         * ScriptDebugPrologue here. But in debug mode, we only use JITted
+         * functions' invokeEntry entry point, whereas CheckArgumentTypes
+         * (REJOIN_CHECK_ARGUMENTS) and FunctionFramePrologue
+         * (REJOIN_FUNCTION_PROLOGUE) are only reachable via the other entry
+         * points. So we should never need either of these rejoin tails in debug
+         * mode.
          *
          * If we fix bug 699196 ("Debug mode code could use inline caches
-         * now"), then this case will become reachable again.
+         * now"), then these cases will become reachable again.
          */
         JS_ASSERT(!cx->compartment->debugMode());
-        break;
 
-      /* Finish executing the tail of generatePrologue. */
-      case REJOIN_FUNCTION_PROLOGUE:
-        if (fp->isConstructing()) {
-            JS_ASSERT(false);
-            RootedObject callee(cx, &fp->callee());
-            JSObject *obj = js_CreateThisForFunction(cx, callee, types::UseNewTypeAtEntry(cx, fp));
-            if (!obj)
-                return js_InternalThrow(f);
-            fp->functionThis() = ObjectValue(*obj);
-        }
-        /* FALLTHROUGH */
-      case REJOIN_EVAL_PROLOGUE:
-        if (cx->compartment->debugMode()) {
-            Probes::enterJSFun(cx, fp->maybeFun(), fp->script());
-            JSTrapStatus status = ScriptDebugPrologue(cx, fp);
-            switch (status) {
-              case JSTRAP_CONTINUE:
-                break;
-              case JSTRAP_RETURN:
-                return f.cx->jaegerRuntime().forceReturnFromFastCall();
-              case JSTRAP_ERROR:
-              case JSTRAP_THROW:
-                return js_InternalThrow(f);
-              default:
-                JS_NOT_REACHED("bad ScriptDebugPrologue status");
-            }
-        }
         break;
 
       case REJOIN_CALL_PROLOGUE:
       case REJOIN_CALL_PROLOGUE_LOWERED_CALL:
       case REJOIN_CALL_PROLOGUE_LOWERED_APPLY:
         if (returnReg) {
             uint32_t argc = 0;
             if (rejoin == REJOIN_CALL_PROLOGUE)
@@ -1053,17 +1055,17 @@ js_InternalInterpret(void *returnData, v
       }
 
       default:
         JS_NOT_REACHED("Missing rejoin");
     }
 
     if (nextDepth == UINT32_MAX)
         nextDepth = analysis->getCode(f.regs.pc).stackDepth;
-    f.regs.sp = f.regs.spForStackDepth(nextDepth);
+    f.regs.sp = fp->base() + nextDepth;
 
     /*
      * Monitor the result of the previous op when finishing a JOF_TYPESET op.
      * The result may not have been marked if we bailed out while inside a stub
      * for the op.
      */
     if (f.regs.pc == nextpc && (js_CodeSpec[op].format & JOF_TYPESET))
         types::TypeScript::Monitor(cx, script, pc, f.regs.sp[-1]);
--- a/js/src/methodjit/MethodJIT.cpp
+++ b/js/src/methodjit/MethodJIT.cpp
@@ -1051,16 +1051,20 @@ mjit::EnterMethodJIT(JSContext *cx, Stac
     /* The entry frame should have finished. */
     JS_ASSERT(fp == cx->fp());
 
     if (ok) {
         /* The trampoline wrote the return value but did not set the HAS_RVAL flag. */
         fp->markReturnValue();
     }
 
+    /* See comment in mjit::Compiler::emitReturn. */
+    if (fp->isFunctionFrame())
+        fp->updateEpilogueFlags();
+
     return ok ? Jaeger_Returned : Jaeger_Throwing;
 }
 
 static inline JaegerStatus
 CheckStackAndEnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, bool partial)
 {
     JS_CHECK_RECURSION(cx, return Jaeger_ThrowBeforeEnter);
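
The updateEpilogueFlags call added to EnterMethodJIT above exists to avoid double-putting activation objects, per the long comment in mjit::Compiler::emitReturn earlier in this patch. A rough sketch of the intent, with hypothetical flag and field names rather than the real StackFrame API:

    #include <cstdint>

    enum : uint32_t { HAS_CALL_OBJ = 1u << 0, HAS_ARGS_OBJ = 1u << 1 };

    struct ToyFrame {
        uint32_t flags;
        bool callObjAlreadyPut;      // set if the jitted epilogue already put it
        bool argsObjAlreadyPut;

        // Clear the flags for objects the epilogue already put, so the generic
        // ContextStack::pop path does not put them a second time.
        void updateEpilogueFlags() {
            if ((flags & HAS_CALL_OBJ) && callObjAlreadyPut)
                flags &= ~HAS_CALL_OBJ;
            if ((flags & HAS_ARGS_OBJ) && argsObjAlreadyPut)
                flags &= ~HAS_ARGS_OBJ;
        }
    };
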
 
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -104,17 +104,17 @@ struct VMFrame
     static size_t offsetOfRegsPc() {
         return offsetof(VMFrame, regs.pc);
     }
 
     JSContext    *cx;
     Value        *stackLimit;
     StackFrame   *entryfp;
     FrameRegs    *oldregs;
-    FrameRejoinState stubRejoin;  /* How to rejoin if inside a call from an IC stub. */
+    JSRejoinState stubRejoin;  /* How to rejoin if inside a call from an IC stub. */
 
 #if defined(JS_CPU_X86)
     void         *unused0, *unused1;  /* For 16 byte alignment */
 #endif
 
 #if defined(JS_CPU_X86)
     void *savedEBX;
     void *savedEDI;
@@ -289,33 +289,35 @@ enum RejoinState {
      * that they have already been patched and can be ignored.
      */
     REJOIN_NATIVE_PATCHED,
 
     /* Call returns a payload, which should be pushed before starting next bytecode. */
     REJOIN_PUSH_BOOLEAN,
     REJOIN_PUSH_OBJECT,
 
+    /* Call returns an object, which should be assigned to a local per the current bytecode. */
+    REJOIN_DEFLOCALFUN,
+
     /*
      * During the prologue of constructing scripts, after the function's
      * .prototype property has been fetched.
      */
     REJOIN_THIS_PROTOTYPE,
 
     /*
      * Type check on arguments failed during prologue, need stack check and
      * the rest of the JIT prologue before the script can execute.
      */
     REJOIN_CHECK_ARGUMENTS,
 
     /*
-     * The script's jitcode was discarded during one of the following steps of
-     * a frame's prologue.
+     * The script's jitcode was discarded after marking an outer function as
+     * reentrant or due to a GC while creating a call object.
      */
-    REJOIN_EVAL_PROLOGUE,
     REJOIN_FUNCTION_PROLOGUE,
 
     /*
      * State after calling a stub which returns a JIT code pointer for a call
      * or NULL for an already-completed call.
      */
     REJOIN_CALL_PROLOGUE,
     REJOIN_CALL_PROLOGUE_LOWERED_CALL,
@@ -332,24 +334,24 @@ enum RejoinState {
     /*
      * For an opcode fused with IFEQ/IFNE, call returns a boolean indicating
      * the result of the comparison and whether to take or not take the branch.
      */
     REJOIN_BRANCH
 };
 
 /* Get the rejoin state for a StackFrame after returning from a scripted call. */
-static inline FrameRejoinState
+static inline JSRejoinState
 ScriptedRejoin(uint32_t pcOffset)
 {
     return REJOIN_SCRIPTED | (pcOffset << 1);
 }
 
 /* Get the rejoin state for a StackFrame after returning from a stub call. */
-static inline FrameRejoinState
+static inline JSRejoinState
 StubRejoin(RejoinState rejoin)
 {
     return rejoin << 1;
 }
 
 /* Helper to watch for recompilation and frame expansion activity on a compartment. */
 struct RecompilationMonitor
 {
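
ScriptedRejoin and StubRejoin above pack two different payloads into one word: the low bit records whether the frame rejoins after a scripted call (remaining bits carry the pc offset) or after a stub call (remaining bits carry the RejoinState), which is exactly what js_InternalInterpret tests with 'jsrejoin & 0x1'. A toy version of the packing, with illustrative enum values rather than the real ones:

    #include <cassert>
    #include <cstdint>

    typedef uint32_t ToyRejoinState;

    enum ToyRejoin : uint32_t { TOY_REJOIN_SCRIPTED = 1, TOY_REJOIN_PUSH_OBJECT = 2 };

    static inline ToyRejoinState PackScripted(uint32_t pcOffset) {
        return TOY_REJOIN_SCRIPTED | (pcOffset << 1);   // low bit set
    }
    static inline ToyRejoinState PackStub(ToyRejoin r) {
        return uint32_t(r) << 1;                        // low bit clear
    }

    int main() {
        ToyRejoinState s = PackScripted(42);
        assert((s & 0x1) && (s >> 1) == 42);            // scripted: recover pc offset
        ToyRejoinState t = PackStub(TOY_REJOIN_PUSH_OBJECT);
        assert(!(t & 0x1) && ToyRejoin(t >> 1) == TOY_REJOIN_PUSH_OBJECT);
        return 0;
    }
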
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -1049,17 +1049,17 @@ ic::SplatApplyArgs(VMFrame &f)
         unsigned length = f.regs.fp()->numActualArgs();
         JS_ASSERT(length <= StackSpace::ARGS_LENGTH_MAX);
 
         f.regs.sp--;
         if (!BumpStack(f, length))
             THROWV(false);
 
         /* Steps 7-8. */
-        f.regs.fp()->forEachUnaliasedActual(CopyTo(f.regs.sp));
+        f.regs.fp()->forEachCanonicalActualArg(CopyTo(f.regs.sp));
 
         f.regs.sp += length;
         f.u.call.dynamicArgc = length;
         return true;
     }
 
     /*
      * This stub should mimic the steps taken by js_fun_apply. Step 1 and part
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -289,16 +289,18 @@ class SetPropCompiler : public PICStubCo
                                             Address(pic.objReg, JSObject::offsetOfType()),
                                             ImmPtr(obj->getType(cx)));
             if (!otherGuards.append(typeGuard))
                 return error();
         }
 
         JS_ASSERT_IF(!shape->hasDefaultSetter(), obj->isCall());
 
+        MaybeJump skipOver;
+
         if (adding) {
             JS_ASSERT(shape->hasSlot());
             pic.shapeRegHasBaseShape = false;
 
             if (!GeneratePrototypeGuards(cx, otherGuards, masm, obj, NULL,
                                          pic.objReg, pic.shapeReg)) {
                 return error();
             }
@@ -346,21 +348,39 @@ class SetPropCompiler : public PICStubCo
             //    #         can have the same shape, thus we must not rely on the identity
             // >--+--<      of 'fun' remaining the same. However, since:
             //   |||         1. the shape includes all arguments and locals and their setters
             //    \\     V     and getters, and
             //      \===/    2. arguments and locals have different getters
             //              then we can rely on fun->nargs remaining invariant.
             JSFunction *fun = obj->asCall().getCalleeFunction();
             uint16_t slot = uint16_t(shape->shortid());
-            if (shape->setterOp() == CallObject::setVarOp)
-                slot += fun->nargs;
-            slot += CallObject::RESERVED_SLOTS;
-            Address address = masm.objPropAddress(obj, pic.objReg, slot);
-            masm.storeValue(pic.u.vr, address);
+
+            /* Guard that the call object has a frame. */
+            masm.loadObjPrivate(pic.objReg, pic.shapeReg, obj->numFixedSlots());
+            Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
+
+            {
+                Address addr(pic.shapeReg, shape->setterOp() == CallObject::setArgOp
+                                           ? StackFrame::offsetOfFormalArg(fun, slot)
+                                           : StackFrame::offsetOfFixed(slot));
+                masm.storeValue(pic.u.vr, addr);
+                skipOver = masm.jump();
+            }
+
+            escapedFrame.linkTo(masm.label(), &masm);
+            {
+                if (shape->setterOp() == CallObject::setVarOp)
+                    slot += fun->nargs;
+
+                slot += CallObject::RESERVED_SLOTS;
+                Address address = masm.objPropAddress(obj, pic.objReg, slot);
+
+                masm.storeValue(pic.u.vr, address);
+            }
 
             pic.shapeRegHasBaseShape = false;
         }
 
         Jump done = masm.jump();
 
         // Common all secondary guards into one big exit.
         MaybeJump slowExit;
@@ -385,16 +405,18 @@ class SetPropCompiler : public PICStubCo
         }
 
         buffer.link(shapeGuard, pic.slowPathStart);
         if (slowExit.isSet())
             buffer.link(slowExit.get(), pic.slowPathStart);
         for (Jump *pj = slowExits.begin(); pj != slowExits.end(); ++pj)
             buffer.link(*pj, pic.slowPathStart);
         buffer.link(done, pic.fastPathRejoin);
+        if (skipOver.isSet())
+            buffer.link(skipOver.get(), pic.fastPathRejoin);
         CodeLocationLabel cs = buffer.finalize(f);
         JaegerSpew(JSpew_PICs, "generate setprop stub %p %p %d at %p\n",
                    (void*)&pic,
                    (void*)initialShape,
                    pic.stubsGenerated,
                    cs.executableAddress());
 
         // This function can patch either the inline fast path for a generated
@@ -731,19 +753,16 @@ struct GetPropHelper {
     LookupStatus lookupAndTest() {
         LookupStatus status = lookup();
         if (status != Lookup_Cacheable)
             return status;
         return testForGet();
     }
 };
 
-namespace js {
-namespace mjit {
-
 class GetPropCompiler : public PICStubCompiler
 {
     JSObject    *obj;
     PropertyName *name;
     int         lastStubSecondShapeGuard;
 
   public:
     GetPropCompiler(VMFrame &f, JSScript *script, JSObject *obj, ic::PICInfo &pic, PropertyName *name,
@@ -1364,19 +1383,16 @@ class GetPropCompiler : public PICStubCo
             !pic.inlinePathPatched) {
             return patchInline(getprop.holder, getprop.shape);
         }
 
         return generateStub(getprop.holder, getprop.shape);
     }
 };
 
-}  // namespace mjit
-}  // namespace js
-
 class ScopeNameCompiler : public PICStubCompiler
 {
   private:
     typedef Vector<Jump, 8> JumpList;
 
     RootedObject scopeChain;
     RootedPropertyName name;
     GetPropHelper<ScopeNameCompiler> getprop;
@@ -1545,19 +1561,19 @@ class ScopeNameCompiler : public PICStub
         if (pic.kind == ic::PICInfo::NAME)
             masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
 
         JS_ASSERT(obj == getprop.holder);
         JS_ASSERT(getprop.holder != &scopeChain->global());
 
         CallObjPropKind kind;
         const Shape *shape = getprop.shape;
-        if (shape->setterOp() == CallObject::setArgOp) {
+        if (shape->getterOp() == CallObject::getArgOp) {
             kind = ARG;
-        } else if (shape->setterOp() == CallObject::setVarOp) {
+        } else if (shape->getterOp() == CallObject::getVarOp) {
             kind = VAR;
         } else {
             return disable("unhandled callobj sprop getter");
         }
 
         LookupStatus status = walkScopeChain(masm, fails);
         if (status != Lookup_Cacheable)
             return status;
@@ -1565,26 +1581,48 @@ class ScopeNameCompiler : public PICStub
         /* If a scope chain walk was required, the final object needs a NULL test. */
         MaybeJump finalNull;
         if (pic.kind == ic::PICInfo::NAME)
             finalNull = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg);
         masm.loadShape(pic.objReg, pic.shapeReg);
         Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
                                          ImmPtr(getprop.holder->lastProperty()));
 
+        /* Get callobj's stack frame. */
+        masm.loadObjPrivate(pic.objReg, pic.shapeReg, getprop.holder->numFixedSlots());
+
         JSFunction *fun = getprop.holder->asCall().getCalleeFunction();
-        unsigned slot = shape->shortid();
-        if (kind == VAR)
-            slot += fun->nargs;
-        slot += CallObject::RESERVED_SLOTS;
-        Address address = masm.objPropAddress(obj, pic.objReg, slot);
-
-        /* Safe because type is loaded first. */
-        masm.loadValueAsComponents(address, pic.shapeReg, pic.objReg);
-
+        uint16_t slot = uint16_t(shape->shortid());
+
+        Jump skipOver;
+        Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
+
+        /* Not-escaped case. */
+        {
+            Address addr(pic.shapeReg, kind == ARG ? StackFrame::offsetOfFormalArg(fun, slot)
+                                                   : StackFrame::offsetOfFixed(slot));
+            masm.loadPayload(addr, pic.objReg);
+            masm.loadTypeTag(addr, pic.shapeReg);
+            skipOver = masm.jump();
+        }
+
+        escapedFrame.linkTo(masm.label(), &masm);
+
+        {
+            if (kind == VAR)
+                slot += fun->nargs;
+
+            slot += CallObject::RESERVED_SLOTS;
+            Address address = masm.objPropAddress(obj, pic.objReg, slot);
+
+            /* Safe because type is loaded first. */
+            masm.loadValueAsComponents(address, pic.shapeReg, pic.objReg);
+        }
+
+        skipOver.linkTo(masm.label(), &masm);
         Jump done = masm.jump();
 
         // All failures flow to here, so there is a common point to patch.
         for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
             pj->linkTo(masm.label(), &masm);
         if (finalNull.isSet())
             finalNull.get().linkTo(masm.label(), &masm);
         finalShape.linkTo(masm.label(), &masm);
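
The stub generated above now branches on whether the call object's private stack frame is still live: if it is, the arg/var is loaded straight from the frame's formal/fixed slots; once the frame has escaped, the value lives in the call object's own slots at RESERVED_SLOTS + shortid (plus nargs for vars). The following is a minimal C++ sketch of that decision using made-up stand-in types (FakeFrame, FakeCallObject), not the real masm or CallObject API; the reserved-slot count is illustrative only.

// Illustrative sketch only: simplified stand-ins for the engine's CallObject
// and StackFrame, showing the value-lookup logic the PIC stub above encodes.
#include <vector>

struct FakeFrame {
    std::vector<double> formals;   // stand-in for formal-arg slots on the stack
    std::vector<double> fixed;     // stand-in for fixed (var) slots
};

struct FakeCallObject {
    static constexpr unsigned RESERVED_SLOTS = 3;  // illustrative value
    FakeFrame *frame;              // null once the frame has "escaped"
    unsigned nargs;
    std::vector<double> slots;     // RESERVED_SLOTS + args + vars
};

enum Kind { ARG, VAR };

// Read a closed-over arg/var the way the generated stub does: prefer the live
// frame, otherwise fall back to the call object's slots.
double readCallObjProp(const FakeCallObject &callobj, Kind kind, unsigned shortid)
{
    if (FakeFrame *fp = callobj.frame) {
        // Not-escaped case: the stack frame holds the canonical value.
        return kind == ARG ? fp->formals[shortid] : fp->fixed[shortid];
    }
    // Escaped case: values were copied into the call object at frame exit.
    unsigned slot = shortid;
    if (kind == VAR)
        slot += callobj.nargs;
    slot += FakeCallObject::RESERVED_SLOTS;
    return callobj.slots[slot];
}
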
--- a/js/src/methodjit/StubCalls.cpp
+++ b/js/src/methodjit/StubCalls.cpp
@@ -907,17 +907,17 @@ stubs::NewInitObject(VMFrame &f, JSObjec
 
 void JS_FASTCALL
 stubs::InitElem(VMFrame &f, uint32_t last)
 {
     JSContext *cx = f.cx;
     FrameRegs &regs = f.regs;
 
     /* Pop the element's value into rval. */
-    JS_ASSERT(regs.stackDepth() >= 3);
+    JS_ASSERT(regs.sp - f.fp()->base() >= 3);
     const Value &rref = regs.sp[-1];
 
     /* Find the object being initialized at top of stack. */
     const Value &lref = regs.sp[-3];
     JS_ASSERT(lref.isObject());
     JSObject *obj = &lref.toObject();
 
     /* Fetch id now that we have obj. */
@@ -1015,17 +1015,17 @@ stubs::Iter(VMFrame &f, uint32_t flags)
 
 static void
 InitPropOrMethod(VMFrame &f, PropertyName *name, JSOp op)
 {
     JSContext *cx = f.cx;
     FrameRegs &regs = f.regs;
 
     /* Load the property's initial value into rval. */
-    JS_ASSERT(regs.stackDepth() >= 2);
+    JS_ASSERT(regs.sp - f.fp()->base() >= 2);
     Value rval;
     rval = regs.sp[-1];
 
     /* Load the object being initialized into lval/obj. */
     RootedObject obj(cx, &regs.sp[-2].toObject());
     JS_ASSERT(obj->isNative());
 
     /* Get the immediate property name into id. */
@@ -1043,44 +1043,44 @@ void JS_FASTCALL
 stubs::InitProp(VMFrame &f, PropertyName *name)
 {
     InitPropOrMethod(f, name, JSOP_INITPROP);
 }
 
 void JS_FASTCALL
 stubs::IterNext(VMFrame &f, int32_t offset)
 {
-    JS_ASSERT(f.regs.stackDepth() >= unsigned(offset));
+    JS_ASSERT(f.regs.sp - offset >= f.fp()->base());
     JS_ASSERT(f.regs.sp[-offset].isObject());
 
     JSObject *iterobj = &f.regs.sp[-offset].toObject();
     f.regs.sp[0].setNull();
     f.regs.sp++;
     if (!js_IteratorNext(f.cx, iterobj, &f.regs.sp[-1]))
         THROW();
 }
 
 JSBool JS_FASTCALL
 stubs::IterMore(VMFrame &f)
 {
-    JS_ASSERT(f.regs.stackDepth() >= 1);
+    JS_ASSERT(f.regs.sp - 1 >= f.fp()->base());
     JS_ASSERT(f.regs.sp[-1].isObject());
 
     Value v;
     JSObject *iterobj = &f.regs.sp[-1].toObject();
     if (!js_IteratorMore(f.cx, RootedObject(f.cx, iterobj), &v))
         THROWV(JS_FALSE);
 
     return v.toBoolean();
 }
 
 void JS_FASTCALL
 stubs::EndIter(VMFrame &f)
 {
-    JS_ASSERT(f.regs.stackDepth() >= 1);
+    JS_ASSERT(f.regs.sp - 1 >= f.fp()->base());
     if (!CloseIterator(f.cx, &f.regs.sp[-1].toObject()))
         THROW();
 }
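
The assertions being swapped throughout this file are equivalent: regs.stackDepth() (removed by the backout) and regs.sp - fp->base() (restored) both measure how many values currently sit on the frame's operand stack. A minimal sketch of that identity, with stand-in types rather than the engine's FrameRegs/StackFrame:

// Editor's sketch (not engine code): the two assertion styles seen above are
// two spellings of the same operand-stack-depth condition.
#include <cassert>
#include <cstddef>

struct FakeValue { int bits; };

struct FakeFrame {
    FakeValue *slotsBegin;           // start of the frame's fixed slots
    std::size_t nfixed;              // number of fixed slots
    FakeValue *base() { return slotsBegin + nfixed; }   // bottom of operand stack
};

struct FakeRegs {
    FakeFrame *fp;
    FakeValue *sp;                   // current top of the operand stack
    std::size_t stackDepth() const { return std::size_t(sp - fp->base()); }
};

void checkDepthForms(FakeRegs &regs, std::size_t needed)
{
    // New-style form (removed by the backout):
    assert(regs.stackDepth() >= needed);
    // Old-style form (restored by the backout) -- same condition:
    assert(regs.sp - regs.fp->base() >= std::ptrdiff_t(needed));
}
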
 
 JSString * JS_FASTCALL
 stubs::TypeOf(VMFrame &f)
 {
     const Value &ref = f.regs.sp[-1];
@@ -1120,17 +1120,17 @@ stubs::Throw(VMFrame &f)
     JS_ASSERT(!cx->isExceptionPending());
     cx->setPendingException(f.regs.sp[-1]);
     THROW();
 }
 
 void JS_FASTCALL
 stubs::Arguments(VMFrame &f)
 {
-    ArgumentsObject *obj = ArgumentsObject::createExpected(f.cx, f.fp());
+    ArgumentsObject *obj = ArgumentsObject::create(f.cx, f.fp());
     if (!obj)
         THROW();
     f.regs.sp[0] = ObjectValue(*obj);
 }
 
 JSBool JS_FASTCALL
 stubs::InstanceOf(VMFrame &f)
 {
@@ -1168,31 +1168,37 @@ stubs::FastInstanceOf(VMFrame &f)
 
     f.regs.sp[-3].setBoolean(js_IsDelegate(f.cx, &lref.toObject(), f.regs.sp[-3]));
 }
 
 void JS_FASTCALL
 stubs::EnterBlock(VMFrame &f, JSObject *obj)
 {
     FrameRegs &regs = f.regs;
+    StackFrame *fp = f.fp();
     JS_ASSERT(!f.regs.inlined());
 
     StaticBlockObject &blockObj = obj->asStaticBlock();
+    if (!fp->pushBlock(f.cx, blockObj))
+        THROW();
 
     if (*regs.pc == JSOP_ENTERBLOCK) {
-        JS_ASSERT(regs.stackDepth() == blockObj.stackDepth());
-        JS_ASSERT(regs.stackDepth() + blockObj.slotCount() <= f.fp()->script()->nslots);
+        JS_ASSERT(fp->base() + blockObj.stackDepth() == regs.sp);
         Value *vp = regs.sp + blockObj.slotCount();
+        JS_ASSERT(regs.sp < vp);
+        JS_ASSERT(vp <= fp->slots() + fp->script()->nslots);
         SetValueRangeToUndefined(regs.sp, vp);
         regs.sp = vp;
+    } else if (*regs.pc == JSOP_ENTERLET0) {
+        JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
+                  == regs.sp);
+    } else if (*regs.pc == JSOP_ENTERLET1) {
+        JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
+                  == regs.sp - 1);
     }
-
-    /* Clone block iff there are any closed-over variables. */
-    if (!regs.fp()->pushBlock(f.cx, blockObj))
-        THROW();
 }
 
 void JS_FASTCALL
 stubs::LeaveBlock(VMFrame &f)
 {
     f.fp()->popBlock(f.cx);
 }
 
@@ -1511,17 +1517,17 @@ stubs::CheckArgumentTypes(VMFrame &f)
 
     {
         /* Postpone recompilations until all args have been updated. */
         types::AutoEnterTypeInference enter(f.cx);
 
         if (!f.fp()->isConstructing())
             TypeScript::SetThis(f.cx, script, fp->thisValue());
         for (unsigned i = 0; i < fun->nargs; i++)
-            TypeScript::SetArgument(f.cx, script, i, fp->unaliasedFormal(i, DONT_CHECK_ALIASING));
+            TypeScript::SetArgument(f.cx, script, i, fp->formalArg(i));
     }
 
     if (monitor.recompiled())
         return;
 
 #ifdef JS_MONOIC
     ic::GenerateArgumentCheckStub(f);
 #endif
@@ -1541,17 +1547,17 @@ stubs::AssertArgumentTypes(VMFrame &f)
      */
     if (!fp->isConstructing()) {
         Type type = GetValueType(f.cx, fp->thisValue());
         if (!TypeScript::ThisTypes(script)->hasType(type))
             TypeFailure(f.cx, "Missing type for this: %s", TypeString(type));
     }
 
     for (unsigned i = 0; i < fun->nargs; i++) {
-        Type type = GetValueType(f.cx, fp->unaliasedFormal(i, DONT_CHECK_ALIASING));
+        Type type = GetValueType(f.cx, fp->formalArg(i));
         if (!TypeScript::ArgTypes(script, i)->hasType(type))
             TypeFailure(f.cx, "Missing type for arg %d: %s", i, TypeString(type));
     }
 }
 #endif
 
 /*
  * These two are never actually called, they just give us a place to rejoin if
@@ -1598,58 +1604,44 @@ stubs::Exception(VMFrame &f)
     if (f.cx->runtime->interrupt && !js_HandleExecutionInterrupt(f.cx))
         THROW();
 
     f.regs.sp[0] = f.cx->getPendingException();
     f.cx->clearPendingException();
 }
 
 void JS_FASTCALL
-stubs::StrictEvalPrologue(VMFrame &f)
+stubs::FunctionFramePrologue(VMFrame &f)
 {
-    JS_ASSERT(f.fp()->isStrictEvalFrame());
-    CallObject *callobj = CallObject::createForStrictEval(f.cx, f.fp());
-    if (!callobj)
-        THROW();
-    f.fp()->pushOnScopeChain(*callobj);
-}
-
-void JS_FASTCALL
-stubs::HeavyweightFunctionPrologue(VMFrame &f)
-{
-    if (!f.fp()->heavyweightFunctionPrologue(f.cx))
+    if (!f.fp()->functionPrologue(f.cx))
         THROW();
 }
 
 void JS_FASTCALL
-stubs::TypeNestingPrologue(VMFrame &f)
+stubs::FunctionFrameEpilogue(VMFrame &f)
 {
-    types::NestingPrologue(f.cx, f.fp());
-}
-
-void JS_FASTCALL
-stubs::Epilogue(VMFrame &f)
-{
-    f.fp()->epilogue(f.cx);
+    f.fp()->functionEpilogue(f.cx);
 }
 
 void JS_FASTCALL
 stubs::AnyFrameEpilogue(VMFrame &f)
 {
     /*
      * On the normal execution path, emitReturn calls ScriptDebugEpilogue
-     * and inlines epilogue. This function implements forced early
+     * and inlines ScriptEpilogue. This function implements forced early
      * returns, so it must have the same effect.
      */
     bool ok = true;
     if (f.cx->compartment->debugMode())
         ok = js::ScriptDebugEpilogue(f.cx, f.fp(), ok);
-    f.fp()->epilogue(f.cx);
+    ok = ScriptEpilogue(f.cx, f.fp(), ok);
     if (!ok)
         THROW();
+    if (f.fp()->isNonEvalFunctionFrame())
+        f.fp()->functionEpilogue(f.cx);
 }
 
 template <bool Clamped>
 int32_t JS_FASTCALL
 stubs::ConvertToTypedInt(JSContext *cx, Value *vp)
 {
     JS_ASSERT(!vp->isInt32());
 
--- a/js/src/methodjit/StubCalls.h
+++ b/js/src/methodjit/StubCalls.h
@@ -163,22 +163,20 @@ void JS_FASTCALL MissedBoundsCheckEntry(
 void JS_FASTCALL MissedBoundsCheckHead(VMFrame &f);
 void * JS_FASTCALL InvariantFailure(VMFrame &f, void *repatchCode);
 
 template <bool strict> int32_t JS_FASTCALL ConvertToTypedInt(JSContext *cx, Value *vp);
 void JS_FASTCALL ConvertToTypedFloat(JSContext *cx, Value *vp);
 
 void JS_FASTCALL Exception(VMFrame &f);
 
-void JS_FASTCALL StrictEvalPrologue(VMFrame &f);
-void JS_FASTCALL HeavyweightFunctionPrologue(VMFrame &f);
-void JS_FASTCALL TypeNestingPrologue(VMFrame &f);
+void JS_FASTCALL FunctionFramePrologue(VMFrame &f);
+void JS_FASTCALL FunctionFrameEpilogue(VMFrame &f);
 
 void JS_FASTCALL AnyFrameEpilogue(VMFrame &f);
-void JS_FASTCALL Epilogue(VMFrame &f);
 
 JSObject * JS_FASTCALL
 NewDenseUnallocatedArray(VMFrame &f, uint32_t length);
 
 void JS_FASTCALL ArrayConcatTwoArrays(VMFrame &f);
 void JS_FASTCALL ArrayShift(VMFrame &f);
 
 void JS_FASTCALL WriteBarrier(VMFrame &f, Value *addr);
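
Conceptually, the restored FunctionFramePrologue/FunctionFrameEpilogue pair brackets every non-eval function frame: the prologue builds the frame's scope objects on entry, and the epilogue writes frame-resident values back into any escaped call or arguments object before the frame dies (see AnyFrameEpilogue above, which must do the same even for forced early returns). A rough sketch of that pairing, using hypothetical stand-in types and C++ exceptions to model the THROW() paths:

// Editor's sketch (assumed semantics, not the real jit stubs).
#include <functional>
#include <stdexcept>

struct FakeFrame {
    bool scopeObjectsCreated = false;
    bool valuesPutBack = false;
};

bool functionFramePrologue(FakeFrame &fp)     // may fail (OOM in the engine)
{
    fp.scopeObjectsCreated = true;
    return true;
}

void functionFrameEpilogue(FakeFrame &fp)
{
    fp.valuesPutBack = true;                  // analogue of putting call/args objects
}

// Even an early (forced) return must run the epilogue, mirroring
// stubs::AnyFrameEpilogue above.
void runFrame(FakeFrame &fp, const std::function<void(FakeFrame &)> &body)
{
    if (!functionFramePrologue(fp))
        throw std::runtime_error("prologue failed");
    try {
        body(fp);
    } catch (...) {
        functionFrameEpilogue(fp);
        throw;
    }
    functionFrameEpilogue(fp);
}
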
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -1276,24 +1276,41 @@ AssertJit(JSContext *cx, unsigned argc, 
 
     JS_SET_RVAL(cx, vp, JSVAL_VOID);
     return true;
 }
 
 static JSScript *
 ValueToScript(JSContext *cx, jsval v, JSFunction **funp = NULL)
 {
-    JSFunction *fun = JS_ValueToFunction(cx, v);
-    if (!fun)
-        return NULL;
-
-    JSScript *script = fun->maybeScript();
-    if (!script)
-        JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL, JSSMSG_SCRIPTS_ONLY);
-
+    JSScript *script = NULL;
+    JSFunction *fun = NULL;
+
+    if (!JSVAL_IS_PRIMITIVE(v)) {
+        JSObject *obj = JSVAL_TO_OBJECT(v);
+        JSClass *clasp = JS_GetClass(obj);
+
+        if (clasp == Jsvalify(&GeneratorClass)) {
+            if (JSGenerator *gen = (JSGenerator *) JS_GetPrivate(obj)) {
+                fun = gen->floatingFrame()->fun();
+                script = fun->script();
+            }
+        }
+    }
+
+    if (!script) {
+        fun = JS_ValueToFunction(cx, v);
+        if (!fun)
+            return NULL;
+        script = fun->maybeScript();
+        if (!script) {
+            JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL,
+                                 JSSMSG_SCRIPTS_ONLY);
+        }
+    }
     if (fun && funp)
         *funp = fun;
 
     return script;
 }
 
 static JSBool
 SetDebug(JSContext *cx, unsigned argc, jsval *vp)
--- a/js/src/vm/ArgumentsObject-inl.h
+++ b/js/src/vm/ArgumentsObject-inl.h
@@ -5,19 +5,26 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef ArgumentsObject_inl_h___
 #define ArgumentsObject_inl_h___
 
 #include "ArgumentsObject.h"
 
-#include "ScopeObject-inl.h"
+namespace js {
 
-namespace js {
+inline void
+ArgumentsObject::initInitialLength(uint32_t length)
+{
+    JS_ASSERT(getFixedSlot(INITIAL_LENGTH_SLOT).isUndefined());
+    initFixedSlot(INITIAL_LENGTH_SLOT, Int32Value(length << PACKED_BITS_COUNT));
+    JS_ASSERT((getFixedSlot(INITIAL_LENGTH_SLOT).toInt32() >> PACKED_BITS_COUNT) == int32_t(length));
+    JS_ASSERT(!hasOverriddenLength());
+}
 
 inline uint32_t
 ArgumentsObject::initialLength() const
 {
     uint32_t argc = uint32_t(getFixedSlot(INITIAL_LENGTH_SLOT).toInt32()) >> PACKED_BITS_COUNT;
     JS_ASSERT(argc <= StackSpace::ARGS_LENGTH_MAX);
     return argc;
 }
@@ -27,128 +34,151 @@ ArgumentsObject::markLengthOverridden()
 {
     uint32_t v = getFixedSlot(INITIAL_LENGTH_SLOT).toInt32() | LENGTH_OVERRIDDEN_BIT;
     setFixedSlot(INITIAL_LENGTH_SLOT, Int32Value(v));
 }
 
 inline bool
 ArgumentsObject::hasOverriddenLength() const
 {
-    const Value &v = getFixedSlot(INITIAL_LENGTH_SLOT);
+    const js::Value &v = getFixedSlot(INITIAL_LENGTH_SLOT);
     return v.toInt32() & LENGTH_OVERRIDDEN_BIT;
 }
 
+inline void
+ArgumentsObject::initData(ArgumentsData *data)
+{
+    JS_ASSERT(getFixedSlot(DATA_SLOT).isUndefined());
+    initFixedSlot(DATA_SLOT, PrivateValue(data));
+}
+
 inline ArgumentsData *
 ArgumentsObject::data() const
 {
-    return reinterpret_cast<ArgumentsData *>(getFixedSlot(DATA_SLOT).toPrivate());
-}
-
-inline JSScript *
-ArgumentsObject::containingScript() const
-{
-    return data()->script;
-}
-
-inline const Value &
-ArgumentsObject::arg(unsigned i) const
-{
-    JS_ASSERT(i < data()->numArgs);
-    const Value &v = data()->args[i];
-    JS_ASSERT(!v.isMagic(JS_FORWARD_TO_CALL_OBJECT));
-    return v;
-}
-
-inline void
-ArgumentsObject::setArg(unsigned i, const Value &v)
-{
-    JS_ASSERT(i < data()->numArgs);
-    HeapValue &lhs = data()->args[i];
-    JS_ASSERT(!lhs.isMagic(JS_FORWARD_TO_CALL_OBJECT));
-    lhs = v;
-}
-
-inline const Value &
-ArgumentsObject::element(uint32_t i) const
-{
-    JS_ASSERT(!isElementDeleted(i));
-    const Value &v = data()->args[i];
-    if (v.isMagic(JS_FORWARD_TO_CALL_OBJECT))
-        return getFixedSlot(MAYBE_CALL_SLOT).toObject().asCall().arg(i);
-    return v;
-}
-
-inline void
-ArgumentsObject::setElement(uint32_t i, const Value &v)
-{
-    JS_ASSERT(!isElementDeleted(i));
-    HeapValue &lhs = data()->args[i];
-    if (lhs.isMagic(JS_FORWARD_TO_CALL_OBJECT))
-        getFixedSlot(MAYBE_CALL_SLOT).toObject().asCall().setArg(i, v);
-    else
-        lhs = v;
+    return reinterpret_cast<js::ArgumentsData *>(getFixedSlot(DATA_SLOT).toPrivate());
 }
 
 inline bool
 ArgumentsObject::isElementDeleted(uint32_t i) const
 {
-    JS_ASSERT(i < data()->numArgs);
-    if (i >= initialLength())
-        return false;
     return IsBitArrayElementSet(data()->deletedBits, initialLength(), i);
 }
 
 inline bool
 ArgumentsObject::isAnyElementDeleted() const
 {
     return IsAnyBitArrayElementSet(data()->deletedBits, initialLength());
 }
 
 inline void
 ArgumentsObject::markElementDeleted(uint32_t i)
 {
     SetBitArrayElement(data()->deletedBits, initialLength(), i);
 }
 
+inline const js::Value &
+ArgumentsObject::element(uint32_t i) const
+{
+    JS_ASSERT(!isElementDeleted(i));
+    return data()->slots[i];
+}
+
+inline void
+ArgumentsObject::setElement(uint32_t i, const js::Value &v)
+{
+    JS_ASSERT(!isElementDeleted(i));
+    data()->slots[i] = v;
+}
+
 inline bool
-ArgumentsObject::maybeGetElement(uint32_t i, Value *vp)
+ArgumentsObject::getElement(uint32_t i, Value *vp)
 {
     if (i >= initialLength() || isElementDeleted(i))
         return false;
-    *vp = element(i);
+
+    /*
+     * If this arguments object has an associated stack frame, that contains
+     * the canonical argument value.  Note that strict arguments objects do not
+     * alias named arguments and never have a stack frame.
+     */
+    StackFrame *fp = maybeStackFrame();
+    JS_ASSERT_IF(isStrictArguments(), !fp);
+    if (fp)
+        *vp = fp->canonicalActualArg(i);
+    else
+        *vp = element(i);
     return true;
 }
 
+namespace detail {
+
+struct STATIC_SKIP_INFERENCE CopyNonHoleArgsTo
+{
+    CopyNonHoleArgsTo(ArgumentsObject *argsobj, Value *dst) : argsobj(*argsobj), dst(dst) {}
+    ArgumentsObject &argsobj;
+    Value *dst;
+    bool operator()(uint32_t argi, Value *src) {
+        *dst++ = *src;
+        return true;
+    }
+};
+
+} /* namespace detail */
+
 inline bool
-ArgumentsObject::maybeGetElements(uint32_t start, uint32_t count, Value *vp)
+ArgumentsObject::getElements(uint32_t start, uint32_t count, Value *vp)
 {
     JS_ASSERT(start + count >= start);
 
     uint32_t length = initialLength();
     if (start > length || start + count > length || isAnyElementDeleted())
         return false;
 
-    for (uint32_t i = start, end = start + count; i < end; ++i, ++vp)
-        *vp = element(i);
-    return true;
+    StackFrame *fp = maybeStackFrame();
+
+    /* If there's no stack frame for this, argument values are in elements(). */
+    if (!fp) {
+        const Value *srcbeg = Valueify(data()->slots) + start;
+        const Value *srcend = srcbeg + count;
+        const Value *src = srcbeg;
+        for (Value *dst = vp; src < srcend; ++dst, ++src)
+            *dst = *src;
+        return true;
+    }
+
+    /* Otherwise, element values are on the stack. */
+    JS_ASSERT(fp->numActualArgs() <= StackSpace::ARGS_LENGTH_MAX);
+    return fp->forEachCanonicalActualArg(detail::CopyNonHoleArgsTo(this, vp), start, count);
+}
+
+inline js::StackFrame *
+ArgumentsObject::maybeStackFrame() const
+{
+    return reinterpret_cast<js::StackFrame *>(getFixedSlot(STACK_FRAME_SLOT).toPrivate());
+}
+
+inline void
+ArgumentsObject::setStackFrame(StackFrame *frame)
+{
+    setFixedSlot(STACK_FRAME_SLOT, PrivateValue(frame));
 }
 
 inline size_t
 ArgumentsObject::sizeOfMisc(JSMallocSizeOfFun mallocSizeOf) const
 {
     return mallocSizeOf(data());
 }
 
-inline const Value &
+inline const js::Value &
 NormalArgumentsObject::callee() const
 {
     return data()->callee;
 }
 
 inline void
 NormalArgumentsObject::clearCallee()
 {
     data()->callee.set(compartment(), MagicValue(JS_OVERWRITTEN_CALLEE));
 }
 
-} /* namespace js */
+} // namespace js
 
 #endif /* ArgumentsObject_inl_h___ */
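
The restored getElement/getElements read arguments[i] through the live StackFrame while the function is still running, and from the snapshot in ArgumentsData::slots once js_PutArgsObject has detached the frame (strict-mode objects never carry a frame). A minimal illustration with stand-in types, not the engine's classes:

// Editor's illustration (simplified stand-ins) of the fast-path element read.
#include <cstdint>
#include <vector>

struct FakeFrame {
    std::vector<double> actuals;                 // canonical actual arguments
};

struct FakeArgsObject {
    uint32_t initialLength;
    std::vector<bool> deleted;                   // per-element "deleted" bits
    std::vector<double> slots;                   // snapshot written at frame exit
    FakeFrame *frame;                            // null once the frame returned

    // Returns false if the slow path (getProperty) must be taken instead;
    // as in the real code, false does not indicate an error.
    bool getElement(uint32_t i, double *vp) const {
        if (i >= initialLength || deleted[i])
            return false;
        *vp = frame ? frame->actuals[i] : slots[i];
        return true;
    }
};
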
--- a/js/src/vm/ArgumentsObject.cpp
+++ b/js/src/vm/ArgumentsObject.cpp
@@ -17,128 +17,151 @@
 #include "jsobjinlines.h"
 
 #include "gc/Barrier-inl.h"
 #include "vm/ArgumentsObject-inl.h"
 
 using namespace js;
 using namespace js::gc;
 
-ArgumentsObject *
-ArgumentsObject::create(JSContext *cx, StackFrame *fp)
+struct PutArg
+{
+    PutArg(JSCompartment *comp, ArgumentsObject &argsobj)
+      : compartment(comp), argsobj(argsobj), dst(argsobj.data()->slots) {}
+    JSCompartment *compartment;
+    ArgumentsObject &argsobj;
+    HeapValue *dst;
+    bool operator()(unsigned i, Value *src) {
+        JS_ASSERT(dst->isUndefined());
+        if (!argsobj.isElementDeleted(i))
+            dst->set(compartment, *src);
+        ++dst;
+        return true;
+    }
+};
+
+void
+js_PutArgsObject(StackFrame *fp)
 {
-    JSFunction &callee = fp->callee();
-    RootedObject proto(cx, callee.global().getOrCreateObjectPrototype(cx));
+    ArgumentsObject &argsobj = fp->argsObj();
+    if (argsobj.isNormalArguments()) {
+        JS_ASSERT(argsobj.maybeStackFrame() == fp);
+        JSCompartment *comp = fp->compartment();
+        fp->forEachCanonicalActualArg(PutArg(comp, argsobj));
+        argsobj.setStackFrame(NULL);
+    } else {
+        JS_ASSERT(!argsobj.maybeStackFrame());
+    }
+}
+
+ArgumentsObject *
+ArgumentsObject::create(JSContext *cx, uint32_t argc, HandleObject callee)
+{
+    JS_ASSERT(argc <= StackSpace::ARGS_LENGTH_MAX);
+    JS_ASSERT(!callee->toFunction()->hasRest());
+
+    RootedObject proto(cx, callee->global().getOrCreateObjectPrototype(cx));
     if (!proto)
         return NULL;
 
     RootedTypeObject type(cx);
+
     type = proto->getNewType(cx);
     if (!type)
         return NULL;
 
-    bool strict = callee.inStrictMode();
+    bool strict = callee->toFunction()->inStrictMode();
     Class *clasp = strict ? &StrictArgumentsObjectClass : &NormalArgumentsObjectClass;
 
     RootedShape emptyArgumentsShape(cx);
     emptyArgumentsShape =
         EmptyShape::getInitialShape(cx, clasp, proto,
                                     proto->getParent(), FINALIZE_KIND,
                                     BaseShape::INDEXED);
     if (!emptyArgumentsShape)
         return NULL;
 
-    unsigned numActuals = fp->numActualArgs();
-    unsigned numFormals = fp->numFormalArgs();
-    unsigned numDeletedWords = NumWordsForBitArrayOfLength(numActuals);
-    unsigned numArgs = Max(numActuals, numFormals);
-    unsigned numBytes = offsetof(ArgumentsData, args) +
+    unsigned numDeletedWords = NumWordsForBitArrayOfLength(argc);
+    unsigned numBytes = offsetof(ArgumentsData, slots) +
                         numDeletedWords * sizeof(size_t) +
-                        numArgs * sizeof(Value);
+                        argc * sizeof(Value);
 
     ArgumentsData *data = (ArgumentsData *)cx->malloc_(numBytes);
     if (!data)
         return NULL;
 
-    data->numArgs = numArgs;
-    data->callee.init(ObjectValue(callee));
-    data->script = fp->script();
-
-    /* Copy [0, numArgs) into data->slots. */
-    HeapValue *dst = data->args, *dstEnd = data->args + numArgs;
-    for (Value *src = fp->formals(), *end = src + numFormals; src != end; ++src, ++dst)
-        dst->init(*src);
-    if (numActuals > numFormals) {
-        for (Value *src = fp->actuals() + numFormals; dst != dstEnd; ++src, ++dst)
-            dst->init(*src);
-    } else if (numActuals < numFormals) {
-        for (; dst != dstEnd; ++dst)
-            dst->init(UndefinedValue());
-    }
-
-    data->deletedBits = reinterpret_cast<size_t *>(dstEnd);
+    data->callee.init(ObjectValue(*callee));
+    for (HeapValue *vp = data->slots; vp != data->slots + argc; vp++)
+        vp->init(UndefinedValue());
+    data->deletedBits = (size_t *)(data->slots + argc);
     ClearAllBitArrayElements(data->deletedBits, numDeletedWords);
 
+    /* We have everything needed to fill in the object, so make the object. */
     JSObject *obj = JSObject::create(cx, FINALIZE_KIND, emptyArgumentsShape, type, NULL);
     if (!obj)
         return NULL;
 
-    obj->initFixedSlot(INITIAL_LENGTH_SLOT, Int32Value(numActuals << PACKED_BITS_COUNT));
-    obj->initFixedSlot(DATA_SLOT, PrivateValue(data));
-
-    /*
-     * If it exists and the arguments object aliases formals, the call object
-     * is the canonical location for formals.
-     */
-    JSScript *script = fp->script();
-    if (fp->fun()->isHeavyweight() && script->argsObjAliasesFormals()) {
-        obj->initFixedSlot(MAYBE_CALL_SLOT, ObjectValue(fp->callObj()));
+    ArgumentsObject &argsobj = obj->asArguments();
 
-        /* Flag each slot that canonically lives in the callObj. */
-        if (script->bindingsAccessedDynamically) {
-            for (unsigned i = 0; i < numFormals; ++i)
-                data->args[i] = MagicValue(JS_FORWARD_TO_CALL_OBJECT);
-        } else {
-            for (unsigned i = 0; i < script->numClosedArgs(); ++i)
-                data->args[script->getClosedArg(i)] = MagicValue(JS_FORWARD_TO_CALL_OBJECT);
-        }
-    }
+    JS_ASSERT(UINT32_MAX > (uint64_t(argc) << PACKED_BITS_COUNT));
+    argsobj.initInitialLength(argc);
+    argsobj.initData(data);
+    argsobj.setStackFrame(NULL);
 
-    ArgumentsObject &argsobj = obj->asArguments();
-    JS_ASSERT(argsobj.initialLength() == numActuals);
-    JS_ASSERT(!argsobj.hasOverriddenLength());
+    JS_ASSERT(argsobj.numFixedSlots() >= NormalArgumentsObject::RESERVED_SLOTS);
+    JS_ASSERT(argsobj.numFixedSlots() >= StrictArgumentsObject::RESERVED_SLOTS);
+
     return &argsobj;
 }
 
 ArgumentsObject *
-ArgumentsObject::createExpected(JSContext *cx, StackFrame *fp)
+ArgumentsObject::create(JSContext *cx, StackFrame *fp)
 {
     JS_ASSERT(fp->script()->needsArgsObj());
-    ArgumentsObject *argsobj = create(cx, fp);
+
+    ArgumentsObject *argsobj = ArgumentsObject::create(cx, fp->numActualArgs(),
+                                                       RootedObject(cx, &fp->callee()));
     if (!argsobj)
         return NULL;
 
+    /*
+     * Strict mode functions have arguments objects that copy the initial
+     * actual parameter values. Non-strict mode arguments use the frame pointer
+     * to retrieve up-to-date parameter values.
+     */
+    if (argsobj->isStrictArguments())
+        fp->forEachCanonicalActualArg(PutArg(cx->compartment, *argsobj));
+    else
+        argsobj->setStackFrame(fp);
+
     fp->initArgsObj(*argsobj);
     return argsobj;
 }
 
 ArgumentsObject *
 ArgumentsObject::createUnexpected(JSContext *cx, StackFrame *fp)
 {
-    return create(cx, fp);
+    ArgumentsObject *argsobj = create(cx, fp->numActualArgs(), RootedObject(cx, &fp->callee()));
+    if (!argsobj)
+        return NULL;
+
+    fp->forEachCanonicalActualArg(PutArg(cx->compartment, *argsobj));
+    return argsobj;
 }
 
 static JSBool
 args_delProperty(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
 {
     ArgumentsObject &argsobj = obj->asArguments();
     if (JSID_IS_INT(id)) {
         unsigned arg = unsigned(JSID_TO_INT(id));
-        if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg))
+        if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg)) {
+            argsobj.setElement(arg, UndefinedValue());
             argsobj.markElementDeleted(arg);
+        }
     } else if (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) {
         argsobj.markLengthOverridden();
     } else if (JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom)) {
         argsobj.asNormalArguments().clearCallee();
     }
     return true;
 }
 
@@ -150,45 +173,57 @@ ArgGetter(JSContext *cx, HandleObject ob
 
     NormalArgumentsObject &argsobj = obj->asNormalArguments();
     if (JSID_IS_INT(id)) {
         /*
          * arg can exceed the number of arguments if a script changed the
          * prototype to point to another Arguments object with a bigger argc.
          */
         unsigned arg = unsigned(JSID_TO_INT(id));
-        if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg))
-            *vp = argsobj.element(arg);
+        if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg)) {
+            if (StackFrame *fp = argsobj.maybeStackFrame()) {
+                JS_ASSERT_IF(arg < fp->numFormalArgs(), fp->script()->formalIsAliased(arg));
+                *vp = fp->canonicalActualArg(arg);
+            } else {
+                *vp = argsobj.element(arg);
+            }
+        }
     } else if (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) {
         if (!argsobj.hasOverriddenLength())
-            *vp = Int32Value(argsobj.initialLength());
+            vp->setInt32(argsobj.initialLength());
     } else {
         JS_ASSERT(JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom));
-        if (!argsobj.callee().isMagic(JS_OVERWRITTEN_CALLEE))
-            *vp = argsobj.callee();
+        const Value &v = argsobj.callee();
+        if (!v.isMagic(JS_OVERWRITTEN_CALLEE))
+            *vp = v;
     }
     return true;
 }
 
 static JSBool
 ArgSetter(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp)
 {
     if (!obj->isNormalArguments())
         return true;
 
     NormalArgumentsObject &argsobj = obj->asNormalArguments();
-    JSScript *script = argsobj.containingScript();
 
     if (JSID_IS_INT(id)) {
         unsigned arg = unsigned(JSID_TO_INT(id));
-        if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg)) {
-            argsobj.setElement(arg, *vp);
-            if (arg < script->function()->nargs)
-                types::TypeScript::SetArgument(cx, script, arg, *vp);
-            return true;
+        if (arg < argsobj.initialLength()) {
+            if (StackFrame *fp = argsobj.maybeStackFrame()) {
+                JSScript *script = fp->functionScript();
+                JS_ASSERT(script->needsArgsObj());
+                if (arg < fp->numFormalArgs()) {
+                    JS_ASSERT(fp->script()->formalIsAliased(arg));
+                    types::TypeScript::SetArgument(cx, script, arg, *vp);
+                }
+                fp->canonicalActualArg(arg) = *vp;
+                return true;
+            }
         }
     } else {
         JS_ASSERT(JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom) ||
                   JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom));
     }
 
     /*
      * For simplicity we use delete/define to replace the property with one
@@ -235,37 +270,37 @@ args_resolve(JSContext *cx, HandleObject
 
     *objp = argsobj;
     return true;
 }
 
 bool
 NormalArgumentsObject::optimizedGetElem(JSContext *cx, StackFrame *fp, const Value &elem, Value *vp)
 {
-    JS_ASSERT(!fp->script()->needsArgsObj());
+    JS_ASSERT(!fp->hasArgsObj());
 
     /* Fast path: no need to convert to id when elem is already an int in range. */
     if (elem.isInt32()) {
         int32_t i = elem.toInt32();
         if (i >= 0 && uint32_t(i) < fp->numActualArgs()) {
-            *vp = fp->unaliasedActual(i);
+            *vp = fp->canonicalActualArg(i);
             return true;
         }
     }
 
     /* Slow path: create and canonicalize an id, then emulate args_resolve. */
 
     jsid id;
     if (!ValueToId(cx, elem, &id))
         return false;
 
     if (JSID_IS_INT(id)) {
         int32_t i = JSID_TO_INT(id);
         if (i >= 0 && uint32_t(i) < fp->numActualArgs()) {
-            *vp = fp->unaliasedActual(i);
+            *vp = fp->canonicalActualArg(i);
             return true;
         }
     }
 
     if (id == NameToId(cx->runtime->atomState.lengthAtom)) {
         *vp = Int32Value(fp->numActualArgs());
         return true;
     }
@@ -432,30 +467,44 @@ strictargs_enumerate(JSContext *cx, Hand
         id = INT_TO_JSID(i);
         if (!baseops::LookupProperty(cx, argsobj, id, &pobj, &prop))
             return false;
     }
 
     return true;
 }
 
-void
-ArgumentsObject::finalize(FreeOp *fop, JSObject *obj)
+static void
+args_finalize(FreeOp *fop, JSObject *obj)
 {
     fop->free_(reinterpret_cast<void *>(obj->asArguments().data()));
 }
 
-void
-ArgumentsObject::trace(JSTracer *trc, JSObject *obj)
+static void
+args_trace(JSTracer *trc, JSObject *obj)
 {
     ArgumentsObject &argsobj = obj->asArguments();
     ArgumentsData *data = argsobj.data();
     MarkValue(trc, &data->callee, js_callee_str);
-    MarkValueRange(trc, data->numArgs, data->args, js_arguments_str);
-    MarkScriptUnbarriered(trc, &data->script, "script");
+    MarkValueRange(trc, argsobj.initialLength(), data->slots, js_arguments_str);
+
+    /*
+     * If a generator's arguments or call object escapes, and the generator
+     * frame is not executing, the generator object needs to be marked because
+     * it is not otherwise reachable. An executing generator is rooted by its
+     * invocation.  To distinguish the two cases (which imply different access
+     * paths to the generator object), we use the JSFRAME_FLOATING_GENERATOR
+     * flag, which is only set on the StackFrame kept in the generator object's
+     * JSGenerator.
+     */
+#if JS_HAS_GENERATORS
+    StackFrame *fp = argsobj.maybeStackFrame();
+    if (fp && fp->isFloatingGenerator())
+        MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
+#endif
 }
 
 /*
  * The classes below collaborate to lazily reflect and synchronize actual
  * argument values, argument count, and callee function object stored in a
  * StackFrame with their corresponding property values in the frame's
  * arguments object.
  */
@@ -467,22 +516,22 @@ Class js::NormalArgumentsObjectClass = {
     JSCLASS_FOR_OF_ITERATION,
     JS_PropertyStub,         /* addProperty */
     args_delProperty,
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     args_enumerate,
     reinterpret_cast<JSResolveOp>(args_resolve),
     JS_ConvertStub,
-    ArgumentsObject::finalize,
+    args_finalize,           /* finalize   */
     NULL,                    /* checkAccess */
     NULL,                    /* call        */
     NULL,                    /* construct   */
     NULL,                    /* hasInstance */
-    ArgumentsObject::trace,
+    args_trace,
     {
         NULL,       /* equality    */
         NULL,       /* outerObject */
         NULL,       /* innerObject */
         JS_ElementIteratorStub,
         NULL,       /* unused      */
         false,      /* isWrappedNative */
     }
@@ -501,22 +550,22 @@ Class js::StrictArgumentsObjectClass = {
     JSCLASS_FOR_OF_ITERATION,
     JS_PropertyStub,         /* addProperty */
     args_delProperty,
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     strictargs_enumerate,
     reinterpret_cast<JSResolveOp>(strictargs_resolve),
     JS_ConvertStub,
-    ArgumentsObject::finalize,
+    args_finalize,           /* finalize   */
     NULL,                    /* checkAccess */
     NULL,                    /* call        */
     NULL,                    /* construct   */
     NULL,                    /* hasInstance */
-    ArgumentsObject::trace,
+    args_trace,
     {
         NULL,       /* equality    */
         NULL,       /* outerObject */
         NULL,       /* innerObject */
         JS_ElementIteratorStub,
         NULL,       /* unused      */
         false,      /* isWrappedNative */
     }
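
Taken together, the restored code gives non-strict arguments objects a two-phase lifecycle: at creation they point at their stack frame and defer to it for element reads, and at function return js_PutArgsObject copies the canonical actuals into the object and clears the frame pointer (strict objects copy eagerly and never alias the frame; deleted elements are skipped by PutArg). A simplified sketch of that flow, with hypothetical types standing in for StackFrame and ArgumentsObject:

// Editor's sketch of the restored lifecycle; none of these names are engine API.
#include <vector>

struct FakeFrame {
    std::vector<double> actuals;
};

struct FakeArgsObject {
    bool strict;
    FakeFrame *frame = nullptr;          // STACK_FRAME_SLOT analogue
    std::vector<double> slots;           // ArgumentsData::slots analogue
};

FakeArgsObject createArgsObject(FakeFrame &fp, bool strict)
{
    FakeArgsObject obj;
    obj.strict = strict;
    obj.slots.assign(fp.actuals.size(), 0.0);
    if (strict)
        obj.slots = fp.actuals;          // strict mode: copy eagerly, never alias
    else
        obj.frame = &fp;                 // non-strict: read through the frame
    return obj;
}

void putArgsObject(FakeArgsObject &obj, FakeFrame &fp)
{
    if (!obj.strict) {
        obj.slots = fp.actuals;          // snapshot the canonical actuals
        obj.frame = nullptr;             // frame is about to go away
    }
}
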
--- a/js/src/vm/ArgumentsObject.h
+++ b/js/src/vm/ArgumentsObject.h
@@ -11,61 +11,45 @@
 #include "jsfun.h"
 
 namespace js {
 
 /*
  * ArgumentsData stores the initial indexed arguments provided to the
  * corresponding function invocation and that function itself.  It is used to store arguments[i]
  * and arguments.callee -- up until the corresponding property is modified,
- * when the relevant value is flagged to memorialize the modification.
+ * when the relevant value is overwritten with MagicValue(JS_ARGS_HOLE) to
+ * memorialize the modification.
  */
 struct ArgumentsData
 {
     /*
-     * numArgs = Max(numFormalArgs, numActualArgs)
-     * The array 'args' has numArgs elements.
-     */
-    unsigned    numArgs;
-
-    /*
-     * arguments.callee, or MagicValue(JS_OVERWRITTEN_CALLEE) if
-     * arguments.callee has been modified.
+     * arguments.callee, or MagicValue(JS_ARGS_HOLE) if arguments.callee has
+     * been modified.
      */
     HeapValue   callee;
 
-    /* The script for the function containing this arguments object. */
-    JSScript    *script;
-
     /*
      * Pointer to an array of bits indicating, for every argument in 'slots',
      * whether the element has been deleted. See isElementDeleted comment.
      */
     size_t      *deletedBits;
 
     /*
-     * This array holds either the current argument value or the magic value
-     * JS_FORWARD_TO_CALL_OBJECT. The latter means that the function has both a
-     * CallObject and an ArgumentsObject AND the particular formal variable is
-     * aliased by the CallObject. In such cases, the CallObject holds the
-     * canonical value so any element access to the arguments object should
-     * load the value out of the CallObject (which is pointed to by
-     * MAYBE_CALL_SLOT).
+     * Values of the arguments for this object, or MagicValue(JS_ARGS_HOLE) if
+     * the indexed argument has been modified.
      */
-    HeapValue   args[1];
-
-    /* For jit use: */
-    static ptrdiff_t offsetOfArgs() { return offsetof(ArgumentsData, args); }
+    HeapValue   slots[1];
 };
 
 /*
  * ArgumentsObject instances represent |arguments| objects created to store
  * function arguments when a function is called.  It's expensive to create such
- * objects if they're never used, so they're only created when they are
- * potentially used.
+ * objects if they're never used, so they're only created lazily.  (See
+ * js::StackFrame::setArgsObj and friends.)
  *
  * Arguments objects are complicated because, for non-strict mode code, they
  * must alias any named arguments which were provided to the function.  Gnarly
  * example:
  *
  *   function f(a, b, c, d)
  *   {
  *     arguments[0] = "seta";
@@ -86,60 +70,96 @@ struct ArgumentsData
  *
  *   INITIAL_LENGTH_SLOT
  *     Stores the initial value of arguments.length, plus a bit indicating
  *     whether arguments.length has been modified.  Use initialLength() and
  *     hasOverriddenLength() to access these values.  If arguments.length has
  *     been modified, then the current value of arguments.length is stored in
  *     another slot associated with a new property.
  *   DATA_SLOT
- *     Stores an ArgumentsData*, described above.
+ *     Stores an ArgumentsData* storing argument values and the callee, or
+ *     sentinels for any of these if the corresponding property is modified.
+ *     Use callee() to access the callee/sentinel, and use
+ *     element/addressOfElement/setElement to access the values stored in
+ *     the ArgumentsData.  If you're simply looking to get arguments[i],
+ *     however, use getElement or getElements to avoid spreading arguments
+ *     object implementation details around too much.
+ *   STACK_FRAME_SLOT
+ *     Stores the function's stack frame for non-strict arguments objects until
+ *     the function returns, when it is replaced with null.  When an arguments
+ *     object is created on-trace its private is JS_ARGUMENTS_OBJECT_ON_TRACE,
+ *     and when the trace exits its private is replaced with the stack frame or
+ *     null, as appropriate. This slot is used by strict arguments objects as
+ *     well, but the slot is always null. Conceptually it would be better to
+ *     remove this oddity, but preserving it allows us to work with arguments
+ *     objects of either kind more abstractly, so we keep it for now.
  */
 class ArgumentsObject : public JSObject
 {
-  protected:
     static const uint32_t INITIAL_LENGTH_SLOT = 0;
     static const uint32_t DATA_SLOT = 1;
-    static const uint32_t MAYBE_CALL_SLOT = 2;
+    static const uint32_t STACK_FRAME_SLOT = 2;
 
+    /* Lower-order bit stolen from the length slot. */
     static const uint32_t LENGTH_OVERRIDDEN_BIT = 0x1;
     static const uint32_t PACKED_BITS_COUNT = 1;
 
-    static ArgumentsObject *create(JSContext *cx, StackFrame *fp);
-    inline ArgumentsData *data() const;
+    void initInitialLength(uint32_t length);
+    void initData(ArgumentsData *data);
+    static ArgumentsObject *create(JSContext *cx, uint32_t argc, HandleObject callee);
 
   public:
     static const uint32_t RESERVED_SLOTS = 3;
     static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
 
     /* Create an arguments object for a frame that is expecting them. */
-    static ArgumentsObject *createExpected(JSContext *cx, StackFrame *fp);
+    static ArgumentsObject *create(JSContext *cx, StackFrame *fp);
 
     /*
      * Purposefully disconnect the returned arguments object from the frame
      * by always creating a new copy that does not alias formal parameters.
      * This allows function-local analysis to determine that formals are
      * not aliased and generally simplifies arguments objects.
      */
     static ArgumentsObject *createUnexpected(JSContext *cx, StackFrame *fp);
 
     /*
      * Return the initial length of the arguments.  This may differ from the
      * current value of arguments.length!
      */
     inline uint32_t initialLength() const;
 
-    /* The script for the function containing this arguments object. */
-    JSScript *containingScript() const;
-
     /* True iff arguments.length has been assigned or its attributes changed. */
     inline bool hasOverriddenLength() const;
     inline void markLengthOverridden();
 
     /*
+     * Attempt to speedily and efficiently access the i-th element of this
+     * arguments object.  Return true if the element was speedily returned.
+     * Return false if the element must be looked up more slowly using
+     * getProperty or some similar method.
+     *
+     * NB: Returning false does not indicate error!
+     */
+    inline bool getElement(uint32_t i, js::Value *vp);
+
+    /*
+     * Attempt to speedily and efficiently get elements [start, start + count)
+     * of this arguments object into the locations starting at |vp|.  Return
+     * true if all elements were copied.  Return false if the elements must be
+     * gotten more slowly, perhaps using a getProperty or some similar method
+     * in a loop.
+     *
+     * NB: Returning false does not indicate error!
+     */
+    inline bool getElements(uint32_t start, uint32_t count, js::Value *vp);
+
+    inline js::ArgumentsData *data() const;
+
+    /*
      * Because the arguments object is a real object, its elements may be
      * deleted. This is implemented by setting a 'deleted' flag for the arg
      * which is read by argument object resolve and getter/setter hooks.
      *
      * NB: an element, once deleted, stays deleted. Thus:
      *
      *   function f(x) { delete arguments[0]; arguments[0] = 42; return x }
      *   assertEq(f(1), 1);
@@ -147,61 +167,28 @@ class ArgumentsObject : public JSObject
      * This works because, once a property is deleted from an arguments object,
      * it gets regular properties with regular getters/setters that don't alias
      * ArgumentsData::slots.
      */
     inline bool isElementDeleted(uint32_t i) const;
     inline bool isAnyElementDeleted() const;
     inline void markElementDeleted(uint32_t i);
 
-    /*
-     * An ArgumentsObject serves two roles:
-     *  - a real object, accessed through regular object operations, e.g..,
-     *    JSObject::getElement corresponding to 'arguments[i]';
-     *  - a VM-internal data structure, storing the value of arguments (formal
-     *    and actual) that are accessed directly by the VM when a reading the
-     *    value of a formal parameter.
-     * There are two ways to access the ArgumentsData::args corresponding to
-     * these two use cases:
-     *  - object access should use elements(i) which will take care of
-     *    forwarding when the value is JS_FORWARD_TO_CALL_OBJECT;
-     *  - VM argument access should use arg(i) which will assert that the
-     *    value is not JS_FORWARD_TO_CALL_OBJECT (since, if such forwarding was
-     *    needed, the frontend should have emitted JSOP_GETALIASEDVAR.
-     */
-    inline const Value &element(uint32_t i) const;
-    inline void setElement(uint32_t i, const Value &v);
-    inline const Value &arg(unsigned i) const;
-    inline void setArg(unsigned i, const Value &v);
+    inline const js::Value &element(uint32_t i) const;
+    inline void setElement(uint32_t i, const js::Value &v);
 
-    /*
-     * Attempt to speedily and efficiently access the i-th element of this
-     * arguments object.  Return true if the element was speedily returned.
-     * Return false if the element must be looked up more slowly using
-     * getProperty or some similar method. The second overload copies the
-     * elements [start, start + count) into the locations starting at 'vp'.
-     *
-     * NB: Returning false does not indicate error!
-     */
-    inline bool maybeGetElement(uint32_t i, Value *vp);
-    inline bool maybeGetElements(uint32_t start, uint32_t count, js::Value *vp);
+    /* The stack frame for this ArgumentsObject, if the frame is still active. */
+    inline js::StackFrame *maybeStackFrame() const;
+    inline void setStackFrame(js::StackFrame *frame);
 
     /*
      * Measures things hanging off this ArgumentsObject that are counted by the
      * |miscSize| argument in JSObject::sizeOfExcludingThis().
      */
     inline size_t sizeOfMisc(JSMallocSizeOfFun mallocSizeOf) const;
-
-    static void finalize(FreeOp *fop, JSObject *obj);
-    static void trace(JSTracer *trc, JSObject *obj);
-
-    /* For jit use: */
-    static size_t getDataSlotOffset() {
-        return getFixedSlotOffset(DATA_SLOT);
-    }
 };
 
 class NormalArgumentsObject : public ArgumentsObject
 {
   public:
     /*
      * Stores arguments.callee, or MagicValue(JS_ARGS_HOLE) if the callee has
      * been cleared.
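
The INITIAL_LENGTH_SLOT layout described above packs the initial argument count shifted left by PACKED_BITS_COUNT and steals the low bit to record whether arguments.length has been overridden. A self-contained sketch of the packing arithmetic (the constants mirror the ones in the class, but this is illustrative code, not the engine's):

#include <cassert>
#include <cstdint>

static const uint32_t LENGTH_OVERRIDDEN_BIT = 0x1;
static const uint32_t PACKED_BITS_COUNT = 1;

// Pack the initial argument count into the slot's int32 payload.
int32_t packInitialLength(uint32_t length) { return int32_t(length << PACKED_BITS_COUNT); }

// Recover the initial length, ignoring the stolen low bit.
uint32_t initialLength(int32_t packed) { return uint32_t(packed) >> PACKED_BITS_COUNT; }

bool hasOverriddenLength(int32_t packed) { return (packed & LENGTH_OVERRIDDEN_BIT) != 0; }

int32_t markLengthOverridden(int32_t packed) { return packed | LENGTH_OVERRIDDEN_BIT; }

int main()
{
    int32_t v = packInitialLength(3);
    assert(initialLength(v) == 3 && !hasOverriddenLength(v));
    v = markLengthOverridden(v);
    assert(hasOverriddenLength(v) && initialLength(v) == 3);
    return 0;
}
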
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -3132,26 +3132,20 @@ DebuggerArguments_getArg(JSContext *cx, 
     THIS_FRAME(cx, argc, vp, "get argument", ca2, thisobj, fp);
 
     /*
      * Since getters can be extracted and applied to other objects,
      * there is no guarantee this object has an ith argument.
      */
     JS_ASSERT(i >= 0);
     Value arg;
-    if (unsigned(i) < fp->numActualArgs()) {
-        if (unsigned(i) < fp->numFormalArgs() && fp->script()->formalLivesInCallObject(i))
-            arg = fp->callObj().arg(i);
-        else if (fp->script()->argsObjAliasesFormals())
-            arg = fp->argsObj().arg(i);
-        else
-            arg = fp->unaliasedActual(i);
-    } else {
+    if (unsigned(i) < fp->numActualArgs())
+        arg = fp->canonicalActualArg(i);
+    else
         arg.setUndefined();
-    }
 
     if (!Debugger::fromChildJSObject(thisobj)->wrapDebuggeeValue(cx, &arg))
         return false;
     args.rval() = arg;
     return true;
 }
 
 static JSBool
@@ -3371,17 +3365,16 @@ js::EvaluateInEnv(JSContext *cx, Handle<
     JSScript *script = frontend::CompileScript(cx, env, fp, prin, prin,
                                                compileAndGo, noScriptRval, needScriptGlobal,
                                                chars, length, filename, lineno,
                                                cx->findVersion(), NULL,
                                                UpvarCookie::UPVAR_LEVEL_LIMIT);
     if (!script)
         return false;
 
-    script->isActiveEval = true;
     return ExecuteKernel(cx, script, *env, fp->thisValue(), EXECUTE_DEBUG, fp, rval);
 }
 
 enum EvalBindingsMode { WithoutBindings, WithBindings };
 
 static JSBool
 DebuggerFrameEval(JSContext *cx, unsigned argc, Value *vp, EvalBindingsMode mode)
 {
--- a/js/src/vm/ScopeObject-inl.h
+++ b/js/src/vm/ScopeObject-inl.h
@@ -9,53 +9,55 @@
 #define ScopeObject_inl_h___
 
 #include "ScopeObject.h"
 
 namespace js {
 
 inline
 ScopeCoordinate::ScopeCoordinate(jsbytecode *pc)
-  : hops(GET_UINT16(pc)), slot(GET_UINT16(pc + 2))
+  : hops(GET_UINT16(pc)), binding(GET_UINT16(pc + 2))
 {
     JS_ASSERT(JOF_OPTYPE(*pc) == JOF_SCOPECOORD);
 }
 
+inline JSAtom *
+ScopeCoordinateAtom(JSScript *script, jsbytecode *pc)
+{
+    JS_ASSERT(JOF_OPTYPE(*pc) == JOF_SCOPECOORD);
+    return script->getAtom(GET_UINT32_INDEX(pc + 2 * sizeof(uint16_t)));
+}
+
 inline JSObject &
 ScopeObject::enclosingScope() const
 {
     return getReservedSlot(SCOPE_CHAIN_SLOT).toObject();
 }
 
 inline bool
 ScopeObject::setEnclosingScope(JSContext *cx, HandleObject obj)
 {
     RootedObject self(cx, this);
     if (!obj->setDelegate(cx))
         return false;
     self->setFixedSlot(SCOPE_CHAIN_SLOT, ObjectValue(*obj));
     return true;
 }
 
-inline const Value &
-ScopeObject::aliasedVar(ScopeCoordinate sc)
+inline StackFrame *
+ScopeObject::maybeStackFrame() const
 {
-    JS_ASSERT(isCall() || isClonedBlock());
-    JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == CallObject::RESERVED_SLOTS);
-    JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == BlockObject::RESERVED_SLOTS);
-    return getSlot(CALL_BLOCK_RESERVED_SLOTS + sc.slot);
+    JS_ASSERT(!isStaticBlock() && !isWith());
+    return reinterpret_cast<StackFrame *>(JSObject::getPrivate());
 }
 
 inline void
-ScopeObject::setAliasedVar(ScopeCoordinate sc, const Value &v)
+ScopeObject::setStackFrame(StackFrame *frame)
 {
-    JS_ASSERT(isCall() || isClonedBlock());
-    JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == CallObject::RESERVED_SLOTS);
-    JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == BlockObject::RESERVED_SLOTS);
-    setSlot(CALL_BLOCK_RESERVED_SLOTS + sc.slot, v);
+    return setPrivate(frame);
 }
 
 /*static*/ inline size_t
 ScopeObject::offsetOfEnclosingScope()
 {
     return getFixedSlotOffset(SCOPE_CHAIN_SLOT);
 }
 
@@ -83,43 +85,69 @@ CallObject::getCallee() const
 
 inline JSFunction *
 CallObject::getCalleeFunction() const
 {
     return getReservedSlot(CALLEE_SLOT).toObject().toFunction();
 }
 
 inline const Value &
-CallObject::arg(unsigned i, MaybeCheckAliasing checkAliasing) const
+CallObject::arg(unsigned i) const
 {
-    JS_ASSERT_IF(checkAliasing, getCalleeFunction()->script()->formalLivesInCallObject(i));
+    JS_ASSERT(i < getCalleeFunction()->nargs);
     return getSlot(RESERVED_SLOTS + i);
 }
 
 inline void
-CallObject::setArg(unsigned i, const Value &v, MaybeCheckAliasing checkAliasing)
+CallObject::setArg(unsigned i, const Value &v)
 {
-    JS_ASSERT_IF(checkAliasing, getCalleeFunction()->script()->formalLivesInCallObject(i));
+    JS_ASSERT(i < getCalleeFunction()->nargs);
     setSlot(RESERVED_SLOTS + i, v);
 }
 
+inline void
+CallObject::initArgUnchecked(unsigned i, const Value &v)
+{
+    JS_ASSERT(i < getCalleeFunction()->nargs);
+    initSlotUnchecked(RESERVED_SLOTS + i, v);
+}
+
 inline const Value &
-CallObject::var(unsigned i, MaybeCheckAliasing checkAliasing) const
+CallObject::var(unsigned i) const
 {
     JSFunction *fun = getCalleeFunction();
-    JS_ASSERT_IF(checkAliasing, fun->script()->varIsAliased(i));
+    JS_ASSERT(fun->nargs == fun->script()->bindings.numArgs());
+    JS_ASSERT(i < fun->script()->bindings.numVars());
     return getSlot(RESERVED_SLOTS + fun->nargs + i);
 }
 
 inline void
-CallObject::setVar(unsigned i, const Value &v, MaybeCheckAliasing checkAliasing)
+CallObject::setVar(unsigned i, const Value &v)
+{
+    JSFunction *fun = getCalleeFunction();
+    JS_ASSERT(fun->nargs == fun->script()->bindings.numArgs());
+    JS_ASSERT(i < fun->script()->bindings.numVars());
+    setSlot(RESERVED_SLOTS + fun->nargs + i, v);
+}
+
+inline void
+CallObject::initVarUnchecked(unsigned i, const Value &v)
 {
     JSFunction *fun = getCalleeFunction();
-    JS_ASSERT_IF(checkAliasing, fun->script()->varIsAliased(i));
-    setSlot(RESERVED_SLOTS + fun->nargs + i, v);
+    JS_ASSERT(fun->nargs == fun->script()->bindings.numArgs());
+    JS_ASSERT(i < fun->script()->bindings.numVars());
+    initSlotUnchecked(RESERVED_SLOTS + fun->nargs + i, v);
+}
+
+inline void
+CallObject::copyValues(unsigned nargs, Value *argv, unsigned nvars, Value *slots)
+{
+    JS_ASSERT(slotInRange(RESERVED_SLOTS + nargs + nvars, SENTINEL_ALLOWED));
+    copySlotRange(RESERVED_SLOTS, argv, nargs);
+    copySlotRange(RESERVED_SLOTS + nargs, slots, nvars);
 }
 
 inline HeapSlotArray
 CallObject::argArray()
 {
     DebugOnly<JSFunction*> fun = getCalleeFunction();
     JS_ASSERT(hasContiguousSlots(RESERVED_SLOTS, fun->nargs));
     return HeapSlotArray(getSlotAddress(RESERVED_SLOTS));
@@ -153,37 +181,23 @@ WithObject::object() const
 }
 
 inline uint32_t
 BlockObject::slotCount() const
 {
     return propertyCount();
 }
 
-inline unsigned
-BlockObject::slotToFrameLocal(JSScript *script, unsigned i)
-{
-    JS_ASSERT(i < slotCount());
-    return script->nfixed + stackDepth() + i;
-}
-
-inline const Value &
+inline HeapSlot &
 BlockObject::slotValue(unsigned i)
 {
     JS_ASSERT(i < slotCount());
     return getSlotRef(RESERVED_SLOTS + i);
 }
 
-inline void
-BlockObject::setSlotValue(unsigned i, const Value &v)
-{
-    JS_ASSERT(i < slotCount());
-    setSlot(RESERVED_SLOTS + i, v);
-}
-
 inline StaticBlockObject *
 StaticBlockObject::enclosingBlock() const
 {
     JSObject *obj = getReservedSlot(SCOPE_CHAIN_SLOT).toObjectOrNull();
     return obj ? &obj->asStaticBlock() : NULL;
 }
 
 inline void
@@ -198,75 +212,65 @@ StaticBlockObject::setStackDepth(uint32_
     JS_ASSERT(getReservedSlot(DEPTH_SLOT).isUndefined());
     initReservedSlot(DEPTH_SLOT, PrivateUint32Value(depth));
 }
 
 inline void
 StaticBlockObject::setDefinitionParseNode(unsigned i, Definition *def)
 {
     JS_ASSERT(slotValue(i).isUndefined());
-    setSlotValue(i, PrivateValue(def));
+    slotValue(i).init(this, i, PrivateValue(def));
 }
 
 inline Definition *
 StaticBlockObject::maybeDefinitionParseNode(unsigned i)
 {
     Value v = slotValue(i);
     return v.isUndefined() ? NULL : reinterpret_cast<Definition *>(v.toPrivate());
 }
 
 inline void
 StaticBlockObject::setAliased(unsigned i, bool aliased)
 {
-    JS_ASSERT_IF(i > 0, slotValue(i-1).isBoolean());
-    setSlotValue(i, BooleanValue(aliased));
-    if (aliased && !needsClone()) {
-        setSlotValue(0, MagicValue(JS_BLOCK_NEEDS_CLONE));
-        JS_ASSERT(needsClone());
-    }
+    slotValue(i).init(this, i, BooleanValue(aliased));
+    if (aliased)
+        JSObject::setPrivate(reinterpret_cast<void *>(1));
 }
 
 inline bool
 StaticBlockObject::isAliased(unsigned i)
 {
     return slotValue(i).isTrue();
 }
 
 inline bool
-StaticBlockObject::needsClone()
+StaticBlockObject::needsClone() const
 {
-    return !slotValue(0).isFalse();
+    return JSObject::getPrivate() != NULL;
 }
 
 inline bool
 StaticBlockObject::containsVarAtDepth(uint32_t depth)
 {
     return depth >= stackDepth() && depth < stackDepth() + slotCount();
 }
 
 inline StaticBlockObject &
 ClonedBlockObject::staticBlock() const
 {
     return getProto()->asStaticBlock();
 }
 
 inline const Value &
-ClonedBlockObject::var(unsigned i, MaybeCheckAliasing checkAliasing)
+ClonedBlockObject::closedSlot(unsigned i)
 {
-    JS_ASSERT_IF(checkAliasing, staticBlock().isAliased(i));
+    JS_ASSERT(!maybeStackFrame());
     return slotValue(i);
 }
 
-inline void
-ClonedBlockObject::setVar(unsigned i, const Value &v, MaybeCheckAliasing checkAliasing)
-{
-    JS_ASSERT_IF(checkAliasing, staticBlock().isAliased(i));
-    setSlotValue(i, v);
-}
-
 }  /* namespace js */
 
 inline js::ScopeObject &
 JSObject::asScope()
 {
     JS_ASSERT(isScope());
     return *static_cast<js::ScopeObject *>(this);
 }
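
The accessors restored above fix the CallObject slot layout: reserved slots first, then the formal arguments, then the vars, so arg(i) reads slot RESERVED_SLOTS + i and var(i) reads slot RESERVED_SLOTS + nargs + i, while ScopeCoordinate decodes two uint16 immediates (hops, binding) from the bytecode. A small sketch with stand-in types (the reserved-slot count here is illustrative, not the engine's value):

// Editor's sketch of the call-object slot layout and scope-coordinate shape.
#include <cassert>
#include <cstdint>
#include <vector>

struct FakeScopeCoordinate {
    uint16_t hops;      // how many enclosing scope objects to skip
    uint16_t binding;   // which binding in the target scope
};

struct FakeCallObject {
    static constexpr unsigned RESERVED_SLOTS = 3;   // illustrative value
    unsigned nargs;
    std::vector<double> slots;                      // RESERVED_SLOTS + nargs + nvars

    double arg(unsigned i) const {
        assert(i < nargs);
        return slots[RESERVED_SLOTS + i];           // formals come first
    }
    double var(unsigned i) const {
        return slots[RESERVED_SLOTS + nargs + i];   // vars follow the formals
    }
};
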
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -18,71 +18,117 @@
 
 #include "ScopeObject-inl.h"
 
 using namespace js;
 using namespace js::types;
 
 /*****************************************************************************/
 
-StaticBlockObject *
-js::ScopeCoordinateBlockChain(JSScript *script, jsbytecode *pc)
+void
+js_PutCallObject(StackFrame *fp, CallObject &callobj)
 {
-    ScopeCoordinate sc(pc);
+    JS_ASSERT(callobj.maybeStackFrame() == fp);
+    JS_ASSERT_IF(fp->isEvalFrame(), fp->isStrictEvalFrame());
+    JS_ASSERT(fp->isEvalFrame() == callobj.isForEval());
+
+    JSScript *script = fp->script();
+    Bindings &bindings = script->bindings;
+
+    if (callobj.isForEval()) {
+        JS_ASSERT(script->strictModeCode);
+        JS_ASSERT(bindings.numArgs() == 0);
 
-    uint32_t blockIndex = GET_UINT32_INDEX(pc + 2 * sizeof(uint16_t));
-    if (blockIndex == UINT32_MAX)
-        return NULL;
+        /* This could be optimized as below, but keep it simple for now. */
+        callobj.copyValues(0, NULL, bindings.numVars(), fp->slots());
+    } else {
+        JSFunction *fun = fp->fun();
+        JS_ASSERT(script == callobj.getCalleeFunction()->script());
+        JS_ASSERT(script == fun->script());
+
+        unsigned n = bindings.count();
+        if (n > 0) {
+            uint32_t nvars = bindings.numVars();
+            uint32_t nargs = bindings.numArgs();
+            JS_ASSERT(fun->nargs == nargs);
+            JS_ASSERT(nvars + nargs == n);
 
-    StaticBlockObject *block = &script->getObject(blockIndex)->asStaticBlock();
-    unsigned i = 0;
-    while (true) {
-        while (block && !block->needsClone())
-            block = block->enclosingBlock();
-        if (i++ == sc.hops)
-            break;
-        block = block->enclosingBlock();
+            JSScript *script = fun->script();
+            if (script->bindingsAccessedDynamically
+#ifdef JS_METHODJIT
+                || script->debugMode
+#endif
+                ) {
+                callobj.copyValues(nargs, fp->formalArgs(), nvars, fp->slots());
+            } else {
+                /*
+                 * For each arg & var that is closed over, copy it from the stack
+                 * into the call object. We use initArg/VarUnchecked because,
+                 * when you call a getter on a call object, js_NativeGetInline
+                 * caches the return value in the slot, so we can't assert that
+                 * it's undefined.
+                 */
+                uint32_t nclosed = script->numClosedArgs();
+                for (uint32_t i = 0; i < nclosed; i++) {
+                    uint32_t e = script->getClosedArg(i);
+#ifdef JS_GC_ZEAL
+                    callobj.setArg(e, fp->formalArg(e));
+#else
+                    callobj.initArgUnchecked(e, fp->formalArg(e));
+#endif
+                }
+
+                nclosed = script->numClosedVars();
+                for (uint32_t i = 0; i < nclosed; i++) {
+                    uint32_t e = script->getClosedVar(i);
+#ifdef JS_GC_ZEAL
+                    callobj.setVar(e, fp->slots()[e]);
+#else
+                    callobj.initVarUnchecked(e, fp->slots()[e]);
+#endif
+                }
+            }
+
+            /*
+             * Update the args and vars for the active call if this is an outer
+             * function in a script nesting.
+             */
+            types::TypeScriptNesting *nesting = script->nesting();
+            if (nesting && script->isOuterFunction) {
+                nesting->argArray = callobj.argArray();
+                nesting->varArray = callobj.varArray();
+            }
+        }
+
+        /* Clear private pointers to fp, which is about to go away. */
+        if (js_IsNamedLambda(fun)) {
+            JSObject &env = callobj.enclosingScope();
+            JS_ASSERT(env.asDeclEnv().maybeStackFrame() == fp);
+            env.setPrivate(NULL);
+        }
     }
-    return block;
+
+    callobj.setStackFrame(NULL);
 }
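
A hedged lifecycle sketch for js_PutCallObject (illustrative only, not part of this changeset; the real call sites are the interpreter/JIT epilogues and DebugScopes::onPopCall further down): the Call object is created on entry to a heavyweight function frame and must be "put" before the frame's stack slots go away, after which getArgOp/getVarOp fall back to the object's own slots.

    static bool
    EnterHeavyweightFrame(JSContext *cx, StackFrame *fp)
    {
        CallObject *callobj = CallObject::createForFunction(cx, fp);
        if (!callobj)
            return false;
        /*
         * createForFunction already called callobj->setStackFrame(fp); the
         * caller would now link *callobj in as fp's innermost scope (the exact
         * Stack.h helper for that is not shown here).
         */
        return true;
    }

    static void
    LeaveHeavyweightFrame(StackFrame *fp, CallObject &callobj)
    {
        JS_ASSERT(callobj.maybeStackFrame() == fp);
        js_PutCallObject(fp, callobj);  /* snapshot args/vars, clear frame ptr */
    }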
 
-PropertyName *
-js::ScopeCoordinateName(JSScript *script, jsbytecode *pc)
-{
-    StaticBlockObject *maybeBlock = ScopeCoordinateBlockChain(script, pc);
-    ScopeCoordinate sc(pc);
-    uint32_t targetSlot = ScopeObject::CALL_BLOCK_RESERVED_SLOTS + sc.slot;
-    Shape *shape = maybeBlock ? maybeBlock->lastProperty() : script->bindings.lastShape();
-    Shape::Range r = shape->all();
-    while (r.front().slot() != targetSlot)
-        r.popFront();
-    return JSID_TO_ATOM(r.front().propid())->asPropertyName();
-}
-
-/*****************************************************************************/
-
 /*
  * Construct a call object for the given bindings.  If this is a call object
  * for a function invocation, callee should be the function being called.
  * Otherwise it must be a call object for eval of strict mode code, and callee
  * must be null.
  */
 CallObject *
-CallObject::create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleFunction callee)
+CallObject::create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleObject callee)
 {
     RootedShape shape(cx);
     shape = script->bindings.callObjectShape(cx);
     if (shape == NULL)
         return NULL;
 
-    gc::AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots());
-#ifdef JS_THREADSAFE
-    JS_ASSERT(CanBeFinalizedInBackground(kind, &CallClass));
-    kind = gc::GetBackgroundAllocKind(kind);
-#endif
+    gc::AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots() + 1);
 
     RootedTypeObject type(cx);
     type = cx->compartment->getEmptyType(cx);
     if (!type)
         return NULL;
 
     HeapSlot *slots;
     if (!PreallocateObjectDynamicSlots(cx, shape, &slots))
@@ -98,19 +144,31 @@ CallObject::create(JSContext *cx, JSScri
      * to be updated dynamically.
      */
     if (&enclosing->global() != obj->getParent()) {
         JS_ASSERT(obj->getParent() == NULL);
         if (!JSObject::setParent(cx, obj, RootedObject(cx, &enclosing->global())))
             return NULL;
     }
 
+#ifdef DEBUG
+    JS_ASSERT(!obj->inDictionaryMode());
+    for (Shape::Range r = obj->lastProperty(); !r.empty(); r.popFront()) {
+        const Shape &s = r.front();
+        if (s.hasSlot()) {
+            JS_ASSERT(s.slot() + 1 == obj->slotSpan());
+            break;
+        }
+    }
+#endif
+
     if (!obj->asScope().setEnclosingScope(cx, enclosing))
         return NULL;
 
+    JS_ASSERT_IF(callee, callee->isFunction());
     obj->initFixedSlot(CALLEE_SLOT, ObjectOrNullValue(callee));
 
     /*
      * If |bindings| is for a function that has extensible parents, that means
      * its Call should have its own shape; see BaseShape::extensibleParents.
      */
     if (obj->lastProperty()->extensibleParents()) {
         if (!obj->generateOwnShape(cx))
@@ -119,137 +177,181 @@ CallObject::create(JSContext *cx, JSScri
 
     return &obj->asCall();
 }
 
 CallObject *
 CallObject::createForFunction(JSContext *cx, StackFrame *fp)
 {
     JS_ASSERT(fp->isNonEvalFunctionFrame());
+    JS_ASSERT(!fp->hasCallObj());
 
     RootedObject scopeChain(cx, fp->scopeChain());
 
     /*
      * For a named function expression, the Call's parent points to an
      * environment object holding the function's name.
      */
     if (js_IsNamedLambda(fp->fun())) {
         scopeChain = DeclEnvObject::create(cx, fp);
         if (!scopeChain)
             return NULL;
     }
 
-    JSScript *script = fp->script();
-    CallObject *callobj = create(cx, script, scopeChain, RootedFunction(cx, &fp->callee()));
+    CallObject *callobj = create(cx, fp->script(), scopeChain, RootedObject(cx, &fp->callee()));
     if (!callobj)
         return NULL;
 
-    /* Copy in the closed-over formal arguments. */
-    if (script->bindingsAccessedDynamically) {
-        Value *formals = fp->formals();
-        for (unsigned slot = 0, n = fp->fun()->nargs; slot < n; ++slot)
-            callobj->setArg(slot, formals[slot]);
-    } else if (unsigned n = script->numClosedArgs()) {
-        Value *formals = fp->formals();
-        for (unsigned i = 0; i < n; ++i) {
-            uint32_t slot = script->getClosedArg(i);
-            callobj->setArg(slot, formals[slot]);
-        }
-    }
-
+    callobj->setStackFrame(fp);
     return callobj;
 }
 
-void
-CallObject::copyUnaliasedValues(StackFrame *fp)
-{
-    JS_ASSERT(fp->script() == getCalleeFunction()->script());
-    JSScript *script = fp->script();
-
-    /* If bindings are accessed dynamically, everything is aliased. */
-    if (script->bindingsAccessedDynamically)
-        return;
-
-    /* Copy the unaliased formals. */
-    for (unsigned i = 0; i < script->bindings.numArgs(); ++i) {
-        if (!script->formalLivesInCallObject(i)) {
-            if (script->argsObjAliasesFormals())
-                setArg(i, fp->argsObj().arg(i), DONT_CHECK_ALIASING);
-            else
-                setArg(i, fp->unaliasedFormal(i), DONT_CHECK_ALIASING);
-        }
-    }
-
-    /* Copy the unaliased var/let bindings. */
-    for (unsigned i = 0; i < script->bindings.numVars(); ++i) {
-        if (!script->varIsAliased(i))
-            setVar(i, fp->unaliasedLocal(i), DONT_CHECK_ALIASING);
-    }
-}
-
 CallObject *
 CallObject::createForStrictEval(JSContext *cx, StackFrame *fp)
 {
-    JS_ASSERT(fp->isStrictEvalFrame());
-    JS_ASSERT(cx->fp() == fp);
-    JS_ASSERT(cx->regs().pc == fp->script()->code);
+    CallObject *callobj = create(cx, fp->script(), fp->scopeChain(), RootedObject(cx));
+    if (!callobj)
+        return NULL;
+
+    callobj->setStackFrame(fp);
+    fp->initScopeChain(*callobj);
+    return callobj;
+}
 
-    return create(cx, fp->script(), fp->scopeChain(), RootedFunction(cx));
+JSBool
+CallObject::getArgOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
+{
+    CallObject &callobj = obj->asCall();
+
+    JS_ASSERT((int16_t) JSID_TO_INT(id) == JSID_TO_INT(id));
+    unsigned i = (uint16_t) JSID_TO_INT(id);
+
+    DebugOnly<JSScript *> script = callobj.getCalleeFunction()->script();
+    JS_ASSERT_IF(!cx->okToAccessUnaliasedBindings, script->formalLivesInCallObject(i));
+
+    if (StackFrame *fp = callobj.maybeStackFrame())
+        *vp = fp->formalArg(i);
+    else
+        *vp = callobj.arg(i);
+    return true;
 }
 
 JSBool
 CallObject::setArgOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp)
 {
     CallObject &callobj = obj->asCall();
 
     JS_ASSERT((int16_t) JSID_TO_INT(id) == JSID_TO_INT(id));
     unsigned i = (uint16_t) JSID_TO_INT(id);
 
     JSScript *script = callobj.getCalleeFunction()->script();
-    JS_ASSERT(script->formalLivesInCallObject(i));
+    JS_ASSERT_IF(!cx->okToAccessUnaliasedBindings, script->formalLivesInCallObject(i));
 
-    callobj.setArg(i, *vp);
+    if (StackFrame *fp = callobj.maybeStackFrame())
+        fp->formalArg(i) = *vp;
+    else
+        callobj.setArg(i, *vp);
 
     if (!script->ensureHasTypes(cx))
         return false;
 
     TypeScript::SetArgument(cx, script, i, *vp);
+
+    return true;
+}
+
+JSBool
+CallObject::getVarOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
+{
+    CallObject &callobj = obj->asCall();
+
+    JS_ASSERT((int16_t) JSID_TO_INT(id) == JSID_TO_INT(id));
+    unsigned i = (uint16_t) JSID_TO_INT(id);
+
+    DebugOnly<JSScript *> script = callobj.getCalleeFunction()->script();
+    JS_ASSERT_IF(!cx->okToAccessUnaliasedBindings, script->varIsAliased(i));
+
+    if (StackFrame *fp = callobj.maybeStackFrame())
+        *vp = fp->varSlot(i);
+    else
+        *vp = callobj.var(i);
+
+    JS_ASSERT(!vp->isMagic(JS_OPTIMIZED_ARGUMENTS));
     return true;
 }
 
 JSBool
 CallObject::setVarOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp)
 {
     CallObject &callobj = obj->asCall();
 
     JS_ASSERT((int16_t) JSID_TO_INT(id) == JSID_TO_INT(id));
     unsigned i = (uint16_t) JSID_TO_INT(id);
 
     JSScript *script = callobj.getCalleeFunction()->script();
-    JS_ASSERT(script->varIsAliased(i));
+    JS_ASSERT_IF(!cx->okToAccessUnaliasedBindings, script->varIsAliased(i));
 
-    callobj.setVar(i, *vp);
+    if (StackFrame *fp = callobj.maybeStackFrame())
+        fp->varSlot(i) = *vp;
+    else
+        callobj.setVar(i, *vp);
 
     if (!script->ensureHasTypes(cx))
         return false;
 
     TypeScript::SetLocal(cx, script, i, *vp);
     return true;
 }
 
+bool
+CallObject::containsVarOrArg(PropertyName *name, Value *vp, JSContext *cx)
+{
+    jsid id = NameToId(name);
+    const Shape *shape = nativeLookup(cx, id);
+    if (!shape)
+        return false;
+
+    PropertyOp op = shape->getterOp();
+    if (op != getVarOp && op != getArgOp)
+        return false;
+
+    JS_ALWAYS_TRUE(op(cx, RootedObject(cx, this), RootedId(cx, INT_TO_JSID(shape->shortid())), vp));
+    return true;
+}
+
+static void
+call_trace(JSTracer *trc, JSObject *obj)
+{
+    JS_ASSERT(obj->isCall());
+
+    /* Mark any generator frame, as for arguments objects. */
+#if JS_HAS_GENERATORS
+    StackFrame *fp = (StackFrame *) obj->getPrivate();
+    if (fp && fp->isFloatingGenerator())
+        MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
+#endif
+}
+
 JS_PUBLIC_DATA(Class) js::CallClass = {
     "Call",
-    JSCLASS_IS_ANONYMOUS | JSCLASS_HAS_RESERVED_SLOTS(CallObject::RESERVED_SLOTS),
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_IS_ANONYMOUS |
+    JSCLASS_HAS_RESERVED_SLOTS(CallObject::RESERVED_SLOTS),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
-    NULL                     /* convert: Leave it NULL so we notice if calls ever escape */
+    NULL,                    /* convert: Leave it NULL so we notice if calls ever escape */
+    NULL,                    /* finalize */
+    NULL,                    /* checkAccess */
+    NULL,                    /* call        */
+    NULL,                    /* construct   */
+    NULL,                    /* hasInstance */
+    call_trace
 };
 
 Class js::DeclEnvClass = {
     js_Object_str,
     JSCLASS_HAS_PRIVATE |
     JSCLASS_HAS_RESERVED_SLOTS(DeclEnvObject::RESERVED_SLOTS) |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Object),
     JS_PropertyStub,         /* addProperty */
@@ -274,16 +376,17 @@ DeclEnvObject::create(JSContext *cx, Sta
                                                     &fp->global(), FINALIZE_KIND);
     if (!emptyDeclEnvShape)
         return NULL;
 
     RootedObject obj(cx, JSObject::create(cx, FINALIZE_KIND, emptyDeclEnvShape, type, NULL));
     if (!obj)
         return NULL;
 
+    obj->setPrivate(fp);
     if (!obj->asScope().setEnclosingScope(cx, fp->scopeChain()))
         return NULL;
 
 
     if (!DefineNativeProperty(cx, obj, RootedId(cx, AtomToId(fp->fun()->atom)),
                               ObjectValue(fp->callee()), NULL, NULL,
                               JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_READONLY,
                               0, 0)) {
@@ -577,43 +680,100 @@ ClonedBlockObject::create(JSContext *cx,
             return NULL;
     }
 
     JS_ASSERT(!obj->inDictionaryMode());
     JS_ASSERT(obj->slotSpan() >= block->slotCount() + RESERVED_SLOTS);
 
     obj->setReservedSlot(SCOPE_CHAIN_SLOT, ObjectValue(*fp->scopeChain()));
     obj->setReservedSlot(DEPTH_SLOT, PrivateUint32Value(block->stackDepth()));
+    obj->setPrivate(js_FloatingFrameIfGenerator(cx, fp));
 
     if (obj->lastProperty()->extensibleParents() && !obj->generateOwnShape(cx))
         return NULL;
 
-    /*
-     * Copy in the closed-over locals. Closed-over locals don't need
-     * any fixup since the initial value is 'undefined'.
-     */
-    Value *src = fp->base() + block->stackDepth();
-    unsigned nslots = block->slotCount();
-    for (unsigned i = 0; i < nslots; ++i, ++src) {
-        if (block->isAliased(i))
-            obj->asClonedBlock().setVar(i, *src);
-    }
-
     return &obj->asClonedBlock();
 }
 
 void
-ClonedBlockObject::copyUnaliasedValues(StackFrame *fp)
+ClonedBlockObject::put(StackFrame *fp)
+{
+    uint32_t count = slotCount();
+    uint32_t depth = stackDepth();
+
+    /* See comments in CheckDestructuring in frontend/Parser.cpp. */
+    JS_ASSERT(count >= 1);
+
+    copySlotRange(RESERVED_SLOTS, fp->base() + depth, count);
+
+    /* We must clear the private slot even with errors. */
+    setPrivate(NULL);
+}
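
A minimal caller sketch for ClonedBlockObject::put (illustrative, not code from this changeset; the real call sites are the interpreter's block-leaving paths and DebugScopes::onPopBlock below): when execution leaves a cloned let block, the bindings must be snapshotted out of the frame before its stack slots are popped.

    static void
    LeaveClonedBlock(StackFrame *fp)
    {
        JSObject *scope = fp->scopeChain();
        if (scope->isClonedBlock()) {
            /* Copy the let bindings from the frame's stack slots into the object. */
            scope->asClonedBlock().put(fp);
        }
    }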
+
+static JSBool
+block_getProperty(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
 {
-    StaticBlockObject &block = staticBlock();
-    unsigned base = block.slotToFrameLocal(fp->script(), 0);
-    for (unsigned i = 0; i < slotCount(); ++i) {
-        if (!block.isAliased(i))
-            setVar(i, fp->unaliasedLocal(base + i), DONT_CHECK_ALIASING);
+    /*
+     * Block objects are never exposed to script, and the engine handles them
+     * with care. So unlike other getters, this one can assert (rather than
+     * check) certain invariants about obj.
+     */
+    ClonedBlockObject &block = obj->asClonedBlock();
+    unsigned index = (unsigned) JSID_TO_INT(id);
+
+    JS_ASSERT_IF(!block.compartment()->debugMode(), block.staticBlock().isAliased(index));
+
+    if (StackFrame *fp = block.maybeStackFrame()) {
+        fp = js_LiveFrameIfGenerator(fp);
+        index += fp->numFixed() + block.stackDepth();
+        JS_ASSERT(index < fp->numSlots());
+        *vp = fp->slots()[index];
+        return true;
     }
+
+    /* Values are in slots immediately following the class-reserved ones. */
+    JS_ASSERT(block.closedSlot(index) == *vp);
+    return true;
+}
+
+static JSBool
+block_setProperty(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp)
+{
+    ClonedBlockObject &block = obj->asClonedBlock();
+    unsigned index = (unsigned) JSID_TO_INT(id);
+
+    JS_ASSERT_IF(!block.compartment()->debugMode(), block.staticBlock().isAliased(index));
+
+    if (StackFrame *fp = block.maybeStackFrame()) {
+        fp = js_LiveFrameIfGenerator(fp);
+        index += fp->numFixed() + block.stackDepth();
+        JS_ASSERT(index < fp->numSlots());
+        fp->slots()[index] = *vp;
+        return true;
+    }
+
+    /*
+     * The value in *vp will be written back to the slot in obj that was
+     * allocated when this let binding was defined.
+     */
+    return true;
+}
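
The slot arithmetic shared by block_getProperty and block_setProperty, written out as a hedged helper sketch (not added by this changeset) to make the frame layout explicit: a cloned block's shortid i addresses the slot after the frame's fixed slots plus the expression-stack depth at which the block sits.

    static inline unsigned
    BlockSlotToFrameIndex(StackFrame *fp, ClonedBlockObject &block, unsigned i)
    {
        /* fixed slots first, then stack values up to the block's depth, then the lets */
        return fp->numFixed() + block.stackDepth() + i;
    }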
+
+bool
+ClonedBlockObject::containsVar(PropertyName *name, Value *vp, JSContext *cx)
+{
+    RootedObject self(cx, this);
+
+    const Shape *shape = nativeLookup(cx, NameToId(name));
+    if (!shape)
+        return false;
+
+    JS_ASSERT(shape->getterOp() == block_getProperty);
+    JS_ALWAYS_TRUE(block_getProperty(cx, self, RootedId(cx, INT_TO_JSID(shape->shortid())), vp));
+    return true;
 }
 
 StaticBlockObject *
 StaticBlockObject::create(JSContext *cx)
 {
     RootedTypeObject type(cx);
     type = cx->compartment->getEmptyType(cx);
     if (!type)
@@ -623,16 +783,17 @@ StaticBlockObject::create(JSContext *cx)
     emptyBlockShape = EmptyShape::getInitialShape(cx, &BlockClass, NULL, NULL, FINALIZE_KIND);
     if (!emptyBlockShape)
         return NULL;
 
     JSObject *obj = JSObject::create(cx, FINALIZE_KIND, emptyBlockShape, type, NULL);
     if (!obj)
         return NULL;
 
+    obj->setPrivate(NULL);
     return &obj->asStaticBlock();
 }
 
 const Shape *
 StaticBlockObject::addVar(JSContext *cx, jsid id, int index, bool *redeclared)
 {
     JS_ASSERT(JSID_IS_ATOM(id) || (JSID_IS_INT(id) && JSID_TO_INT(id) == index));
 
@@ -645,34 +806,54 @@ StaticBlockObject::addVar(JSContext *cx,
         return NULL;
     }
 
     /*
      * Don't convert this object to dictionary mode so that we can clone the
      * block's shape later.
      */
     uint32_t slot = JSSLOT_FREE(&BlockClass) + index;
-    return addPropertyInternal(cx, id, /* getter = */ NULL, /* setter = */ NULL,
+    return addPropertyInternal(cx, id, block_getProperty, block_setProperty,
                                slot, JSPROP_ENUMERATE | JSPROP_PERMANENT,
                                Shape::HAS_SHORTID, index, spp,
                                /* allowDictionary = */ false);
 }
 
+static void
+block_trace(JSTracer *trc, JSObject *obj)
+{
+    if (obj->isStaticBlock())
+        return;
+
+    /* XXX: this will be removed again with bug 659577. */
+#if JS_HAS_GENERATORS
+    StackFrame *fp = obj->asClonedBlock().maybeStackFrame();
+    if (fp && fp->isFloatingGenerator())
+        MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
+#endif
+}
+
 Class js::BlockClass = {
     "Block",
-    JSCLASS_IMPLEMENTS_BARRIERS |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(BlockObject::RESERVED_SLOTS) |
     JSCLASS_IS_ANONYMOUS,
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
-    JS_ConvertStub
+    JS_ConvertStub,
+    NULL,                    /* finalize */
+    NULL,                    /* checkAccess */
+    NULL,                    /* call        */
+    NULL,                    /* construct   */
+    NULL,                    /* hasInstance */
+    block_trace
 };
 
 #define NO_PARENT_INDEX UINT32_MAX
 
 /*
  * If there's a parent id, then get the parent out of our script's object
  * array. We know that we clone block objects in outer-to-inner order, which
  * means that getting the parent now will work.
@@ -779,17 +960,17 @@ js::XDRStaticBlockObject(XDRState<mode> 
         }
 
         /*
          * XDR the block object's properties. We know that there are 'count'
          * properties to XDR, stored as id/shortid pairs.
          */
         for (unsigned i = 0; i < count; i++) {
             const Shape *shape = shapes[i];
-            JS_ASSERT(shape->hasDefaultGetter());
+            JS_ASSERT(shape->getter() == block_getProperty);
             JS_ASSERT(unsigned(shape->shortid()) == i);
 
             jsid propid = shape->propid();
             JS_ASSERT(JSID_IS_ATOM(propid) || JSID_IS_INT(propid));
 
             /* The empty string indicates an int id. */
             JSAtom *atom = JSID_IS_ATOM(propid)
                            ? JSID_TO_ATOM(propid)
@@ -995,17 +1176,17 @@ ScopeIter::settle()
     } else if (fp_->isNonStrictDirectEvalFrame() && cur_ == fp_->prev()->scopeChain()) {
         if (block_) {
             JS_ASSERT(!block_->needsClone());
             type_ = Block;
             hasScopeObject_ = false;
         } else {
             fp_ = NULL;
         }
-    } else if (fp_->isNonEvalFunctionFrame() && fp_->beforeHeavyweightPrologue()) {
+    } else if (fp_->isNonEvalFunctionFrame() && !fp_->hasCallObj()) {
         JS_ASSERT(cur_ == fp_->fun()->environment());
         fp_ = NULL;
     } else if (cur_->isWith()) {
         JS_ASSERT_IF(fp_->isFunctionFrame(), fp_->fun()->isHeavyweight());
         JS_ASSERT_IF(block_, block_->needsClone());
         JS_ASSERT_IF(block_, block_->stackDepth() < cur_->asWith().stackDepth());
         type_ = With;
         hasScopeObject_ = true;
@@ -1049,138 +1230,26 @@ namespace js {
 
 /*
  * DebugScopeProxy is the handler for DebugScopeObject proxy objects and mostly
  * just wraps ScopeObjects. Having a custom handler (rather than trying to
  * reuse js::Wrapper) gives us several important abilities:
  *  - We want to pass the ScopeObject as the receiver to forwarded scope
  *    property ops so that Call/Block/With ops do not all require a
  *    'normalization' step.
- *  - The debug scope proxy can directly manipulate the stack frame to allow
- *    the debugger to read/write args/locals that were otherwise unaliased.
  *  - The engine has made certain assumptions about the possible reads/writes
  *    in a scope. DebugScopeProxy allows us to prevent the debugger from
  *    breaking those assumptions. Examples include adding shadowing variables
  *    or changing the property attributes of bindings.
  *  - The engine makes optimizations that are observable to the debugger. The
  *    proxy can either hide these optimizations or make the situation more
  *    clear to the debugger. An example is 'arguments'.
  */
 class DebugScopeProxy : public BaseProxyHandler
 {
-    enum Action { SET, GET };
-
-    /*
-     * This function handles access to unaliased locals/formals. If such
-     * accesses were passed on directly to the DebugScopeObject::scope, they
-     * would not be reading/writing the canonical location for the variable,
-     * which is on the stack. Thus, handleUnaliasedAccess must translate
-     * would-be accesses to scope objects into analogous accesses of the
-     * stack frame.
-     *
-     * handleUnaliasedAccess returns 'true' if the access was unaliased and
-     * completed by handleUnaliasedAccess.
-     */
-    bool handleUnaliasedAccess(JSContext *cx, ScopeObject &scope, jsid id, Action action, Value *vp)
-    {
-        Shape *shape = scope.lastProperty()->search(cx, id);
-        if (!shape)
-            return false;
-
-        StackFrame *maybefp = cx->runtime->debugScopes->hasLiveFrame(scope);
-
-        if (scope.isCall() && !scope.asCall().isForEval()) {
-            CallObject &callobj = scope.asCall();
-            JSScript *script = callobj.getCalleeFunction()->script();
-            if (!script->ensureHasTypes(cx))
-                return false;
-
-            if (shape->setterOp() == CallObject::setVarOp) {
-                unsigned i = shape->shortid();
-                if (script->varIsAliased(i))
-                    return false;
-                
-                if (maybefp) {
-                    if (action == GET)
-                        *vp = maybefp->unaliasedVar(i);
-                    else
-                        maybefp->unaliasedVar(i) = *vp;
-                } else {
-                    if (action == GET)
-                        *vp = callobj.var(i, DONT_CHECK_ALIASING);
-                    else
-                        callobj.setVar(i, *vp, DONT_CHECK_ALIASING);
-                }
-
-                if (action == SET)
-                    TypeScript::SetLocal(cx, script, i, *vp);
-
-                return true;
-            }
-
-            if (shape->setterOp() == CallObject::setArgOp) {
-                unsigned i = shape->shortid();
-                if (script->formalLivesInCallObject(i))
-                    return false;
-                
-                if (maybefp) {
-                    if (script->argsObjAliasesFormals()) {
-                        if (action == GET)
-                            *vp = maybefp->argsObj().arg(i);
-                        else
-                            maybefp->argsObj().setArg(i, *vp);
-                    } else {
-                        if (action == GET)
-                            *vp = maybefp->unaliasedFormal(i);
-                        else
-                            maybefp->unaliasedFormal(i) = *vp;
-                    }
-                } else {
-                    if (action == GET)
-                        *vp = callobj.arg(i, DONT_CHECK_ALIASING);
-                    else
-                        callobj.setArg(i, *vp, DONT_CHECK_ALIASING);
-                }
-
-                if (action == SET)
-                    TypeScript::SetArgument(cx, script, i, *vp);
-
-                return true;
-            }
-
-            return false;
-        }
-
-        if (scope.isClonedBlock()) {
-            ClonedBlockObject &block = scope.asClonedBlock();
-            unsigned i = shape->shortid();
-            if (block.staticBlock().isAliased(i))
-                return false;
-
-            if (maybefp) {
-                JSScript *script = maybefp->script();
-                    unsigned local = block.slotToFrameLocal(maybefp->script(), i);
-                if (action == GET)
-                    *vp = maybefp->unaliasedLocal(local);
-                else
-                    maybefp->unaliasedLocal(local) = *vp;
-                JS_ASSERT(analyze::LocalSlot(script, local) >= analyze::TotalSlots(script));
-            } else {
-                if (action == GET)
-                    *vp = block.var(i, DONT_CHECK_ALIASING);
-                else
-                    block.setVar(i, *vp, DONT_CHECK_ALIASING);
-            }
-
-            return true;
-        }
-
-        JS_ASSERT(scope.isDeclEnv() || scope.isWith() || scope.asCall().isForEval());
-        return false;
-    }
-
     static bool isArguments(JSContext *cx, jsid id)
     {
         return id == NameToId(cx->runtime->atomState.argumentsAtom);
     }
 
     static bool isFunctionScope(ScopeObject &scope)
     {
         return scope.isCall() && !scope.asCall().isForEval();
@@ -1211,24 +1280,24 @@ class DebugScopeProxy : public BaseProxy
 
         if (!isArguments(cx, id) || !isFunctionScope(scope))
             return true;
 
         JSScript *script = scope.asCall().getCalleeFunction()->script();
         if (script->needsArgsObj())
             return true;
 
-        StackFrame *maybefp = cx->runtime->debugScopes->hasLiveFrame(scope);
-        if (!maybefp) {
+        StackFrame *fp = scope.maybeStackFrame();
+        if (!fp) {
             JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_DEBUG_NOT_LIVE,
                                  "Debugger scope");
             return false;
         }
 
-        *maybeArgsObj = ArgumentsObject::createUnexpected(cx, maybefp);
+        *maybeArgsObj = ArgumentsObject::createUnexpected(cx, fp);
         return true;
     }
 
   public:
     static int family;
     static DebugScopeProxy singleton;
 
     DebugScopeProxy() : BaseProxyHandler(&family) {}
@@ -1251,55 +1320,42 @@ class DebugScopeProxy : public BaseProxy
         if (maybeArgsObj) {
             PodZero(desc);
             desc->obj = proxy;
             desc->attrs = JSPROP_READONLY | JSPROP_ENUMERATE | JSPROP_PERMANENT;
             desc->value = ObjectValue(*maybeArgsObj);
             return true;
         }
 
-        Value v;
-        if (handleUnaliasedAccess(cx, scope, id, GET, &v)) {
-            PodZero(desc);
-            desc->obj = proxy;
-            desc->attrs = JSPROP_READONLY | JSPROP_ENUMERATE | JSPROP_PERMANENT;
-            desc->value = v;
-            return true;
-        }
-
+        AutoAllowUnaliasedVarAccess a(cx);
         return JS_GetPropertyDescriptorById(cx, &scope, id, JSRESOLVE_QUALIFIED, desc);
     }
 
     bool get(JSContext *cx, JSObject *proxy, JSObject *receiver, jsid id, Value *vp) MOZ_OVERRIDE
     {
         ScopeObject &scope = proxy->asDebugScope().scope();
 
         ArgumentsObject *maybeArgsObj;
         if (!checkForMissingArguments(cx, id, scope, &maybeArgsObj))
             return false;
 
         if (maybeArgsObj) {
             *vp = ObjectValue(*maybeArgsObj);
             return true;
         }
 
-        if (handleUnaliasedAccess(cx, scope, id, GET, vp))
-            return true;
-            
+        AutoAllowUnaliasedVarAccess a(cx);
         return scope.getGeneric(cx, RootedObject(cx, &scope), RootedId(cx, id), vp);
     }
 
     bool set(JSContext *cx, JSObject *proxy, JSObject *receiver, jsid id, bool strict,
                      Value *vp) MOZ_OVERRIDE
     {
+        AutoAllowUnaliasedVarAccess a(cx);
         ScopeObject &scope = proxy->asDebugScope().scope();
-
-        if (handleUnaliasedAccess(cx, scope, id, SET, vp))
-            return true;
-
         return scope.setGeneric(cx, RootedId(cx, id), vp, strict);
     }
 
     bool defineProperty(JSContext *cx, JSObject *proxy, jsid id, PropertyDescriptor *desc) MOZ_OVERRIDE
     {
         bool found;
         if (!has(cx, proxy, id, &found))
             return false;
@@ -1407,88 +1463,53 @@ js_IsDebugScopeSlow(const JSObject *obj)
     return obj->getClass() == &ObjectProxyClass &&
            GetProxyHandler(obj) == &DebugScopeProxy::singleton;
 }
 
 /*****************************************************************************/
 
 DebugScopes::DebugScopes(JSRuntime *rt)
  : proxiedScopes(rt),
-   missingScopes(rt),
-   liveScopes(rt)
+   missingScopes(rt)
 {}
 
 DebugScopes::~DebugScopes()
 {
     JS_ASSERT(missingScopes.empty());
 }
 
 bool
 DebugScopes::init()
 {
-    if (!liveScopes.init() ||
-        !proxiedScopes.init() ||
+    if (!proxiedScopes.init() ||
         !missingScopes.init())
     {
         return false;
     }
     return true;
 }
 
 void
 DebugScopes::mark(JSTracer *trc)
 {
     proxiedScopes.trace(trc);
 }
 
 void
-DebugScopes::sweep(JSRuntime *rt)
+DebugScopes::sweep()
 {
     /*
      * Note: missingScopes points to debug scopes weakly not just so that debug
      * scopes can be released more eagerly, but, more importantly, to avoid
      * creating an uncollectable cycle with suspended generator frames.
      */
     for (MissingScopeMap::Enum e(missingScopes); !e.empty(); e.popFront()) {
         if (!IsObjectMarked(&e.front().value))
             e.removeFront();
     }
-
-    for (LiveScopeMap::Enum e(liveScopes); !e.empty(); e.popFront()) {
-        ScopeObject *scope = e.front().key;
-        StackFrame *fp = e.front().value;
-
-        /*
-         * Scopes can be finalized when a debugger-synthesized ScopeObject is
-         * no longer reachable via its DebugScopeObject.
-         */
-        if (JS_IsAboutToBeFinalized(scope)) {
-            e.removeFront();
-            continue;
-        }
-
-        /*
-         * As explained in onGeneratorFrameChange, liveScopes includes
-         * suspended generator frames. Since a generator can be finalized while
-         * its scope is live, we must explicitly detect finalized generators.
-         * Since the scope is still live, we simulate the onPop* call by
-         * copying unaliased variables into the scope object.
-         */
-        if (JSGenerator *gen = fp->maybeSuspendedGenerator(rt)) {
-            JS_ASSERT(gen->state == JSGEN_OPEN);
-            if (!IsMarked(&gen->obj)) {
-                if (scope->isCall())
-                    scope->asCall().copyUnaliasedValues(fp);
-                else if (scope->isBlock())
-                    scope->asClonedBlock().copyUnaliasedValues(fp);
-                e.removeFront();
-                continue;
-            }
-        }
-    }
 }
 
 /*
  * Unfortunately, GetDebugScopeForFrame needs to work even outside debug mode
  * (in particular, JS_GetFrameScopeChain does not require debug mode). Since
  * DebugScopes::onPop* are only called in debug mode, this means we cannot
  * use any of the maps in DebugScopes. This will produce debug scope chains
  * that do not obey the debugger invariants but that is just fine.
@@ -1509,17 +1530,16 @@ DebugScopes::hasDebugScope(JSContext *cx
     return NULL;
 }
 
 bool
 DebugScopes::addDebugScope(JSContext *cx, ScopeObject &scope, DebugScopeObject &debugScope)
 {
     if (!CanUseDebugScopeMaps(cx))
         return true;
-
     JS_ASSERT(!proxiedScopes.has(&scope));
     if (!proxiedScopes.put(&scope, &debugScope)) {
         js_ReportOutOfMemory(cx);
         return false;
     }
     return true;
 }
 
@@ -1535,163 +1555,83 @@ DebugScopes::hasDebugScope(JSContext *cx
 }
 
 bool
 DebugScopes::addDebugScope(JSContext *cx, ScopeIter si, DebugScopeObject &debugScope)
 {
     JS_ASSERT(!si.hasScopeObject());
     if (!CanUseDebugScopeMaps(cx))
         return true;
-
     JS_ASSERT(!missingScopes.has(si));
     if (!missingScopes.put(si, &debugScope)) {
         js_ReportOutOfMemory(cx);
         return false;
     }
-
-    JS_ASSERT(!liveScopes.has(&debugScope.scope()));
-    if (!liveScopes.put(&debugScope.scope(), si.fp())) {
-        js_ReportOutOfMemory(cx);
-        return false;
-    }
     return true;
 }
 
 void
 DebugScopes::onPopCall(StackFrame *fp)
 {
-    if (fp->isGeneratorFrame())
+    if (fp->isYielding())
         return;
 
-    if (fp->fun()->isHeavyweight()) {
-        CallObject &callobj = fp->scopeChain()->asCall();
-        callobj.copyUnaliasedValues(fp);
-        liveScopes.remove(&callobj);
-    } else {
+    if (!fp->fun()->isHeavyweight()) {
+        JS_ASSERT(!fp->hasCallObj());
         if (MissingScopeMap::Ptr p = missingScopes.lookup(ScopeIter(fp))) {
-            CallObject &callobj = p->value->scope().asCall();
-            callobj.copyUnaliasedValues(fp);
-            liveScopes.remove(&callobj);
+            js_PutCallObject(fp, p->value->scope().asCall());
             missingScopes.remove(p);
         }
     }
 }
 
 void
 DebugScopes::onPopBlock(JSContext *cx, StackFrame *fp)
 {
-    StaticBlockObject &staticBlock = *fp->maybeBlockChain();
-    if (staticBlock.needsClone()) {
-        ClonedBlockObject &clone = fp->scopeChain()->asClonedBlock();
-        clone.copyUnaliasedValues(fp);
-        liveScopes.remove(&clone);
-    } else {
+    StaticBlockObject &block = *fp->maybeBlockChain();
+    if (!block.needsClone()) {
+        JS_ASSERT(!fp->scopeChain()->isBlock() ||
+                  fp->scopeChain()->asClonedBlock().staticBlock() != block);
         if (MissingScopeMap::Ptr p = missingScopes.lookup(ScopeIter(fp))) {
-            ClonedBlockObject &clone = p->value->scope().asClonedBlock();
-            clone.copyUnaliasedValues(fp);
-            liveScopes.remove(&clone);
+            p->value->scope().asClonedBlock().put(fp);
             missingScopes.remove(p);
         }
     }
 }
 
 void
-DebugScopes::onPopWith(StackFrame *fp)
-{
-    liveScopes.remove(&fp->scopeChain()->asWith());
-}
-
-void
-DebugScopes::onPopStrictEvalScope(StackFrame *fp)
-{
-    liveScopes.remove(&fp->scopeChain()->asCall());
-}
-
-void
 DebugScopes::onGeneratorFrameChange(StackFrame *from, StackFrame *to)
 {
     for (ScopeIter toIter(to); !toIter.done(); toIter = toIter.enclosing()) {
-        if (toIter.hasScopeObject()) {
-            /*
-             * Not only must we correctly replace mappings [scope -> from] with
-             * mappings [scope -> to], but we must add [scope -> to] if it
-             * doesn't already exist so that if we need to proxy a generator's
-             * scope while it is suspended, we can find its frame (which would
-             * otherwise not be found by AllFramesIter).
-             */
-            LiveScopeMap::AddPtr livePtr = liveScopes.lookupForAdd(&toIter.scope());
-            if (livePtr)
-                livePtr->value = to;
-            else
-                liveScopes.add(livePtr, &toIter.scope(), to);
-        } else {
+        if (!toIter.hasScopeObject()) {
             if (MissingScopeMap::Ptr p = missingScopes.lookup(ScopeIter(toIter, from))) {
                 DebugScopeObject &debugScope = *p->value;
-                liveScopes.lookup(&debugScope.scope())->value = to;
+                ScopeObject &scope = debugScope.scope();
+                if (scope.isCall()) {
+                    JS_ASSERT(scope.maybeStackFrame() == from);
+                    scope.setStackFrame(to);
+                    if (scope.enclosingScope().isDeclEnv()) {
+                        JS_ASSERT(scope.enclosingScope().asDeclEnv().maybeStackFrame() == from);
+                        scope.enclosingScope().asDeclEnv().setStackFrame(to);
+                    }
+                }
                 missingScopes.remove(p);
                 missingScopes.put(toIter, &debugScope);
             }
         }
     }
 }
 
 void
 DebugScopes::onCompartmentLeaveDebugMode(JSCompartment *c)
 {
     for (MissingScopeMap::Enum e(missingScopes); !e.empty(); e.popFront()) {
         if (e.front().key.fp()->compartment() == c)
             e.removeFront();
     }
-    for (LiveScopeMap::Enum e(liveScopes); !e.empty(); e.popFront()) {
-        if (e.front().key->compartment() == c)
-            e.removeFront();
-    }
-}
-
-bool
-DebugScopes::updateLiveScopes(JSContext *cx)
-{
-    JS_CHECK_RECURSION(cx, return false);
-
-    /*
-     * Note that we must always update the top frame's scope objects' entries
-     * in liveScopes because we can't be sure code hasn't run in that frame to
-     * change the scope chain since we were last called. The fp->prevUpToDate()
-     * flag indicates whether the scopes of frames older than fp are already
-     * included in liveScopes. It might seem simpler to have fp instead carry a
-     * flag indicating whether fp itself is accurately described, but then we
-     * would need to clear that flag whenever fp ran code. By storing the 'up
-     * to date' bit for fp->prev() in fp, simply popping fp effectively clears
-     * the flag for us, at exactly the time when execution resumes fp->prev().
-     */
-    for (AllFramesIter i(cx->runtime->stackSpace); !i.done(); ++i) {
-        StackFrame *fp = i.fp();
-        if (fp->isDummyFrame() || fp->scopeChain()->compartment() != cx->compartment)
-            continue;
-
-        for (ScopeIter si(fp); !si.done(); si = si.enclosing()) {
-            if (si.hasScopeObject() && !liveScopes.put(&si.scope(), fp))
-                return false;
-        }
-
-        if (fp->prevUpToDate())
-            return true;
-        JS_ASSERT(fp->compartment()->debugMode());
-        fp->setPrevUpToDate();
-    }
-
-    return true;
-}
-
-StackFrame *
-DebugScopes::hasLiveFrame(ScopeObject &scope)
-{
-    if (LiveScopeMap::Ptr p = liveScopes.lookup(&scope))
-        return p->value;
-    return NULL;
 }
 
 /*****************************************************************************/
 
 static JSObject *
 GetDebugScope(JSContext *cx, ScopeIter si);
 
 static DebugScopeObject *
@@ -1742,42 +1682,50 @@ GetDebugScopeForMissing(JSContext *cx, S
      */
     DebugScopeObject *debugScope = NULL;
     switch (si.type()) {
       case ScopeIter::Call: {
         CallObject *callobj = CallObject::createForFunction(cx, si.fp());
         if (!callobj)
             return NULL;
 
-        if (callobj->enclosingScope().isDeclEnv()) {
+        JSObject &maybeDecl = callobj->enclosingScope();
+        if (maybeDecl.isDeclEnv()) {
             JS_ASSERT(CallObjectLambdaName(callobj->getCalleeFunction()));
-            DeclEnvObject &declenv = callobj->enclosingScope().asDeclEnv();
-            enclosingDebug = DebugScopeObject::create(cx, declenv, *enclosingDebug);
+            enclosingDebug = DebugScopeObject::create(cx, maybeDecl.asDeclEnv(), *enclosingDebug);
             if (!enclosingDebug)
                 return NULL;
         }
 
         debugScope = DebugScopeObject::create(cx, *callobj, *enclosingDebug);
+        if (!debugScope)
+            return NULL;
+
+        if (!CanUseDebugScopeMaps(cx))
+            js_PutCallObject(si.fp(), *callobj);
         break;
       }
       case ScopeIter::Block: {
         Rooted<StaticBlockObject *> staticBlock(cx, &si.staticBlock());
         ClonedBlockObject *block = ClonedBlockObject::create(cx, staticBlock, si.fp());
         if (!block)
             return NULL;
 
         debugScope = DebugScopeObject::create(cx, *block, *enclosingDebug);
+        if (!debugScope)
+            return NULL;
+
+        if (!CanUseDebugScopeMaps(cx))
+            block->put(si.fp());
         break;
       }
       case ScopeIter::With:
       case ScopeIter::StrictEvalScope:
         JS_NOT_REACHED("should already have a scope");
     }
-    if (!debugScope)
-        return NULL;
 
     if (!debugScopes.addDebugScope(cx, si, *debugScope))
         return NULL;
 
     return debugScope;
 }
 
 static JSObject *
@@ -1793,19 +1741,30 @@ GetDebugScope(JSContext *cx, JSObject &o
 #ifdef DEBUG
         JSObject *o = &obj;
         while ((o = o->enclosingScope()))
             JS_ASSERT(!o->isScope());
 #endif
         return &obj;
     }
 
+    /*
+     * If 'scope' is a 'with' block, then the chain is fully reified from that
+     * point outwards, and there's no point in bothering with a ScopeIter. If
+     * |scope| has an associated stack frame, we can get more detailed scope
+     * chain information from that.
+     * Note: all this frame hackery will be removed by bug 659577.
+     */
     ScopeObject &scope = obj.asScope();
-    if (StackFrame *fp = cx->runtime->debugScopes->hasLiveFrame(scope))
+    if (!scope.isWith() && scope.maybeStackFrame()) {
+        StackFrame *fp = scope.maybeStackFrame();
+        if (scope.isClonedBlock())
+            fp = js_LiveFrameIfGenerator(fp);
         return GetDebugScope(cx, ScopeIter(fp, scope));
+    }
     return GetDebugScopeForScope(cx, scope, ScopeIter(scope.enclosingScope()));
 }
 
 static JSObject *
 GetDebugScope(JSContext *cx, ScopeIter si)
 {
     JS_CHECK_RECURSION(cx, return NULL);
 
@@ -1818,21 +1777,18 @@ GetDebugScope(JSContext *cx, ScopeIter s
     return GetDebugScopeForScope(cx, si.scope(), si.enclosing());
 }
 
 JSObject *
 js::GetDebugScopeForFunction(JSContext *cx, JSFunction *fun)
 {
     assertSameCompartment(cx, fun);
     JS_ASSERT(cx->compartment->debugMode());
-    if (!cx->runtime->debugScopes->updateLiveScopes(cx))
-        return NULL;
     return GetDebugScope(cx, *fun->environment());
 }
 
 JSObject *
 js::GetDebugScopeForFrame(JSContext *cx, StackFrame *fp)
 {
     assertSameCompartment(cx, fp);
-    if (CanUseDebugScopeMaps(cx) && !cx->runtime->debugScopes->updateLiveScopes(cx))
-        return NULL;
+    /* Unfortunately, we cannot JS_ASSERT(debugMode); see CanUseDebugScopeMaps. */
     return GetDebugScope(cx, ScopeIter(fp));
 }
--- a/js/src/vm/ScopeObject.h
+++ b/js/src/vm/ScopeObject.h
@@ -16,36 +16,34 @@ namespace js {
 
 /*****************************************************************************/
 
 /*
  * A "scope coordinate" describes how to get from head of the scope chain to a
  * given lexically-enclosing variable. A scope coordinate has two dimensions:
  *  - hops: the number of scope objects on the scope chain to skip
  *  - binding: which binding on the scope object
- * Additionally (as described in jsopcode.tbl) there is a 'block' index, but
- * this is only needed for decompilation/inference so it is not included in the
- * main ScopeCoordinate struct: use ScopeCoordinate{BlockChain,Atom} instead.
+ *
+ * XXX: Until bug 659577 lands, this is all for show and all ScopeCoordinates
+ * have hops fixed at 0 and 'binding' is just the js::Bindings binding for args
+ * and vars and the stack depth for let bindings. Thus, aliased-var access
+ * touches the StackFrame like it always did and 'binding' must be first
+ * converted to either an arg or local slot (using Bindings::bindingToLocal or
+ * bindingToArg). With bug 659577, ScopeObject will have a 'var' function that
+ * takes a ScopeCoordinate.
  */
 struct ScopeCoordinate
 {
     uint16_t hops;
-    uint16_t slot;
-
+    uint16_t binding;
     inline ScopeCoordinate(jsbytecode *pc);
-    inline ScopeCoordinate() {}
 };
 
-/* Return the static block chain (or null) accessed by *pc. */
-extern StaticBlockObject *
-ScopeCoordinateBlockChain(JSScript *script, jsbytecode *pc);
-
-/* Return the name being accessed by the given ALIASEDVAR op. */
-extern PropertyName *
-ScopeCoordinateName(JSScript *script, jsbytecode *pc);
+inline JSAtom *
+ScopeCoordinateAtom(JSScript *script, jsbytecode *pc);
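
A hedged sketch of what the XXX comment above describes (nothing here is added by this changeset; the args-before-vars binding layout is an assumption, and let bindings, whose 'binding' is a stack depth, are ignored): with hops fixed at 0, a ScopeCoordinate resolves against the StackFrame rather than against a scope object.

    static Value &
    AliasedBindingSlot(StackFrame *fp, JSScript *script, ScopeCoordinate sc)
    {
        JS_ASSERT(sc.hops == 0);            /* always 0 until bug 659577 relands */
        Bindings &bindings = script->bindings;
        /* Assumed layout: bindings [0, numArgs) are args, vars follow. */
        if (sc.binding < bindings.numArgs())
            return fp->formalArg(sc.binding);
        return fp->varSlot(sc.binding - bindings.numArgs());
    }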
 
 /*****************************************************************************/
 
 /*
  * Scope objects
  *
  * Scope objects are technically real JSObjects but only belong on the scope
  * chain (that is, fp->scopeChain() or fun->environment()). The hierarchy of
@@ -79,89 +77,94 @@ ScopeCoordinateName(JSScript *script, js
  * are cloned at runtime. These objects should never escape into the wild and
  * support a restricted set of ScopeObject operations.
  *
  * See also "Debug scope objects" below.
  */
 
 class ScopeObject : public JSObject
 {
+    /* Use maybeStackFrame() instead. */
+    void *getPrivate() const;
+
   protected:
     static const uint32_t SCOPE_CHAIN_SLOT = 0;
 
   public:
-    /* Number of reserved slots for both CallObject and BlockObject. */
-    static const uint32_t CALL_BLOCK_RESERVED_SLOTS = 2;
-
     /*
      * Since every scope chain terminates with a global object and GlobalObject
      * does not derive ScopeObject (it has a completely different layout), the
      * enclosing scope of a ScopeObject is necessarily non-null.
      */
     inline JSObject &enclosingScope() const;
     inline bool setEnclosingScope(JSContext *cx, HandleObject obj);
 
     /*
-     * Get or set an aliased variable contained in this scope. Unaliased
-     * variables should instead access the StackFrame. Aliased variable access
-     * is primarily made through JOF_SCOPECOORD ops which is why these members
-     * take a ScopeCoordinate instead of just the slot index.
+     * The stack frame for this scope object, if the frame is still active.
+     * Note: these members may not be called for a StaticBlockObject or
+     * WithObject.
      */
-    inline const Value &aliasedVar(ScopeCoordinate sc);
-    inline void setAliasedVar(ScopeCoordinate sc, const Value &v);
+    inline StackFrame *maybeStackFrame() const;
+    inline void setStackFrame(StackFrame *frame);
 
     /* For jit access. */
     static inline size_t offsetOfEnclosingScope();
 };
 
 class CallObject : public ScopeObject
 {
     static const uint32_t CALLEE_SLOT = 1;
 
     static CallObject *
-    create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleFunction callee);
+    create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleObject callee);
 
   public:
-    static const uint32_t RESERVED_SLOTS = CALL_BLOCK_RESERVED_SLOTS;
+    static const uint32_t RESERVED_SLOTS = 3;
 
     static CallObject *createForFunction(JSContext *cx, StackFrame *fp);
     static CallObject *createForStrictEval(JSContext *cx, StackFrame *fp);
 
     /* True if this is for a strict mode eval frame or for a function call. */
     inline bool isForEval() const;
 
     /*
      * The callee function if this CallObject was created for a function
      * invocation, or null if it was created for a strict mode eval frame.
      */
     inline JSObject *getCallee() const;
     inline JSFunction *getCalleeFunction() const;
     inline void setCallee(JSObject *callee);
 
     /* Returns the formal argument at the given index. */
-    inline const Value &arg(unsigned i, MaybeCheckAliasing = CHECK_ALIASING) const;
-    inline void setArg(unsigned i, const Value &v, MaybeCheckAliasing = CHECK_ALIASING);
+    inline const Value &arg(unsigned i) const;
+    inline void setArg(unsigned i, const Value &v);
+    inline void initArgUnchecked(unsigned i, const Value &v);
 
     /* Returns the variable at the given index. */
-    inline const Value &var(unsigned i, MaybeCheckAliasing = CHECK_ALIASING) const;
-    inline void setVar(unsigned i, const Value &v, MaybeCheckAliasing = CHECK_ALIASING);
+    inline const Value &var(unsigned i) const;
+    inline void setVar(unsigned i, const Value &v);
+    inline void initVarUnchecked(unsigned i, const Value &v);
 
     /*
      * Get the actual arrays of arguments and variables. Only call if type
      * inference is enabled, where we ensure that call object variables are in
      * contiguous slots (see NewCallObject).
      */
     inline HeapSlotArray argArray();
     inline HeapSlotArray varArray();
 
+    inline void copyValues(unsigned nargs, Value *argv, unsigned nvars, Value *slots);
+
+    static JSBool getArgOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp);
+    static JSBool getVarOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp);
     static JSBool setArgOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp);
     static JSBool setVarOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp);
 
-    /* Copy in all the unaliased formals and locals. */
-    void copyUnaliasedValues(StackFrame *fp);
+    /* Return whether this environment contains 'name' and, if so, its value. */
+    bool containsVarOrArg(PropertyName *name, Value *vp, JSContext *cx);
 };
 
 class DeclEnvObject : public ScopeObject
 {
   public:
     static const uint32_t RESERVED_SLOTS = 1;
     static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT2;
 
@@ -176,59 +179,59 @@ class NestedScopeObject : public ScopeOb
 
   public:
     /* Return the abstract stack depth right before entering this nested scope. */
     uint32_t stackDepth() const;
 };
 
 class WithObject : public NestedScopeObject
 {
+    /* These ScopeObject operations are not valid on a with object. */
+    js::StackFrame *maybeStackFrame() const;
+    void setStackFrame(StackFrame *frame);
+
     static const unsigned THIS_SLOT = 2;
 
     /* Use WithObject::object() instead. */
     JSObject *getProto() const;
 
   public:
     static const unsigned RESERVED_SLOTS = 3;
-    static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4_BACKGROUND;
+    static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
 
     static WithObject *
     create(JSContext *cx, HandleObject proto, HandleObject enclosing, uint32_t depth);
 
     /* Return object for the 'this' class hook. */
     JSObject &withThis() const;
 
     /* Return the 'o' in 'with (o)'. */
     JSObject &object() const;
 };
 
 class BlockObject : public NestedScopeObject
 {
   public:
-    static const unsigned RESERVED_SLOTS = CALL_BLOCK_RESERVED_SLOTS;
-    static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4_BACKGROUND;
+    static const unsigned RESERVED_SLOTS = 2;
+    static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
 
     /* Return the number of variables associated with this block. */
     inline uint32_t slotCount() const;
 
-    /*
-     * Return the local corresponding to the ith binding where i is in the
-     * range [0, slotCount()) and the return local index is in the range
-     * [script->nfixed, script->nfixed + script->nslots).
-     */
-    unsigned slotToFrameLocal(JSScript *script, unsigned i);
-
   protected:
     /* Blocks contain an object slot for each slot i: 0 <= i < slotCount. */
-    inline const Value &slotValue(unsigned i);
-    inline void setSlotValue(unsigned i, const Value &v);
+    inline HeapSlot &slotValue(unsigned i);
 };
 
 class StaticBlockObject : public BlockObject
 {
+    /* These ScopeObject operations are not valid on a static block object. */
+    StackFrame *maybeStackFrame() const;
+    void setStackFrame(StackFrame *frame);
+
   public:
     static StaticBlockObject *create(JSContext *cx);
 
     inline StaticBlockObject *enclosingBlock() const;
     inline void setEnclosingBlock(StaticBlockObject *blockObj);
 
     void setStackDepth(uint32_t depth);
     bool containsVarAtDepth(uint32_t depth);
@@ -246,36 +249,41 @@ class StaticBlockObject : public BlockOb
      */
     void setAliased(unsigned i, bool aliased);
     bool isAliased(unsigned i);
 
     /*
      * A static block object is cloned (when entering the block) iff some
      * variable of the block isAliased.
      */
-    bool needsClone();
+    bool needsClone() const;
 
     const Shape *addVar(JSContext *cx, jsid id, int index, bool *redeclared);
 };
 
 class ClonedBlockObject : public BlockObject
 {
   public:
     static ClonedBlockObject *create(JSContext *cx, Handle<StaticBlockObject *> block,
                                      StackFrame *fp);
 
     /* The static block from which this block was cloned. */
     StaticBlockObject &staticBlock() const;
 
+    /*
+     * When this block's stack slots are about to be popped, 'put' must be
+     * called to copy the slot values into this block's object slots.
+     */
+    void put(StackFrame *fp);
+
     /* Assuming 'put' has been called, return the value of the ith let var. */
-    const Value &var(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
-    void setVar(unsigned i, const Value &v, MaybeCheckAliasing = CHECK_ALIASING);
+    const Value &closedSlot(unsigned i);
 
-    /* Copy in all the unaliased formals and locals. */
-    void copyUnaliasedValues(StackFrame *fp);
+    /* Return whether this environment contains 'name' and, if so, its value. */
+    bool containsVar(PropertyName *name, Value *vp, JSContext *cx);
 };
 
 template<XDRMode mode>
 bool
 XDRStaticBlockObject(XDRState<mode> *xdr, JSScript *script, StaticBlockObject **objp);
 
 extern JSObject *
 CloneStaticBlockObject(JSContext *cx, StaticBlockObject &srcBlock,
@@ -409,58 +417,37 @@ class DebugScopes
     /* The map from (non-debug) scopes to debug scopes. */
     typedef WeakMap<HeapPtrObject, HeapPtrObject> ObjectWeakMap;
     ObjectWeakMap proxiedScopes;
 
     /*
      * The map from live frames which have optimized-away scopes to the
      * corresponding debug scopes.
      */
-    typedef HashMap<ScopeIter,
-                    DebugScopeObject *,
-                    ScopeIter,
-                    RuntimeAllocPolicy> MissingScopeMap;
+    typedef HashMap<ScopeIter, DebugScopeObject *, ScopeIter, RuntimeAllocPolicy> MissingScopeMap;
     MissingScopeMap missingScopes;
 
-    /*
-     * The map from scope objects of live frames to the live frame. This map
-     * updated lazily whenever the debugger needs the information. In between
-     * two lazy updates, liveScopes becomes incomplete (but not invalid, onPop*
-     * removes scopes as they are popped). Thus, two consecutive debugger lazy
-     * updates of liveScopes need only fill in the new scopes.
-     */
-    typedef HashMap<ScopeObject *,
-                    StackFrame *,
-                    DefaultHasher<ScopeObject *>,
-                    RuntimeAllocPolicy> LiveScopeMap;
-    LiveScopeMap liveScopes;
-
   public:
     DebugScopes(JSRuntime *rt);
     ~DebugScopes();
     bool init();
 
     void mark(JSTracer *trc);
-    void sweep(JSRuntime *rt);
+    void sweep();
 
     DebugScopeObject *hasDebugScope(JSContext *cx, ScopeObject &scope) const;
     bool addDebugScope(JSContext *cx, ScopeObject &scope, DebugScopeObject &debugScope);
 
     DebugScopeObject *hasDebugScope(JSContext *cx, ScopeIter si) const;
     bool addDebugScope(JSContext *cx, ScopeIter si, DebugScopeObject &debugScope);
 
-    bool updateLiveScopes(JSContext *cx);
-    StackFrame *hasLiveFrame(ScopeObject &scope);
-
     /*
      * In debug-mode, these must be called whenever exiting a call/block or
      * when activating/yielding a generator.
      */
     void onPopCall(StackFrame *fp);
     void onPopBlock(JSContext *cx, StackFrame *fp);
-    void onPopWith(StackFrame *fp);
-    void onPopStrictEvalScope(StackFrame *fp);
     void onGeneratorFrameChange(StackFrame *from, StackFrame *to);
     void onCompartmentLeaveDebugMode(JSCompartment *c);
 };
 
 }  /* namespace js */
 #endif /* ScopeObject_h___ */
--- a/js/src/vm/Stack-inl.h
+++ b/js/src/vm/Stack-inl.h
@@ -87,17 +87,17 @@ StackFrame::initPrev(JSContext *cx)
         prevpc_ = regs->pc;
         prevInline_ = regs->inlined();
         JS_ASSERT_IF(!prev_->isDummyFrame(),
                      uint32_t(prevpc_ - prev_->script()->code) < prev_->script()->length);
     } else {
         prev_ = NULL;
 #ifdef DEBUG
         prevpc_ = (jsbytecode *)0xbadc;
-        prevInline_ = (InlinedSite *)0xbadc;
+        prevInline_ = (JSInlinedSite *)0xbadc;
 #endif
     }
 }
 
 inline void
 StackFrame::resetGeneratorPrev(JSContext *cx)
 {
     flags_ |= HAS_PREVPC;
@@ -142,18 +142,19 @@ StackFrame::initCallFrame(JSContext *cx,
     u.nactual = nactual;
     scopeChain_ = callee.environment();
     ncode_ = NULL;
     initPrev(cx);
     blockChain_= NULL;
     JS_ASSERT(!hasBlockChain());
     JS_ASSERT(!hasHookData());
     JS_ASSERT(annotation() == NULL);
+    JS_ASSERT(!hasCallObj());
 
-    initVarsToUndefined();
+    SetValueRangeToUndefined(slots(), script->nfixed);
 }
 
 /*
  * Reinitialize the StackFrame fields that have been initialized up to the
  * point of FixupArity in the function prologue.
  */
 inline void
 StackFrame::initFixupFrame(StackFrame *prev, StackFrame::Flags flags, void *ncode, unsigned nactual)
@@ -165,208 +166,253 @@ StackFrame::initFixupFrame(StackFrame *p
                          UNDERFLOW_ARGS)) == 0);
 
     flags_ = FUNCTION | flags;
     prev_ = prev;
     ncode_ = ncode;
     u.nactual = nactual;
 }
 
-inline bool
-StackFrame::heavyweightFunctionPrologue(JSContext *cx)
-{
-    JS_ASSERT(isNonEvalFunctionFrame());
-    JS_ASSERT(fun()->isHeavyweight());
-
-    CallObject *callobj = CallObject::createForFunction(cx, this);
-    if (!callobj)
-        return false;
-
-    pushOnScopeChain(*callobj);
-    flags_ |= HAS_CALL_OBJ;
-
-    if (script()->nesting())
-        types::NestingPrologue(cx, this);
-
-    return true;
-}
-
-
-inline void
-StackFrame::initVarsToUndefined()
-{
-    SetValueRangeToUndefined(slots(), script()->nfixed);
-}
-
 inline JSObject *
 StackFrame::createRestParameter(JSContext *cx)
 {
     JS_ASSERT(fun()->hasRest());
     unsigned nformal = fun()->nargs - 1, nactual = numActualArgs();
     unsigned nrest = (nactual > nformal) ? nactual - nformal : 0;
-    return NewDenseCopiedArray(cx, nrest, actuals() + nformal);
-}
-
-inline Value &
-StackFrame::unaliasedVar(unsigned i, MaybeCheckAliasing checkAliasing)
-{
-    JS_ASSERT_IF(checkAliasing, !script()->varIsAliased(i));
-    JS_ASSERT(i < script()->nfixed);
-    return slots()[i];
-}
-
-inline Value &
-StackFrame::unaliasedLocal(unsigned i, MaybeCheckAliasing checkAliasing)
-{
-#ifdef DEBUG
-    if (checkAliasing) {
-        JS_ASSERT(i < script()->nslots);
-        if (i < script()->nfixed) {
-            JS_ASSERT(!script()->varIsAliased(i));
-        } else {
-            unsigned depth = i - script()->nfixed;
-            for (StaticBlockObject *b = maybeBlockChain(); b; b = b->enclosingBlock()) {
-                if (b->containsVarAtDepth(depth)) {
-                    JS_ASSERT(!b->isAliased(depth - b->stackDepth()));
-                    break;
-                }
-            }
-        }
-    }
-#endif
-    return slots()[i];
+    return NewDenseCopiedArray(cx, nrest, actualArgs() + nformal);
 }
 
 inline Value &
-StackFrame::unaliasedFormal(unsigned i, MaybeCheckAliasing checkAliasing)
+StackFrame::canonicalActualArg(unsigned i) const
 {
-    JS_ASSERT(i < numFormalArgs());
-    JS_ASSERT_IF(checkAliasing, !script()->formalIsAliased(i));
-    return formals()[i];
-}
-
-inline Value &
-StackFrame::unaliasedActual(unsigned i)
-{
+    if (i < numFormalArgs())
+        return formalArg(i);
     JS_ASSERT(i < numActualArgs());
-    JS_ASSERT(!script()->formalIsAliased(i));
-    return i < numFormalArgs() ? formals()[i] : actuals()[i];
+    return actualArgs()[i];
 }
 
 template <class Op>
-inline void
-StackFrame::forEachUnaliasedActual(Op op)
+inline bool
+StackFrame::forEachCanonicalActualArg(Op op, unsigned start /* = 0 */, unsigned count /* = unsigned(-1) */)
 {
-    JS_ASSERT(script()->numClosedArgs() == 0);
-    JS_ASSERT(!script()->needsArgsObj());
+    unsigned nformal = fun()->nargs;
+    JS_ASSERT(start <= nformal);
 
-    unsigned nformal = numFormalArgs();
+    Value *formals = formalArgsEnd() - nformal;
     unsigned nactual = numActualArgs();
+    if (count == unsigned(-1))
+        count = nactual - start;
 
-    const Value *formalsEnd = (const Value *)this;
-    const Value *formals = formalsEnd - nformal;
+    unsigned end = start + count;
+    JS_ASSERT(end >= start);
+    JS_ASSERT(end <= nactual);
 
-    if (nactual <= nformal) {
-        const Value *actualsEnd = formals + nactual;
-        for (const Value *p = formals; p < actualsEnd; ++p)
-            op(*p);
+    if (end <= nformal) {
+        Value *p = formals + start;
+        for (; start < end; ++p, ++start) {
+            if (!op(start, p))
+                return false;
+        }
     } else {
-        for (const Value *p = formals; p < formalsEnd; ++p)
-            op(*p);
+        for (Value *p = formals + start; start < nformal; ++p, ++start) {
+            if (!op(start, p))
+                return false;
+        }
+        JS_ASSERT(start >= nformal);
+        Value *actuals = formals - (nactual + 2) + start;
+        for (Value *p = actuals; start < end; ++p, ++start) {
+            if (!op(start, p))
+                return false;
+        }
+    }
+    return true;
+}
 
-        const Value *actualsEnd = formals - 2;
-        const Value *actuals = actualsEnd - nactual;
-        for (const Value *p = actuals + nformal; p < actualsEnd; ++p)
-            op(*p);
+template <class Op>
+inline bool
+StackFrame::forEachFormalArg(Op op)
+{
+    Value *formals = formalArgsEnd() - fun()->nargs;
+    Value *formalsEnd = formalArgsEnd();
+    unsigned i = 0;
+    for (Value *p = formals; p != formalsEnd; ++p, ++i) {
+        if (!op(i, p))
+            return false;
     }
+    return true;
 }
 
 struct CopyTo
 {
     Value *dst;
     CopyTo(Value *dst) : dst(dst) {}
-    void operator()(const Value &src) { *dst++ = src; }
+    bool operator()(unsigned, Value *src) {
+        *dst++ = *src;
+        return true;
+    }
 };
 
 inline unsigned
-StackFrame::numFormalArgs() const
-{
-    JS_ASSERT(hasArgs());
-    return fun()->nargs;
-}
-
-inline unsigned
 StackFrame::numActualArgs() const
 {
     /*
      * u.nactual is always coherent, except for method JIT frames where the
      * callee does not access its arguments and the number of actual arguments
      * matches the number of formal arguments. The JIT requires that all frames
      * which do not have an arguments object and use their arguments have a
      * coherent u.nactual (even though the below code may not use it), as
      * JIT code may access the field directly.
      */
     JS_ASSERT(hasArgs());
     if (JS_UNLIKELY(flags_ & (OVERFLOW_ARGS | UNDERFLOW_ARGS)))
         return u.nactual;
     return numFormalArgs();
 }
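
As a minimal sketch of the u.nactual coherence rule described in the comment above, the decision reduces to one flag test. The flag values and names below are illustrative stand-ins, not the engine's definitions:

    #include <cassert>
    #include <cstdint>

    // Illustrative flag values only; not the engine's definitions.
    enum : uint32_t { OVERFLOW_ARGS = 0x400, UNDERFLOW_ARGS = 0x800 };

    // Mirrors the rule above: the stored count is consulted only when the
    // actual and formal argument counts differ; otherwise they are equal.
    unsigned actualArgCount(uint32_t flags, unsigned storedNactual, unsigned nformal)
    {
        if (flags & (OVERFLOW_ARGS | UNDERFLOW_ARGS))
            return storedNactual;
        return nformal;
    }

    int main()
    {
        assert(actualArgCount(0, 0, 3) == 3);                // counts match
        assert(actualArgCount(OVERFLOW_ARGS, 5, 3) == 5);    // too many actuals
        assert(actualArgCount(UNDERFLOW_ARGS, 1, 3) == 1);   // too few actuals
        return 0;
    }
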
 
-inline ArgumentsObject &
-StackFrame::argsObj() const
+inline Value *
+StackFrame::actualArgs() const
 {
-    JS_ASSERT(script()->needsArgsObj());
-    JS_ASSERT(flags_ & HAS_ARGS_OBJ);
-    return *argsObj_;
+    JS_ASSERT(hasArgs());
+    Value *argv = formalArgs();
+    if (JS_UNLIKELY(flags_ & OVERFLOW_ARGS))
+        return argv - (2 + u.nactual);
+    return argv;
+}
+
+inline Value *
+StackFrame::actualArgsEnd() const
+{
+    JS_ASSERT(hasArgs());
+    if (JS_UNLIKELY(flags_ & OVERFLOW_ARGS))
+        return formalArgs() - 2;
+    return formalArgs() + numActualArgs();
 }
 
 inline void
-StackFrame::initArgsObj(ArgumentsObject &argsobj)
-{
-    JS_ASSERT(script()->needsArgsObj());
-    flags_ |= HAS_ARGS_OBJ;
-    argsObj_ = &argsobj;
-}
-
-inline ScopeObject &
-StackFrame::aliasedVarScope(ScopeCoordinate sc) const
+StackFrame::setScopeChain(JSObject &obj)
 {
-    JSObject *scope = &scopeChain()->asScope();
-    for (unsigned i = sc.hops; i; i--)
-        scope = &scope->asScope().enclosingScope();
-    return scope->asScope();
-}
-
-inline void
-StackFrame::pushOnScopeChain(ScopeObject &scope)
-{
-    JS_ASSERT(*scopeChain() == scope.enclosingScope() ||
-              *scopeChain() == scope.asCall().enclosingScope().asDeclEnv().enclosingScope());
-    scopeChain_ = &scope;
+#ifdef DEBUG
+    JS_ASSERT(&obj != NULL);
+    if (hasCallObj()) {
+        JSObject *pobj = &obj;
+        while (pobj && !pobj->isWith() && pobj->asScope().maybeStackFrame() != this)
+            pobj = pobj->enclosingScope();
+        JS_ASSERT(pobj);
+    } else {
+        for (JSObject *pobj = &obj; pobj->isScope() && !pobj->isWith(); pobj = pobj->enclosingScope())
+            JS_ASSERT_IF(pobj->isCall(), pobj->asScope().maybeStackFrame() != this);
+    }
+#endif
+    scopeChain_ = &obj;
     flags_ |= HAS_SCOPECHAIN;
 }
 
 inline void
-StackFrame::popOffScopeChain()
+StackFrame::initScopeChain(CallObject &obj)
 {
-    JS_ASSERT(flags_ & HAS_SCOPECHAIN);
-    scopeChain_ = &scopeChain_->asScope().enclosingScope();
+    JS_ASSERT(&obj != NULL);
+    JS_ASSERT(!hasCallObj() && obj.maybeStackFrame() == this);
+    scopeChain_ = &obj;
+    flags_ |= HAS_SCOPECHAIN | HAS_CALL_OBJ;
 }
 
 inline CallObject &
 StackFrame::callObj() const
 {
-    JS_ASSERT(fun()->isHeavyweight());
+    JS_ASSERT_IF(isNonEvalFunctionFrame() || isStrictEvalFrame(), hasCallObj());
 
     JSObject *pobj = scopeChain();
     while (JS_UNLIKELY(!pobj->isCall()))
         pobj = pobj->enclosingScope();
     return pobj->asCall();
 }
 
+inline bool
+StackFrame::maintainNestingState() const
+{
+    /*
+     * Whether to invoke the nesting epilogue/prologue to maintain active
+     * frame counts and check for reentrant outer functions.
+     */
+    return isNonEvalFunctionFrame() && !isGeneratorFrame() && script()->nesting();
+}
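
The predicate documented in maintainNestingState() above combines three conditions. A small self-contained sketch, using an illustrative stand-in struct rather than the engine's types:

    #include <cassert>

    // Illustrative stand-in for the three conditions checked above; not the
    // engine's types.
    struct FrameKind {
        bool nonEvalFunctionFrame;
        bool generatorFrame;
        bool scriptHasNesting;
    };

    bool maintainNestingState(const FrameKind &f)
    {
        return f.nonEvalFunctionFrame && !f.generatorFrame && f.scriptHasNesting;
    }

    int main()
    {
        assert(maintainNestingState({true, false, true}));
        assert(!maintainNestingState({true, true, true}));    // generators opt out
        assert(!maintainNestingState({false, false, true}));  // eval/global frames opt out
        return 0;
    }
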
+
+inline bool
+StackFrame::functionPrologue(JSContext *cx)
+{
+    JS_ASSERT(isNonEvalFunctionFrame());
+    JS_ASSERT(!isGeneratorFrame());
+
+    if (fun()->isHeavyweight()) {
+        CallObject *callobj = CallObject::createForFunction(cx, this);
+        if (!callobj)
+            return false;
+        initScopeChain(*callobj);
+    } else {
+        /* Force instantiation of the scope chain, for JIT frames. */
+        scopeChain();
+    }
+
+    if (script()->nesting()) {
+        JS_ASSERT(maintainNestingState());
+        types::NestingPrologue(cx, this);
+    }
+
+    return true;
+}
+
+inline void
+StackFrame::functionEpilogue(JSContext *cx)
+{
+    JS_ASSERT(isNonEvalFunctionFrame());
+
+    if (cx->compartment->debugMode())
+        cx->runtime->debugScopes->onPopCall(this);
+
+    if (flags_ & (HAS_ARGS_OBJ | HAS_CALL_OBJ)) {
+        if (hasCallObj())
+            js_PutCallObject(this, scopeChain_->asCall());
+        if (hasArgsObj())
+            js_PutArgsObject(this);
+    }
+
+    if (maintainNestingState())
+        types::NestingEpilogue(this);
+}
+
+inline void
+StackFrame::updateEpilogueFlags()
+{
+    if (flags_ & (HAS_ARGS_OBJ | HAS_CALL_OBJ)) {
+        if (hasArgsObj() && !argsObj().maybeStackFrame())
+            flags_ &= ~HAS_ARGS_OBJ;
+        if (hasCallObj() && !callObj().maybeStackFrame()) {
+            /*
+             * For function frames, the call object may or may not have an
+             * enclosing DeclEnv object, so we use the callee's parent, since
+             * it was the initial scope chain. For global (strict) eval frames,
+             * there is no callee, but the call object's parent is the initial
+             * scope chain.
+             */
+            scopeChain_ = isFunctionFrame()
+                          ? callee().environment()
+                          : &scopeChain_->asScope().enclosingScope();
+            flags_ &= ~HAS_CALL_OBJ;
+        }
+    }
+
+    /*
+     * For outer/inner function frames, undo the active frame balancing so that
+     * when we redo it in the epilogue we get the right final value. The other
+     * nesting epilogue changes (update active args/vars) are idempotent.
+     */
+    if (maintainNestingState())
+        script()->nesting()->activeFrames++;
+}
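
The balancing argument in the comment above can be checked with a toy counter: if an epilogue that decrements activeFrames may be re-run, undoing the decrement first leaves the final count correct. A sketch with illustrative names only:

    #include <cassert>

    // Toy counter for the balancing argument above; names are illustrative.
    struct Nesting { int activeFrames = 0; };

    void nestingPrologue(Nesting &n) { n.activeFrames++; }
    void nestingEpilogue(Nesting &n) { n.activeFrames--; }

    int main()
    {
        Nesting n;
        nestingPrologue(n);   // frame became active
        nestingEpilogue(n);   // epilogue already ran once
        n.activeFrames++;     // updateEpilogueFlags-style undo before the redo
        nestingEpilogue(n);   // epilogue redone later
        assert(n.activeFrames == 0);  // net count stays balanced
        return 0;
    }
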
+
 /*****************************************************************************/
 
 STATIC_POSTCONDITION(!return || ubound(from) >= nvals)
 JS_ALWAYS_INLINE bool
 StackSpace::ensureSpace(JSContext *cx, MaybeReportError report, Value *from, ptrdiff_t nvals,
                         JSCompartment *dest) const
 {
     assertInvariants();
@@ -378,17 +424,17 @@ StackSpace::ensureSpace(JSContext *cx, M
         return ensureSpaceSlow(cx, report, from, nvals, dest);
     return true;
 }
 
 inline Value *
 StackSpace::getStackLimit(JSContext *cx, MaybeReportError report)
 {
     FrameRegs &regs = cx->regs();
-    unsigned nvals = regs.fp()->script()->nslots + STACK_JIT_EXTRA;
+    unsigned nvals = regs.fp()->numSlots() + STACK_JIT_EXTRA;
     return ensureSpace(cx, report, regs.sp, nvals)
            ? conservativeEnd_
            : NULL;
 }
 
 /*****************************************************************************/
 
 JS_ALWAYS_INLINE StackFrame *
@@ -399,17 +445,17 @@ ContextStack::getCallFrame(JSContext *cx
     unsigned nformal = fun->nargs;
 
     Value *firstUnused = args.end();
     JS_ASSERT(firstUnused == space().firstUnused());
 
     /* Include extra space to satisfy the method-jit stackLimit invariant. */
     unsigned nvals = VALUES_PER_STACK_FRAME + script->nslots + StackSpace::STACK_JIT_EXTRA;
 
-    /* Maintain layout invariant: &formals[0] == ((Value *)fp) - nformal. */
+    /* Maintain layout invariant: &formalArgs[0] == ((Value *)fp) - nformal. */
 
     if (args.length() == nformal) {
         if (!space().ensureSpace(cx, report, firstUnused, nvals))
             return NULL;
         return reinterpret_cast<StackFrame *>(firstUnused);
     }
 
     if (args.length() < nformal) {
@@ -491,30 +537,32 @@ ContextStack::getFixupFrame(JSContext *c
 
 JS_ALWAYS_INLINE void
 ContextStack::popInlineFrame(FrameRegs &regs)
 {
     JS_ASSERT(onTop());
     JS_ASSERT(&regs == &seg_->regs());
 
     StackFrame *fp = regs.fp();
-    Value *newsp = fp->actuals() - 1;
+    fp->functionEpilogue(cx_);
+
+    Value *newsp = fp->actualArgs() - 1;
     JS_ASSERT(newsp >= fp->prev()->base());
 
     newsp[-1] = fp->returnValue();
     regs.popFrame(newsp);
 }
 
 inline void
 ContextStack::popFrameAfterOverflow()
 {
     /* Restore the regs to what they were on entry to JSOP_CALL. */
     FrameRegs &regs = seg_->regs();
     StackFrame *fp = regs.fp();
-    regs.popFrame(fp->actuals() + fp->numActualArgs());
+    regs.popFrame(fp->actualArgsEnd());
 }
 
 inline JSScript *
 ContextStack::currentScript(jsbytecode **ppc) const
 {
     if (ppc)
         *ppc = NULL;
 
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -85,103 +85,106 @@ StackFrame::initExecuteFrame(JSScript *s
 
 void
 StackFrame::initDummyFrame(JSContext *cx, JSObject &chain)
 {
     PodZero(this);
     flags_ = DUMMY | HAS_PREVPC | HAS_SCOPECHAIN;
     initPrev(cx);
     JS_ASSERT(chain.isGlobal());
-    scopeChain_ = &chain;
+    setScopeChain(chain);
 }
 
 template <class T, class U, StackFrame::TriggerPostBarriers doPostBarrier>
 void
-StackFrame::copyFrameAndValues(JSContext *cx, StackFrame *fp, T *vp,
+StackFrame::stealFrameAndSlots(JSContext *cx, StackFrame *fp, T *vp,
                                StackFrame *otherfp, U *othervp, Value *othersp)
 {
     JS_ASSERT((U *)vp == (U *)this - ((U *)otherfp - othervp));
-    JS_ASSERT((Value *)othervp == otherfp->generatorArgsSnapshotBegin());
+    JS_ASSERT((Value *)othervp == otherfp->actualArgs() - 2);
     JS_ASSERT(othersp >= otherfp->slots());
-    JS_ASSERT(othersp <= otherfp->generatorSlotsSnapshotBegin() + otherfp->script()->nslots);
+    JS_ASSERT(othersp <= otherfp->base() + otherfp->numSlots());
     JS_ASSERT((T *)fp - vp == (U *)otherfp - othervp);
 
     /* Copy args, StackFrame, and slots. */
-    U *srcend = (U *)otherfp->generatorArgsSnapshotEnd();
+    U *srcend = (U *)otherfp->formalArgsEnd();
     T *dst = vp;
     for (U *src = othervp; src < srcend; src++, dst++)
         *dst = *src;
 
     *fp = *otherfp;
     if (doPostBarrier)
         fp->writeBarrierPost();
 
     srcend = (U *)othersp;
     dst = (T *)fp->slots();
     for (U *src = (U *)otherfp->slots(); src < srcend; src++, dst++)
         *dst = *src;
 
+    /*
+     * Repoint Call, Arguments, Block and With objects to the new live frame.
+     * Call and Arguments are done directly because we have pointers to them.
+     * Block and With objects are done indirectly through 'liveFrame'. See
+     * js_LiveFrameToFloating comment in jsiter.h.
+     */
+    if (hasCallObj()) {
+        CallObject &obj = callObj();
+        obj.setStackFrame(this);
+        otherfp->flags_ &= ~HAS_CALL_OBJ;
+        if (js_IsNamedLambda(fun())) {
+            DeclEnvObject &env = obj.enclosingScope().asDeclEnv();
+            env.setStackFrame(this);
+        }
+    }
+    if (hasArgsObj()) {
+        ArgumentsObject &argsobj = argsObj();
+        if (argsobj.isNormalArguments())
+            argsobj.setStackFrame(this);
+        else
+            JS_ASSERT(!argsobj.maybeStackFrame());
+        otherfp->flags_ &= ~HAS_ARGS_OBJ;
+    }
+
     if (cx->compartment->debugMode())
         cx->runtime->debugScopes->onGeneratorFrameChange(otherfp, this);
 }
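
The repointing step described above boils down to updating back-pointers after frame state is copied between the floating and on-stack locations. A toy sketch with stand-in types, not SpiderMonkey's real objects:

    #include <cassert>

    // Stand-in types only; nothing here is SpiderMonkey's real layout.
    struct Frame;
    struct CallObj { Frame *frame = nullptr; };

    struct Frame {
        CallObj *callObj = nullptr;

        // After copying frame state from 'other', objects holding a back-pointer
        // to the frame must be repointed at the new live copy.
        void repointAfterCopyFrom(Frame &other) {
            callObj = other.callObj;
            if (callObj)
                callObj->frame = this;
        }
    };

    int main()
    {
        CallObj c;
        Frame floating;
        floating.callObj = &c;
        c.frame = &floating;

        Frame onStack;
        onStack.repointAfterCopyFrom(floating);
        assert(c.frame == &onStack);   // the call object now names the live frame
        return 0;
    }
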
 
 /* Note: explicit instantiation for js_NewGenerator located in jsiter.cpp. */
-template void StackFrame::copyFrameAndValues<Value, HeapValue, StackFrame::NoPostBarrier>(
+template void StackFrame::stealFrameAndSlots<Value, HeapValue, StackFrame::NoPostBarrier>(
                                              JSContext *, StackFrame *, Value *,
                                              StackFrame *, HeapValue *, Value *);
-template void StackFrame::copyFrameAndValues<HeapValue, Value, StackFrame::DoPostBarrier>(
+template void StackFrame::stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
                                              JSContext *, StackFrame *, HeapValue *,
                                              StackFrame *, Value *, Value *);
 
 void
 StackFrame::writeBarrierPost()
 {
     /* This needs to follow the same rules as in js_TraceStackFrame. */
     if (scopeChain_)
         JSObject::writeBarrierPost(scopeChain_, (void *)&scopeChain_);
     if (isDummyFrame())
         return;
-    if (flags_ & HAS_ARGS_OBJ)
+    if (hasArgsObj())
         JSObject::writeBarrierPost(argsObj_, (void *)&argsObj_);
     if (isScriptFrame()) {
         if (isFunctionFrame()) {
             JSFunction::writeBarrierPost((JSObject *)exec.fun, (void *)&exec.fun);
             if (isEvalFrame())
                 JSScript::writeBarrierPost(u.evalScript, (void *)&u.evalScript);
         } else {
             JSScript::writeBarrierPost(exec.script, (void *)&exec.script);
         }
     }
     if (hasReturnValue())
         HeapValue::writeBarrierPost(rval_, &rval_);
 }
 
-JSGenerator *
-StackFrame::maybeSuspendedGenerator(JSRuntime *rt)
-{
-    /*
-     * A suspended generator's frame is embedded inside the JSGenerator object
-     * instead of on the contiguous stack like all active frames.
-     */
-    if (!isGeneratorFrame() || rt->stackSpace.containsFast(this))
-        return NULL;
-
-    /*
-     * Once we know we have a suspended generator frame, there is a static
-     * offset from the frame's snapshot to the beginning of the JSGenerator.
-     */
-    char *vp = reinterpret_cast<char *>(generatorArgsSnapshotBegin());
-    char *p = vp - offsetof(JSGenerator, stackSnapshot);
-    JSGenerator *gen = reinterpret_cast<JSGenerator *>(p);
-    JS_ASSERT(gen->fp == this);
-    return gen;
-}
-
 jsbytecode *
-StackFrame::prevpcSlow(InlinedSite **pinlined)
+StackFrame::prevpcSlow(JSInlinedSite **pinlined)
 {
     JS_ASSERT(!(flags_ & HAS_PREVPC));
 #if defined(JS_METHODJIT) && defined(JS_MONOIC)
     StackFrame *p = prev();
     mjit::JITScript *jit = p->script()->getJIT(p->isConstructing(), p->compartment()->needsBarrier());
     prevpc_ = jit->nativeToPC(ncode_, &prevInline_);
     flags_ |= HAS_PREVPC;
     if (pinlined)
@@ -189,17 +192,17 @@ StackFrame::prevpcSlow(InlinedSite **pin
     return prevpc_;
 #else
     JS_NOT_REACHED("Unknown PC for frame");
     return NULL;
 #endif
 }
 
 jsbytecode *
-StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, InlinedSite **pinlined)
+StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, JSInlinedSite **pinlined)
 {
     JS_ASSERT_IF(next, next->prev() == this);
 
     StackSegment &seg = stack.space().containingSegment(this);
     FrameRegs &regs = seg.regs();
 
     /*
      * This isn't just an optimization; seg->computeNextFrame(fp) is only
@@ -212,165 +215,71 @@ StackFrame::pcQuadratic(const ContextSta
     }
 
     if (!next)
         next = seg.computeNextFrame(this);
     return next->prevpc(pinlined);
 }
 
 bool
-StackFrame::prologue(JSContext *cx, bool newType)
-{
-    JS_ASSERT(!isDummyFrame());
-    JS_ASSERT(!isGeneratorFrame());
-
-    if (isEvalFrame()) {
-        if (script()->strictModeCode) {
-            CallObject *callobj = CallObject::createForStrictEval(cx, this);
-            if (!callobj)
-                return false;
-            pushOnScopeChain(*callobj);
-        }
-        return true;
-    }
-
-    if (isGlobalFrame())
-        return true;
-
-    JS_ASSERT(isNonEvalFunctionFrame());
-
-    if (fun()->isHeavyweight()) {
-        CallObject *callobj = CallObject::createForFunction(cx, this);
-        if (!callobj)
-            return false;
-        pushOnScopeChain(*callobj);
-        flags_ |= HAS_CALL_OBJ;
-    }
-
-    if (script()->nesting())
-        types::NestingPrologue(cx, this);
-
-    if (isConstructing()) {
-        RootedObject callee(cx, &this->callee());
-        JSObject *obj = js_CreateThisForFunction(cx, callee, newType);
-        if (!obj)
-            return false;
-        functionThis() = ObjectValue(*obj);
-    }
-
-    Probes::enterJSFun(cx, fun(), script());
-    return true;
-}
-
-void
-StackFrame::epilogue(JSContext *cx)
-{
-    JS_ASSERT(!isDummyFrame());
-    JS_ASSERT(!isGeneratorFrame() || !isYielding());
-    JS_ASSERT(!hasBlockChain());
-
-    if (isEvalFrame()) {
-        if (isStrictEvalFrame()) {
-            JS_ASSERT(scopeChain()->asCall().isForEval());
-            if (cx->compartment->debugMode())
-                cx->runtime->debugScopes->onPopStrictEvalScope(this);
-        } else if (isDirectEvalFrame()) {
-            if (isDebuggerFrame())
-                JS_ASSERT(!scopeChain()->isScope());
-            else
-                JS_ASSERT(scopeChain() == prev()->scopeChain());
-        } else {
-            JS_ASSERT(scopeChain()->isGlobal());
-        }
-        return;
-    }
-
-    if (isGlobalFrame()) {
-        JS_ASSERT(!scopeChain()->isScope());
-        return;
-    }
-
-    JS_ASSERT(isNonEvalFunctionFrame());
-    if (fun()->isHeavyweight()) {
-        JS_ASSERT(scopeChain()->asCall().getCalleeFunction()->script() == script());
-    } else {
-        JS_ASSERT(!scopeChain()->isCall() || scopeChain()->asCall().isForEval() ||
-                  scopeChain()->asCall().getCalleeFunction()->script() != script());
-    }
-
-    if (cx->compartment->debugMode())
-        cx->runtime->debugScopes->onPopCall(this);
-
-    Probes::exitJSFun(cx, fun(), script());
-
-    if (script()->nesting())
-        types::NestingEpilogue(this);
-
-    if (isConstructing() && returnValue().isPrimitive())
-        setReturnValue(ObjectValue(constructorThis()));
-}
-
-bool
 StackFrame::pushBlock(JSContext *cx, StaticBlockObject &block)
 {
     JS_ASSERT_IF(hasBlockChain(), blockChain_ == block.enclosingBlock());
 
     if (block.needsClone()) {
         Rooted<StaticBlockObject *> blockHandle(cx, &block);
         ClonedBlockObject *clone = ClonedBlockObject::create(cx, blockHandle, this);
         if (!clone)
             return false;
 
-        pushOnScopeChain(*clone);
+        scopeChain_ = clone;
     }
 
     flags_ |= HAS_BLOCKCHAIN;
     blockChain_ = &block;
     return true;
 }
 
 void
 StackFrame::popBlock(JSContext *cx)
 {
     JS_ASSERT(hasBlockChain());
 
     if (cx->compartment->debugMode())
         cx->runtime->debugScopes->onPopBlock(cx, this);
 
     if (blockChain_->needsClone()) {
-        JS_ASSERT(scopeChain_->asClonedBlock().staticBlock() == *blockChain_);
-        popOffScopeChain();
+        ClonedBlockObject &clone = scopeChain()->asClonedBlock();
+        JS_ASSERT(clone.staticBlock() == *blockChain_);
+        clone.put(cx->fp());
+        scopeChain_ = &clone.enclosingScope();
     }
 
     blockChain_ = blockChain_->enclosingBlock();
 }
 
 void
 StackFrame::popWith(JSContext *cx)
 {
-    if (cx->compartment->debugMode())
-        cx->runtime->debugScopes->onPopWith(this);
-
-    JS_ASSERT(scopeChain()->isWith());
-    popOffScopeChain();
+    setScopeChain(scopeChain()->asWith().enclosingScope());
 }
 
 void
 StackFrame::mark(JSTracer *trc)
 {
     /*
      * Normally we would use MarkRoot here, except that generators also take
      * this path. However, generators use a special write barrier when the stack
      * frame is copied to the floating frame. Therefore, no barrier is needed.
      */
     if (flags_ & HAS_SCOPECHAIN)
         gc::MarkObjectUnbarriered(trc, &scopeChain_, "scope chain");
     if (isDummyFrame())
         return;
-    if (flags_ & HAS_ARGS_OBJ)
+    if (hasArgsObj())
         gc::MarkObjectUnbarriered(trc, &argsObj_, "arguments");
     if (isFunctionFrame()) {
         gc::MarkObjectUnbarriered(trc, &exec.fun, "fun");
         if (isEvalFrame())
             gc::MarkScriptUnbarriered(trc, &u.evalScript, "eval script");
     } else {
         gc::MarkScriptUnbarriered(trc, &exec.script, "script");
     }
@@ -545,17 +454,17 @@ StackSpace::containingSegment(const Stac
         if (s->contains(target))
             return *s;
     }
     JS_NOT_REACHED("frame not in stack space");
     return *(StackSegment *)NULL;
 }
 
 void
-StackSpace::markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
+StackSpace::markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
 {
     Value *slotsBegin = fp->slots();
 
     if (!fp->isScriptFrame()) {
         JS_ASSERT(fp->isDummyFrame());
         gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
         return;
     }
@@ -619,22 +528,22 @@ StackSpace::mark(JSTracer *trc)
          * calls. Thus, marking can view the stack as the regex:
          *   (segment slots (frame slots)*)*
          * which gets marked in reverse order.
          */
         Value *slotsEnd = nextSegEnd;
         jsbytecode *pc = seg->maybepc();
         for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
             /* Mark from fp->slots() to slotsEnd. */
-            markFrameValues(trc, fp, slotsEnd, pc);
+            markFrameSlots(trc, fp, slotsEnd, pc);
 
             fp->mark(trc);
             slotsEnd = (Value *)fp;
 
-            InlinedSite *site;
+            JSInlinedSite *site;
             pc = fp->prevpc(&site);
             JS_ASSERT_IF(fp->prev(), !site);
         }
         gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
         nextSegEnd = (Value *)seg;
     }
 }
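
The traversal shape described by the regex comment above, per-frame slot runs walked newest-first followed by the segment's own slots, can be sketched over plain indices. Assumed toy values only:

    #include <cstdio>
    #include <vector>

    // Indices stand in for Value* boundaries inside one segment; this is not
    // the engine's real layout, just the traversal shape.
    static void markRange(int begin, int end)
    {
        std::printf("mark slots [%d, %d)\n", begin, end);
    }

    int main()
    {
        // Frame positions within the segment, oldest first; the segment's live
        // slots end at segmentEnd.
        std::vector<int> frameStarts = {2, 7, 11};
        int segmentEnd = 15;

        int slotsEnd = segmentEnd;
        for (auto it = frameStarts.rbegin(); it != frameStarts.rend(); ++it) {
            markRange(*it, slotsEnd);  // this frame's slots up to the previous boundary
            slotsEnd = *it;            // the next (older) frame stops here
        }
        markRange(0, slotsEnd);        // finally, the segment's own leading slots
        return 0;
    }
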
 
@@ -784,17 +693,17 @@ ContextStack::ensureOnTop(JSContext *cx,
      * Execute() or so forth, any topmost inline frame will need to be
      * expanded (along with other inline frames in the compartment).
      * To avoid pathological behavior here, make sure to mark any topmost
      * function as uninlineable, which will expand inline frames if there are
      * any and prevent the function from being inlined in the future.
      */
     if (FrameRegs *regs = cx->maybeRegs()) {
         JSFunction *fun = NULL;
-        if (InlinedSite *site = regs->inlined()) {
+        if (JSInlinedSite *site = regs->inlined()) {
             mjit::JITChunk *chunk = regs->fp()->jit()->chunk(regs->pc);
             fun = chunk->inlineFrames()[site->inlineIndex].fun;
         } else {
             StackFrame *fp = regs->fp();
             if (fp->isFunctionFrame()) {
                 JSFunction *f = fp->fun();
                 if (f->isInterpreted())
                     fun = f;
@@ -935,17 +844,17 @@ ContextStack::pushExecuteFrame(JSContext
     unsigned nvars = 2 /* callee, this */ + VALUES_PER_STACK_FRAME + script->nslots;
     Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, extend, &efg->pushedSeg_);
     if (!firstUnused)
         return NULL;
 
     StackFrame *prev = evalInFrame ? evalInFrame : maybefp();
     StackFrame *fp = reinterpret_cast<StackFrame *>(firstUnused + 2);
     fp->initExecuteFrame(script, prev, seg_->maybeRegs(), thisv, scopeChain, type);
-    fp->initVarsToUndefined();
+    SetValueRangeToUndefined(fp->slots(), script->nfixed);
     efg->regs_.prepareToRun(*fp, script);
 
     /* pushRegs() below links the prev-frame; manually link the prev-call. */
     if (evalInFrame && evalInFrameCalls)
         seg_->pointAtCall(*evalInFrameCalls);
 
     efg->prevRegs_ = seg_->pushRegs(efg->regs_);
     JS_ASSERT(space().firstUnused() == efg->regs_.sp);
@@ -977,36 +886,39 @@ ContextStack::pushDummyFrame(JSContext *
 void
 ContextStack::popFrame(const FrameGuard &fg)
 {
     JS_ASSERT(fg.pushed());
     JS_ASSERT(onTop());
     JS_ASSERT(space().firstUnused() == fg.regs_.sp);
     JS_ASSERT(&fg.regs_ == &seg_->regs());
 
+    if (fg.regs_.fp()->isNonEvalFunctionFrame())
+        fg.regs_.fp()->functionEpilogue(cx_);
+
     seg_->popRegs(fg.prevRegs_);
     if (fg.pushedSeg_)
         popSegment();
 
     /*
      * NB: this code can call out and observe the stack (e.g., through GC), so
      * it should only be called from a consistent stack state.
      */
     if (!hasfp())
         cx_->resetCompartment();
 }
 
 bool
 ContextStack::pushGeneratorFrame(JSContext *cx, JSGenerator *gen, GeneratorFrameGuard *gfg)
 {
-    HeapValue *genvp = gen->stackSnapshot;
-    JS_ASSERT(genvp == HeapValueify(gen->fp->generatorArgsSnapshotBegin()));
-    unsigned vplen = HeapValueify(gen->fp->generatorArgsSnapshotEnd()) - genvp;
+    StackFrame *genfp = gen->floatingFrame();
+    HeapValue *genvp = gen->floatingStack;
+    unsigned vplen = (HeapValue *)genfp - genvp;
 
-    unsigned nvars = vplen + VALUES_PER_STACK_FRAME + gen->fp->script()->nslots;
+    unsigned nvars = vplen + VALUES_PER_STACK_FRAME + genfp->numSlots();
     Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, CAN_EXTEND, &gfg->pushedSeg_);
     if (!firstUnused)
         return false;
 
     StackFrame *stackfp = reinterpret_cast<StackFrame *>(firstUnused + vplen);
     Value *stackvp = (Value *)stackfp - vplen;
 
     /* Save this for popGeneratorFrame. */
@@ -1015,46 +927,49 @@ ContextStack::pushGeneratorFrame(JSConte
 
     /*
      * Trigger incremental barrier on the floating frame's generator object.
      * This is normally traced through only by associated arguments/call
      * objects, but only when the generator is not actually on the stack.
      * We don't need to worry about generational barriers as the generator
      * object has a trace hook and cannot be nursery allocated.
      */
-    JS_ASSERT(gen->obj->getClass()->trace);
-    JSObject::writeBarrierPre(gen->obj);
+    JSObject *genobj = js_FloatingFrameToGenerator(genfp)->obj;
+    JS_ASSERT(genobj->getClass()->trace);
+    JSObject::writeBarrierPre(genobj);
 
     /* Copy from the generator's floating frame to the stack. */
-    stackfp->copyFrameAndValues<Value, HeapValue, StackFrame::NoPostBarrier>(
-                                cx, stackfp, stackvp, gen->fp, genvp, gen->regs.sp);
+    stackfp->stealFrameAndSlots<Value, HeapValue, StackFrame::NoPostBarrier>(
+                                cx, stackfp, stackvp, genfp, genvp, gen->regs.sp);
     stackfp->resetGeneratorPrev(cx);
+    stackfp->unsetFloatingGenerator();
     gfg->regs_.rebaseFromTo(gen->regs, *stackfp);
 
     gfg->prevRegs_ = seg_->pushRegs(gfg->regs_);
     JS_ASSERT(space().firstUnused() == gfg->regs_.sp);
     gfg->setPushed(*this);
     return true;
 }
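
The pre-barrier the comment above triggers on the generator object follows the usual incremental-GC pattern: record the old referent before a mutation that could hide it from a partially completed mark. A conceptual toy model, not SpiderMonkey's barrier machinery:

    #include <cassert>
    #include <unordered_set>

    // Toy model of a pre-write barrier; not SpiderMonkey's barrier machinery.
    struct Obj {};
    static std::unordered_set<Obj *> rememberedByGC;

    static void writeBarrierPre(Obj *old)
    {
        if (old)
            rememberedByGC.insert(old);  // keep the pre-write referent reachable
    }

    int main()
    {
        Obj genobj;
        Obj *slot = &genobj;
        writeBarrierPre(slot);           // barrier runs before the mutation
        slot = nullptr;                  // mutation that would hide the old referent
        (void)slot;
        assert(rememberedByGC.count(&genobj) == 1);
        return 0;
    }
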
 
 void
 ContextStack::popGeneratorFrame(const GeneratorFrameGuard &gfg)
 {
     JSGenerator *gen = gfg.gen_;
-    HeapValue *genvp = gen->stackSnapshot;
-    JS_ASSERT(genvp == HeapValueify(gen->fp->generatorArgsSnapshotBegin()));
+    StackFrame *genfp = gen->floatingFrame();
+    HeapValue *genvp = gen->floatingStack;
 
     const FrameRegs &stackRegs = gfg.regs_;
     StackFrame *stackfp = stackRegs.fp();
     Value *stackvp = gfg.stackvp_;
 
     /* Copy from the stack to the generator's floating frame. */
-    gen->regs.rebaseFromTo(stackRegs, *gen->fp);
-    gen->fp->copyFrameAndValues<HeapValue, Value, StackFrame::DoPostBarrier>(
-                                cx_, gen->fp, genvp, stackfp, stackvp, stackRegs.sp);
+    gen->regs.rebaseFromTo(stackRegs, *genfp);
+    genfp->stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
+                              cx_, genfp, genvp, stackfp, stackvp, stackRegs.sp);
+    genfp->setFloatingGenerator();
 
     /* ~FrameGuard/popFrame will finish the popping. */
     JS_ASSERT(ImplicitCast<const FrameGuard>(gfg).pushed());
 }
 
 bool
 ContextStack::saveFrameChain()
 {
@@ -1093,40 +1008,40 @@ StackIter::poisonRegs()
 
 void
 StackIter::popFrame()
 {
     StackFrame *oldfp = fp_;
     JS_ASSERT(seg_->contains(oldfp));
     fp_ = fp_->prev();
     if (seg_->contains(fp_)) {
-        InlinedSite *inline_;
+        JSInlinedSite *inline_;
         pc_ = oldfp->prevpc(&inline_);
         JS_ASSERT(!inline_);
 
         /*
          * If there is a CallArgsList element between oldfp and fp_, then sp_
          * is ignored, so we only consider the case where there is no
          * intervening CallArgsList. The stack representation is not optimized
          * for this operation so we need to do a full case analysis of how
          * frames are pushed by considering each ContextStack::push*Frame.
          */
         if (oldfp->isGeneratorFrame()) {
             /* Generator's args do not overlap with the caller's expr stack. */
-            sp_ = oldfp->generatorArgsSnapshotBegin();
+            sp_ = (Value *)oldfp->actualArgs() - 2;
         } else if (oldfp->isNonEvalFunctionFrame()) {
             /*
              * When Invoke is called from a native, there will be an enclosing
              * pushInvokeArgs which pushes a CallArgsList element so we can
              * ignore that case. The other two cases of function call frames are
+             * Invoke called directly from script and pushInlineFrame. In both
              * cases, the actual arguments of the callee should be included in
              * the caller's expr stack.
              */
-            sp_ = oldfp->actuals() + oldfp->numActualArgs();
+            sp_ = oldfp->actualArgsEnd();
         } else if (oldfp->isFramePushedByExecute()) {
             /* pushExecuteFrame pushes exactly (callee, this) before frame. */
             sp_ = (Value *)oldfp - 2;
         } else {
             /* pushDummyFrame pushes exactly 0 slots before frame. */
             JS_ASSERT(oldfp->isDummyFrame());
             sp_ = (Value *)oldfp;
         }
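
The case analysis in the comments above, where the way the popped frame was pushed determines where the caller's sp is restored, can be summarized as a small dispatch. The enum and descriptions are illustrative; the real code branches on StackFrame predicates:

    #include <cstdio>

    // Illustrative summary of the sp-restoration cases above.
    enum class PushedBy { Generator, NonEvalFunctionCall, Execute, Dummy };

    const char *restoredSpRule(PushedBy kind)
    {
        switch (kind) {
          case PushedBy::Generator:           return "at the start of the generator's (callee, this, args) snapshot";
          case PushedBy::NonEvalFunctionCall: return "at the end of the callee's actual arguments";
          case PushedBy::Execute:             return "two slots (callee, this) below the popped frame";
          case PushedBy::Dummy:               return "exactly at the popped frame";
        }
        return "";
    }

    int main()
    {
        std::printf("%s\n", restoredSpRule(PushedBy::Execute));
        return 0;
    }
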
@@ -1171,18 +1086,17 @@ StackIter::startOnSegment(StackSegment *
     fp_ = seg_->maybefp();
     calls_ = seg_->maybeCalls();
     settleOnNewSegment();
 }
 
 static void JS_NEVER_INLINE
 CrashIfInvalidSlot(StackFrame *fp, Value *vp)
 {
-    Value *slots = (Value *)(fp + 1);
-    if (vp < slots || vp >= slots + fp->script()->nslots) {
+    if (vp < fp->slots() || vp >= fp->slots() + fp->script()->nslots) {
         JS_ASSERT(false && "About to dereference invalid slot");
         *(int *)0xbad = 0;  // show up nicely in crash-stats
         MOZ_Assert("About to dereference invalid slot", __FILE__, __LINE__);
     }
 }
 
 /*
  * Given that the iterator's current value of fp_ and calls_ (initialized on
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -9,17 +9,24 @@
 #define Stack_h__
 
 #include "jsfun.h"
 #include "jsautooplen.h"
 
 struct JSContext;
 struct JSCompartment;
 
-extern void js_DumpStackFrame(JSContext *, js::StackFrame *);
+#ifdef JS_METHODJIT
+namespace js { namespace mjit { struct CallSite; }}
+typedef js::mjit::CallSite JSInlinedSite;
+#else
+struct JSInlinedSite {};
+#endif
+
+typedef /* js::mjit::RejoinState */ size_t JSRejoinState;
 
 namespace js {
 
 class StackFrame;
 class FrameRegs;
 class StackSegment;
 class StackSpace;
 class ContextStack;
@@ -31,109 +38,100 @@ class ExecuteFrameGuard;
 class DummyFrameGuard;
 class GeneratorFrameGuard;
 
 class CallIter;
 class ScriptFrameIter;
 class AllFramesIter;
 
 class ArgumentsObject;
-class ScopeCoordinate;
-class ScopeObject;
 class StaticBlockObject;
 
 #ifdef JS_METHODJIT
 namespace mjit {
-    class CallCompiler;
-    class GetPropCompiler;
-    struct CallSite;
     struct JITScript;
     jsbytecode *NativeToPC(JITScript *jit, void *ncode, CallSite **pinline);
-    namespace ic { struct GetElementIC; }
 }
-typedef mjit::CallSite InlinedSite;
-#else
-struct InlinedSite {};
 #endif
-typedef size_t FrameRejoinState;
 
 namespace detail {
     struct OOMCheck;
 }
 
 /*****************************************************************************/
 
 /*
  * VM stack layout
  *
- * SpiderMonkey uses a per-runtime stack to store the activation records,
+ * SpiderMonkey uses a per-thread stack to store the activation records,
  * parameters, locals, and expression temporaries for the stack of actively
- * executing scripts, functions and generators.
+ * executing scripts, functions and generators. The stack is owned by the
+ * StackSpace object stored in the runtime.
  *
  * The stack is subdivided into contiguous segments of memory which
  * have a memory layout invariant that allows fixed offsets to be used for stack
  * access (by jit code) as well as fast call/return. This memory layout is
  * encapsulated by a set of types that describe different regions of memory.
  * This encapsulation has holes: to avoid calling into C++ from generated code,
  * JIT compilers generate code that simulates analogous operations in C++.
  *
  * A sample memory layout of a segment looks like:
  *
  *                          regs
- *       .------------------------------------------------.
- *       |                                                V
- *       |                                      fp .--FrameRegs--. sp
- *       |                                         V             V
- * |StackSegment| values |StackFrame| values |StackFrame| values |
- *                         |      ^            |
- *           ? <-----------'      `------------'
+ *       .---------------------------------------------.
+ *       |                                             V
+ *       |                                   fp .--FrameRegs--. sp
+ *       |                                      V             V
+ * |StackSegment| slots |StackFrame| slots |StackFrame| slots |
+ *                        |      ^           |
+ *           ? <----------'      `-----------'
  *                 prev               prev
  *
  * A segment starts with a fixed-size header (js::StackSegment) which logically
  * describes the segment, links it to the rest of the stack, and points to the
  * end of the stack.
  *
  * Each script activation (global or function code) is given a fixed-size header
- * (js::StackFrame) which is associated with the values before and after it.
- * The frame contains bookkeeping information about the activation and links to
- * the previous frame.
+ * (js::StackFrame) which is associated with the values (called "slots") before
+ * and after it. The frame contains bookkeeping information about the activation
+ * and links to the previous frame.
  *
- * The values preceding a (function) StackFrame in memory are the arguments of
- * the call. The values after a StackFrame in memory are its locals followed by
+ * The slots preceding a (function) StackFrame in memory are the arguments of
+ * the call. The slots after a StackFrame in memory are its locals followed by
  * its expression stack. There is no clean line between the arguments of a
- * frame and the expression stack of the previous frame since the top values of
+ * frame and the expression stack of the previous frame since the top slots of
  * the expression stack become the arguments of a call. There are also layout
  * invariants concerning the arguments and StackFrame; see "Arguments" comment
  * in StackFrame for more details.
  *
  * The top of a segment's current frame's expression stack is pointed to by the
  * segment's "current regs", which contains the stack pointer 'sp'. In the
  * interpreter, sp is adjusted as individual values are pushed and popped from
  * the stack and the FrameRegs struct (pointed by the StackSegment) is a local
  * var of js::Interpret. JIT code simulates this by lazily updating FrameRegs
  * when calling from JIT code into the VM. Ideally, we'd like to remove all
  * dependence on FrameRegs outside the interpreter.
  *
  * A call to a native (C++) function does not push a frame. Instead, an array
  * of values is passed to the native. The layout of this array is abstracted by
  * js::CallArgs. With respect to the StackSegment layout above, the args to a
- * native call are inserted anywhere there can be values. A sample memory layout
+ * native call are inserted anywhere there can be slots. A sample memory layout
  * looks like:
  *
  *                          regs
- *       .------------------------------------------.
- *       |                                          V
- *       |                                fp .--FrameRegs--. sp
- *       |                                   V             V
- * |StackSegment| native call | values |StackFrame| values | native call |
- *       |       vp <--argc--> end                        vp <--argc--> end
- *       |           CallArgs <------------------------------ CallArgs
- *       |                                 prev                  ^
- *       `-------------------------------------------------------'
- *                                    calls
+ *       .----------------------------------------.
+ *       |                                        V
+ *       |                              fp .--FrameRegs--. sp
+ *       |                                 V             V
+ * |StackSegment| native call | slots |StackFrame| slots | native call |
+ *       |     vp <--argc--> end                        vp <--argc--> end
+ *       |         CallArgs <------------------------------ CallArgs
+ *       |                               prev                  ^
+ *       `-----------------------------------------------------'
+ *                                  calls
  *
  * Here there are two native calls on the stack. The start of each native arg
  * range is recorded by a CallArgs element which is prev-linked like stack
  * frames. Note that, in full generality, native and scripted calls can
  * interleave arbitrarily. Thus, the end of a segment is the maximum of its
  * current frame and its current native call. Similarly, the top of the entire
  * thread stack is the end of its current segment.
  *
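
The layout invariants sketched above (a function frame's formal args end exactly where the frame header begins, its fixed slots start right after the header, and the header size is a multiple of the value size) can be modeled with a toy contiguous buffer. Types and sizes are illustrative only:

    #include <cassert>
    #include <cstdint>

    struct Value { uint64_t bits; };

    // Stand-in for the fixed-size frame header; four 64-bit words here, chosen
    // only so the size is a multiple of sizeof(Value).
    struct FakeFrame {
        uint64_t a, b, c, d;

        Value *slots() { return reinterpret_cast<Value *>(this + 1); }
        Value *formals(unsigned nformal) {
            return reinterpret_cast<Value *>(this) - nformal;
        }
    };

    static_assert(sizeof(FakeFrame) % sizeof(Value) == 0,
                  "header size must be value-aligned, as the real StackFrame asserts");

    int main()
    {
        // One contiguous chunk: 3 formal args, the frame header, then 2 fixed slots.
        alignas(FakeFrame) unsigned char buf[3 * sizeof(Value) + sizeof(FakeFrame) + 2 * sizeof(Value)];
        Value *base = reinterpret_cast<Value *>(buf);
        FakeFrame *fp = reinterpret_cast<FakeFrame *>(base + 3);

        assert(fp->formals(3) == base);                                       // formals end at the header
        assert(fp->slots() == base + 3 + sizeof(FakeFrame) / sizeof(Value));  // fixed slots follow it
        return 0;
    }
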
@@ -290,25 +288,21 @@ CallArgsListFromArgv(unsigned argc, Valu
 JS_ALWAYS_INLINE CallArgsList
 CallArgsListFromVp(unsigned argc, Value *vp, CallArgsList *prev)
 {
     return CallArgsListFromArgv(argc, vp + 2, prev);
 }
 
 /*****************************************************************************/
 
-enum MaybeCheckAliasing { CHECK_ALIASING = true, DONT_CHECK_ALIASING = false };
-
-/*****************************************************************************/
-
 /* Flags specified for a frame as it is constructed. */
 enum InitialFrameFlags {
     INITIAL_NONE           =          0,
-    INITIAL_CONSTRUCT      =       0x40, /* == StackFrame::CONSTRUCTING, asserted below */
-    INITIAL_LOWERED        =   0x100000  /* == StackFrame::LOWERED_CALL_APPLY, asserted below */
+    INITIAL_CONSTRUCT      =       0x80, /* == StackFrame::CONSTRUCTING, asserted below */
+    INITIAL_LOWERED        =   0x200000  /* == StackFrame::LOWERED_CALL_APPLY, asserted below */
 };
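
The "== StackFrame::CONSTRUCTING, asserted below" comments rely on compile-time checks that keep mirrored enum values in lock-step. A sketch of that pattern with standard static_assert and the values shown above (the engine uses its own assertion macro):

    #include <cstdint>

    // Illustrative mirrored values; the real ones are StackFrame::Flags members.
    enum FrameFlags : uint32_t { CONSTRUCTING = 0x80, LOWERED_CALL_APPLY = 0x200000 };
    enum InitialFlags : uint32_t { INITIAL_CONSTRUCT = 0x80, INITIAL_LOWERED = 0x200000 };

    static_assert(uint32_t(INITIAL_CONSTRUCT) == uint32_t(CONSTRUCTING),
                  "InitialFrameFlags must mirror StackFrame flag values");
    static_assert(uint32_t(INITIAL_LOWERED) == uint32_t(LOWERED_CALL_APPLY),
                  "InitialFrameFlags must mirror StackFrame flag values");

    int main() { return 0; }
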
 
 enum ExecuteType {
     EXECUTE_GLOBAL         =        0x1, /* == StackFrame::GLOBAL */
     EXECUTE_DIRECT_EVAL    =        0x8, /* == StackFrame::EVAL */
     EXECUTE_INDIRECT_EVAL  =        0x9, /* == StackFrame::GLOBAL | EVAL */
     EXECUTE_DEBUG          =       0x18  /* == StackFrame::EVAL | DEBUGGER */
 };
@@ -323,154 +317,108 @@ class StackFrame
         GLOBAL             =        0x1,  /* frame pushed for a global script */
         FUNCTION           =        0x2,  /* frame pushed for a scripted call */
         DUMMY              =        0x4,  /* frame pushed for bookkeeping */
 
         /* Frame subtypes */
         EVAL               =        0x8,  /* frame pushed for eval() or debugger eval */
         DEBUGGER           =       0x10,  /* frame pushed for debugger eval */
         GENERATOR          =       0x20,  /* frame is associated with a generator */
-        CONSTRUCTING       =       0x40,  /* frame is for a constructor invocation */
+        FLOATING_GENERATOR =       0x40,  /* frame is in generator obj, not on stack */
+        CONSTRUCTING       =       0x80,  /* frame is for a constructor invocation */
 
         /* Temporary frame states */
-        YIELDING           =       0x80,  /* Interpret dispatched JSOP_YIELD */
-        FINISHED_IN_INTERP =      0x100,  /* set if frame finished in Interpret() */
+        YIELDING           =      0x100,  /* js::Interpret dispatched JSOP_YIELD */
+        FINISHED_IN_INTERP =      0x200,  /* set if frame finished in Interpret() */
 
         /* Function arguments */
-        OVERFLOW_ARGS      =      0x200,  /* numActualArgs > numFormalArgs */
-        UNDERFLOW_ARGS     =      0x400,  /* numActualArgs < numFormalArgs */
-
-        /* Function prologue state */
-        HAS_CALL_OBJ       =      0x800,  /* CallObject created for heavyweight fun */
-        HAS_ARGS_OBJ       =     0x1000,  /* ArgumentsObject created for needsArgsObj script */
+        OVERFLOW_ARGS      =      0x400,  /* numActualArgs > numFormalArgs */
+        UNDERFLOW_ARGS     =      0x800,  /* numActualArgs < numFormalArgs */
 
         /* Lazy frame initialization */
-        HAS_HOOK_DATA      =     0x2000,  /* frame has hookData_ set */
-        HAS_ANNOTATION     =     0x4000,  /* frame has annotation_ set */
-        HAS_RVAL           =     0x8000,  /* frame has rval_ set */
-        HAS_SCOPECHAIN     =    0x10000,  /* frame has scopeChain_ set */
-        HAS_PREVPC         =    0x20000,  /* frame has prevpc_ and prevInline_ set */
-        HAS_BLOCKCHAIN     =    0x40000,  /* frame has blockChain_ set */
+        HAS_CALL_OBJ       =     0x1000,  /* frame has a callobj reachable from scopeChain_ */
+        HAS_ARGS_OBJ       =     0x2000,  /* frame has an argsobj in StackFrame::args */
+        HAS_HOOK_DATA      =     0x4000,  /* frame has hookData_ set */
+        HAS_ANNOTATION     =     0x8000,  /* frame has annotation_ set */
+        HAS_RVAL           =    0x10000,  /* frame has rval_ set */
+        HAS_SCOPECHAIN     =    0x20000,  /* frame has scopeChain_ set */
+        HAS_PREVPC         =    0x40000,  /* frame has prevpc_ and prevInline_ set */
+        HAS_BLOCKCHAIN     =    0x80000,  /* frame has blockChain_ set */
 
         /* Method JIT state */
-        DOWN_FRAMES_EXPANDED =  0x80000,  /* inlining in down frames has been expanded */
-        LOWERED_CALL_APPLY   = 0x100000,  /* Pushed by a lowered call/apply */
-
-        /* Debugger state */
-        PREV_UP_TO_DATE    =   0x200000   /* see DebugScopes::updateLiveScopes */
+        DOWN_FRAMES_EXPANDED = 0x100000,  /* inlining in down frames has been expanded */
+        LOWERED_CALL_APPLY   = 0x200000   /* Pushed by a lowered call/apply */
     };
 
   private:
     mutable uint32_t    flags_;         /* bits described by Flags */
     union {                             /* describes what code is executing in a */
         JSScript        *script;        /*   global frame */
         JSFunction      *fun;           /*   function frame, pre GetScopeChain */
     } exec;
     union {                             /* describes the arguments of a function */
-        unsigned        nactual;        /*   for non-eval frames */
+        unsigned           nactual;        /*   for non-eval frames */
         JSScript        *evalScript;    /*   the script of an eval-in-function */
     } u;
-    mutable JSObject    *scopeChain_;   /* if HAS_SCOPECHAIN, current scope chain */
-    StackFrame          *prev_;         /* if HAS_PREVPC, previous cx->regs->fp */
-    void                *ncode_;        /* for a jit frame, return address for method JIT */
-    Value               rval_;          /* if HAS_RVAL, return value of the frame */
-    StaticBlockObject   *blockChain_;   /* if HAS_BLOCKCHAIN, innermost let block */
-    ArgumentsObject     *argsObj_;      /* if HAS_ARGS_OBJ, the call's arguments object */
-    jsbytecode          *prevpc_;       /* if HAS_PREVPC, pc of previous frame*/
-    InlinedSite         *prevInline_;   /* for a jit frame, inlined site in previous frame */
-    void                *hookData_;     /* if HAS_HOOK_DATA, closure returned by call hook */
-    void                *annotation_;   /* if HAS_ANNOTATION, perhaps remove with bug 546848 */
-    FrameRejoinState    rejoin_;        /* for a jit frame rejoining the interpreter
+    mutable JSObject    *scopeChain_;   /* current scope chain */
+    StackFrame          *prev_;         /* previous cx->regs->fp */
+    void                *ncode_;        /* return address for method JIT */
+
+    /* Lazily initialized */
+    Value               rval_;          /* return value of the frame */
+    StaticBlockObject   *blockChain_;   /* innermost let block */
+    ArgumentsObject     *argsObj_;      /* if has HAS_ARGS_OBJ */
+    jsbytecode          *prevpc_;       /* pc of previous frame*/
+    JSInlinedSite       *prevInline_;   /* inlined site in previous frame */
+    void                *hookData_;     /* closure returned by call hook */
+    void                *annotation_;   /* perhaps remove with bug 546848 */
+    JSRejoinState       rejoin_;        /* If rejoining into the interpreter
                                          * from JIT code, state at rejoin. */
 
     static void staticAsserts() {
         JS_STATIC_ASSERT(offsetof(StackFrame, rval_) % sizeof(Value) == 0);
         JS_STATIC_ASSERT(sizeof(StackFrame) % sizeof(Value) == 0);
     }
 
     inline void initPrev(JSContext *cx);
-    jsbytecode *prevpcSlow(InlinedSite **pinlined);
-    void writeBarrierPost();
-
-    /*
-     * These utilities provide raw access to the values associated with a
-     * StackFrame (see "VM stack layout" comment). The utilities are private
-     * since they are not able to assert that only unaliased vars/formals are
-     * accessed. Normal code should prefer the StackFrame::unaliased* members
-     * (or FrameRegs::stackDepth for the usual "depth is at least" assertions).
-     */
-    Value *slots() const { return (Value *)(this + 1); }
-    Value *base() const { return slots() + script()->nfixed; }
-    Value *formals() const { return (Value *)this - fun()->nargs; }
-    Value *actuals() const { return formals() - (flags_ & OVERFLOW_ARGS ? 2 + u.nactual : 0); }
+    jsbytecode *prevpcSlow(JSInlinedSite **pinlined);
 
-    friend class FrameRegs;
-    friend class ContextStack;
-    friend class StackSpace;
-    friend class StackIter;
-    friend class CallObject;
-    friend class ClonedBlockObject;
-    friend class ArgumentsObject;
-    friend void ::js_DumpStackFrame(JSContext *, StackFrame *);
-    friend void ::js_ReportIsNotFunction(JSContext *, const js::Value *, unsigned);
-#ifdef JS_METHODJIT
-    friend class mjit::CallCompiler;
-    friend class mjit::GetPropCompiler;
-    friend class mjit::ic::GetElementIC;
-#endif
-
+  public:
     /*
-     * Frame initialization, called by ContextStack operations after acquiring
-     * the raw memory for the frame:
+     * Frame initialization
+     *
+     * After acquiring a pointer to an uninitialized stack frame on the VM
+     * stack from StackSpace, these members are used to initialize the stack
+     * frame before officially pushing the frame into the context.
      */
 
     /* Used for Invoke, Interpret, trace-jit LeaveTree, and method-jit stubs. */
     void initCallFrame(JSContext *cx, JSFunction &callee,
                        JSScript *script, uint32_t nactual, StackFrame::Flags flags);
 
     /* Used for getFixupFrame (for FixupArity). */
     void initFixupFrame(StackFrame *prev, StackFrame::Flags flags, void *ncode, unsigned nactual);
 
     /* Used for eval. */
     void initExecuteFrame(JSScript *script, StackFrame *prev, FrameRegs *regs,
                           const Value &thisv, JSObject &scopeChain, ExecuteType type);
 
+    /* Used when activating generators. */
+    enum TriggerPostBarriers {
+        DoPostBarrier = true,
+        NoPostBarrier = false
+    };
+    template <class T, class U, TriggerPostBarriers doPostBarrier>
+    void stealFrameAndSlots(JSContext *cx, StackFrame *fp, T *vp,
+                            StackFrame *otherfp, U *othervp, Value *othersp);
+    void writeBarrierPost();
+
     /* Perhaps one fine day we will remove dummy frames. */
     void initDummyFrame(JSContext *cx, JSObject &chain);
 
-  public:
-    /*
-     * Frame prologue/epilogue
-     *
-     * Every stack frame must have 'prologue' called before executing the
-     * first op and 'epilogue' called after executing the last op and before
-     * popping the frame (whether the exit is exceptional or not).
-     *
-     * For inline JS calls/returns, it is easy to call the prologue/epilogue
-     * exactly once. When calling JS from C++, Invoke/Execute push the stack
-     * frame but do *not* call the prologue/epilogue. That means Interpret
-     * must call the prologue/epilogue for the entry frame. This scheme
-     * simplifies jit compilation.
-     *
-     * The 'newType' option indicates whether the constructed 'this' value (if
-     * there is one) should be given a new singleton type.
-     */
-
-    bool prologue(JSContext *cx, bool newType);
-    void epilogue(JSContext *cx);
-
-    /*
-     * Optimized path for the jit heavyweight function frame prologue. This
-     * does not include constructing 'this'.
-     */
-    inline bool heavyweightFunctionPrologue(JSContext *cx);
-
-    /* Initialize local variables of newly-pushed frame. */
-    void initVarsToUndefined();
-
     /*
      * Stack frame type
      *
      * A stack frame may have one of three types, which determines which
      * members of the frame may be accessed and other invariants:
      *
      *  global frame:   execution of global code or an eval in global code
      *  function frame: execution of function code or an eval in a function
@@ -549,195 +497,136 @@ class StackFrame
      * to set cx->regs->fp to when this frame is popped.
      */
 
     StackFrame *prev() const {
         return prev_;
     }
 
     inline void resetGeneratorPrev(JSContext *cx);
+    inline void resetInlinePrev(StackFrame *prevfp, jsbytecode *prevpc);
 
-    /*
-     * (Unaliased) locals and arguments
-     *
-     * Only non-eval function frames have arguments. The arguments pushed by
-     * the caller are the 'actual' arguments. The declared arguments of the
-     * callee are the 'formal' arguments. When the caller passes less or equal
-     * actual arguments, the actual and formal arguments are the same array
-     * (but with different extents). When the caller passes too many arguments,
-     * the formal subset of the actual arguments is copied onto the top of the
-     * stack. This allows the engine to maintain a jit-time constant offset of
-     * arguments from the frame pointer. Since the formal subset of the actual
-     * arguments is potentially on the stack twice, it is important for all
-     * reads/writes to refer to the same canonical memory location. This is
-     * abstracted by the unaliased{Formal,Actual} methods.
-     *
-     * When a local/formal variable is "aliased" (accessed by nested closures,
-     * dynamic scope operations, or 'arguments), the canonical location for
-     * that value is the slot of an activation object (scope or arguments).
-     * Currently, all variables are given slots in *both* the stack frame and
-     * heap objects, even though, as just described, only one should ever be
-     * accessed. Thus, it is up to the code performing an access to access the
-     * correct value. These functions assert that accesses to stack values are
-     * unaliased. See above for more about canonical value locations.
-     */
-
-    inline Value &unaliasedVar(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
-    inline Value &unaliasedLocal(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
-
-    bool hasArgs() const { return isNonEvalFunctionFrame(); }
-    inline Value &unaliasedFormal(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
-    inline Value &unaliasedActual(unsigned i);
-    template <class Op> inline void forEachUnaliasedActual(Op op);
-
-    inline unsigned numFormalArgs() const;
-    inline unsigned numActualArgs() const;
-
-    /*
-     * Arguments object
-     *
-     * If a non-eval function has script->needsArgsObj, an arguments object is
-     * created in the prologue and stored in the local variable for the
-     * 'arguments' binding (script->argumentsLocal). Since this local is
-     * mutable, the arguments object can be overwritten and we can "lose" the
-     * arguments object. Thus, StackFrame keeps an explicit argsObj_ field so
-     * that the original arguments object is always available.
-     */
-
-    ArgumentsObject &argsObj() const;
-    void initArgsObj(ArgumentsObject &argsobj);
+    inline void initInlineFrame(JSFunction *fun, StackFrame *prevfp, jsbytecode *prevpc);
 
     inline JSObject *createRestParameter(JSContext *cx);
 
     /*
-     * Scope chain
-     *
-     * In theory, the scope chain would contain an object for every lexical
-     * scope. However, only objects that are required for dynamic lookup are
-     * actually created.
+     * Frame slots
      *
-     * Given that a (non-dummy) StackFrame corresponds roughly to an ES5
-     * Execution Context (ES5 10.3), StackFrame::varObj corresponds to the
-     * VariableEnvironment component of an Execution Context. Intuitively, the
-     * variables object is where new bindings (variables and functions) are
-     * stored. One might expect that this is either the Call object or
-     * scopeChain.globalObj for function or global code, respectively, however
-     * the JSAPI allows calls of Execute to specify a variables object on the
-     * scope chain other than the call/global object. This allows embeddings to
-     * run multiple scripts under the same global, each time using a new
-     * variables object to collect and discard the script's global variables.
+     * A frame's 'slots' are the fixed slots associated with the frame (like
+     * local variables) followed by an expression stack holding temporary
+     * values. A frame's 'base' is the base of the expression stack.
      */
 
-    inline HandleObject scopeChain() const;
-
-    inline ScopeObject &aliasedVarScope(ScopeCoordinate sc) const;
-    inline GlobalObject &global() const;
-    inline CallObject &callObj() const;
-    inline JSObject &varObj();
-
-    inline void pushOnScopeChain(ScopeObject &scope);
-    inline void popOffScopeChain();
+    Value *slots() const {
+        return (Value *)(this + 1);
+    }
 
-    /*
-     * Block chain
-     *
-     * Entering/leaving a let (or exception) block may do one or two things.
-     * First, a static block object (created at compile time and stored in the
-     * script) is pushed on StackFrame::blockChain. Second, the static block
-     * may be cloned to hold the dynamic values if this is needed for dynamic
-     * scope access. A clone is created for a static block iff
-     * StaticBlockObject::needsClone.
-     */
-
-    bool hasBlockChain() const {
-        return (flags_ & HAS_BLOCKCHAIN) && blockChain_;
+    Value *base() const {
+        return slots() + script()->nfixed;
     }
 
-    StaticBlockObject *maybeBlockChain() {
-        return (flags_ & HAS_BLOCKCHAIN) ? blockChain_ : NULL;
-    }
-
-    StaticBlockObject &blockChain() const {
-        JS_ASSERT(hasBlockChain());
-        return *blockChain_;
+    Value &varSlot(unsigned i) {
+        JS_ASSERT(i < script()->nfixed);
+        JS_ASSERT_IF(maybeFun(), i < script()->bindings.numVars());
+        return slots()[i];
     }
 
-    bool pushBlock(JSContext *cx, StaticBlockObject &block);
-    void popBlock(JSContext *cx);
-
-    /*
-     * With
-     *
-     * Entering/leaving a with (or E4X filter) block pushes/pops an object
-     * on the scope chain. Pushing uses pushOnScopeChain; popping should use
-     * popWith.
-     */
-
-    void popWith(JSContext *cx);
+    Value &localSlot(unsigned i) {
+        /* Let variables can be above script->nfixed. */
+        JS_ASSERT(i < script()->nslots);
+        return slots()[i];
+    }
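
As an aside, the slot layout these accessors assume can be pictured with a small self-contained sketch (a toy model with stand-in types, not the real StackFrame/JSScript): fixed slots follow the frame header, and the expression stack begins at base() = slots() + nfixed.

    // Toy model of the layout described above; nfixed/nslots mirror JSScript
    // fields, but ModelFrame/ModelValue are illustrative stand-ins only.
    #include <cassert>
    #include <cstdint>
    #include <new>
    #include <vector>

    struct ModelValue { uint64_t bits = 0; };

    struct ModelFrame {
        uint32_t nfixed;   // number of fixed slots (ordinary locals)
        uint32_t nslots;   // fixed slots + maximum expression-stack depth

        // Slots are laid out contiguously after the frame header.
        ModelValue *slots() { return reinterpret_cast<ModelValue *>(this + 1); }

        // The expression stack ('base') begins after the fixed slots.
        ModelValue *base() { return slots() + nfixed; }

        ModelValue &varSlot(unsigned i) {
            assert(i < nfixed);    // plain locals live below nfixed
            return slots()[i];
        }

        ModelValue &localSlot(unsigned i) {
            assert(i < nslots);    // let-bound locals may sit above nfixed
            return slots()[i];
        }
    };

    int main() {
        std::vector<unsigned char> buf(sizeof(ModelFrame) + 8 * sizeof(ModelValue));
        ModelFrame *fp = new (buf.data()) ModelFrame{3, 8};
        fp->varSlot(0).bits = 42;                  // write local 0
        assert(fp->base() == fp->slots() + 3);     // stack starts above the locals
        return 0;
    }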
 
     /*
      * Script
      *
      * All function and global frames have an associated JSScript which holds
      * the bytecode being executed for the frame. This script/bytecode does
      * not reflect any inlining that has been performed by the method JIT.
      * If other frames were inlined into this one, the script/pc reflect the
      * point of the outermost call. Inlined frame invariants:
      *
      * - Inlined frames have the same scope chain as the outer frame.
      * - Inlined frames have the same strictness as the outer frame.
      * - Inlined frames can only make calls to other JIT frames associated with
      *   the same VMFrame. Other calls force expansion of the inlined frames.
      */
 
-    JSScript *script() const {
-        JS_ASSERT(isScriptFrame());
-        return isFunctionFrame()
-               ? isEvalFrame() ? u.evalScript : fun()->script()
-               : exec.script;
-    }
-
-    JSScript *maybeScript() const {
-        return isScriptFrame() ? script() : NULL;
-    }
-
     /*
     * Get the frame's current bytecode, assuming |this| is in |cx|. next is
     * the frame whose prev == this, or NULL if not known or if this == cx->fp().
      * If the frame is inside an inline call made within the pc, the pc will
      * be that of the outermost call and the state of any inlined frame(s) is
      * returned through pinlined.
      *
      * Beware, as the name implies, pcQuadratic can lead to quadratic behavior
      * in loops such as:
      *
      *   for ( ...; fp; fp = fp->prev())
      *     ... fp->pcQuadratic(cx->stack);
      *
      * Using next can avoid this, but in most cases prefer ScriptFrameIter;
      * it is amortized O(1).
+     *
+     *   When I get to the bottom I go back to the top of the stack
+     *   Where I stop and I turn and I go right back
+     *   Till I get to the bottom and I see you again...
      */
-
     jsbytecode *pcQuadratic(const ContextStack &stack, StackFrame *next = NULL,
-                            InlinedSite **pinlined = NULL);
+                            JSInlinedSite **pinlined = NULL);
 
-    jsbytecode *prevpc(InlinedSite **pinlined) {
+    jsbytecode *prevpc(JSInlinedSite **pinlined) {
         if (flags_ & HAS_PREVPC) {
             if (pinlined)
                 *pinlined = prevInline_;
             return prevpc_;
         }
         return prevpcSlow(pinlined);
     }
 
-    InlinedSite *prevInline() {
+    JSInlinedSite *prevInline() {
         JS_ASSERT(flags_ & HAS_PREVPC);
         return prevInline_;
     }
 
+    JSScript *script() const {
+        JS_ASSERT(isScriptFrame());
+        return isFunctionFrame()
+               ? isEvalFrame() ? u.evalScript : fun()->script()
+               : exec.script;
+    }
+
+    JSScript *functionScript() const {
+        JS_ASSERT(isFunctionFrame());
+        return isEvalFrame() ? u.evalScript : fun()->script();
+    }
+
+    JSScript *globalScript() const {
+        JS_ASSERT(isGlobalFrame());
+        return exec.script;
+    }
+
+    JSScript *maybeScript() const {
+        return isScriptFrame() ? script() : NULL;
+    }
+
+    size_t numFixed() const {
+        return script()->nfixed;
+    }
+
+    size_t numSlots() const {
+        return script()->nslots;
+    }
+
+    size_t numGlobalVars() const {
+        JS_ASSERT(isGlobalFrame());
+        return exec.script->nfixed;
+    }
+
     /*
      * Function
      *
      * All function frames have an associated interpreted JSFunction. The
      * function returned by fun() and maybeFun() is not necessarily the
      * original canonical function which the frame's script was compiled
      * against. To get this function, use maybeScriptFunction().
      */
@@ -756,44 +645,138 @@ class StackFrame
             return NULL;
         const StackFrame *fp = this;
         while (fp->isEvalFrame())
             fp = fp->prev();
         return fp->script()->function();
     }
 
     /*
+     * Arguments
+     *
+     * Only non-eval function frames have arguments. A frame follows its
+     * arguments contiguously in memory. The arguments pushed by the caller are
+     * the 'actual' arguments. The declared arguments of the callee are the
+     * 'formal' arguments. When the caller passes less or equal actual
+     * arguments, the actual and formal arguments are the same array (but with
+     * different extents). When the caller passes too many arguments, the
+     * formal subset of the actual arguments is copied onto the top of the
+     * stack. This allows the engine to maintain a jit-time constant offset of
+     * arguments from the frame pointer. Since the formal subset of the actual
+     * arguments is potentially on the stack twice, it is important for all
+     * reads/writes to refer to the same canonical memory location.
+     *
+     * An arguments object (the object returned by the 'arguments' keyword) is
+     * lazily created, so a given function frame may or may not have one.
+     */
+
+    /* True if this frame has arguments. Contrast with hasArgsObj. */
+    bool hasArgs() const {
+        return isNonEvalFunctionFrame();
+    }
+
+    unsigned numFormalArgs() const {
+        JS_ASSERT(hasArgs());
+        return fun()->nargs;
+    }
+
+    Value &formalArg(unsigned i) const {
+        JS_ASSERT(i < numFormalArgs());
+        return formalArgs()[i];
+    }
+
+    Value *formalArgs() const {
+        JS_ASSERT(hasArgs());
+        return (Value *)this - numFormalArgs();
+    }
+
+    Value *formalArgsEnd() const {
+        JS_ASSERT(hasArgs());
+        return (Value *)this;
+    }
+
+    Value *maybeFormalArgs() const {
+        return (flags_ & (FUNCTION | EVAL)) == FUNCTION
+               ? formalArgs()
+               : NULL;
+    }
+
+    inline unsigned numActualArgs() const;
+    inline Value *actualArgs() const;
+    inline Value *actualArgsEnd() const;
+
+    inline Value &canonicalActualArg(unsigned i) const;
+    template <class Op>
+    inline bool forEachCanonicalActualArg(Op op, unsigned start = 0, unsigned count = unsigned(-1));
+    template <class Op> inline bool forEachFormalArg(Op op);
+
+    /* XXX: all these argsObj functions will be removed with bug 659577. */
+
+    bool hasArgsObj() const {
+        /*
+         * HAS_ARGS_OBJ is still technically not equivalent to
+         * script()->needsArgsObj() during functionPrologue (where GC can
+         * observe a frame that needsArgsObj but has not yet been given the
+         * args). This can be fixed by creating and rooting the args/call
+         * object before pushing the frame, which should be done eventually.
+         */
+        return !!(flags_ & HAS_ARGS_OBJ);
+    }
+
+    ArgumentsObject &argsObj() const {
+        JS_ASSERT(hasArgsObj());
+        return *argsObj_;
+    }
+
+    ArgumentsObject *maybeArgsObj() const {
+        return hasArgsObj() ? &argsObj() : NULL;
+    }
+
+    void initArgsObj(ArgumentsObject &argsObj) {
+        JS_ASSERT(script()->needsArgsObj());
+        JS_ASSERT(!hasArgsObj());
+        argsObj_ = &argsObj;
+        flags_ |= HAS_ARGS_OBJ;
+    }
+
+    /*
      * This value
      *
      * Every frame has a this value although, until 'this' is computed, the
      * value may not be the semantically-correct 'this' value.
      *
      * The 'this' value is stored before the formal arguments for function
      * frames and directly before the frame for global frames. The *Args
      * members assert !isEvalFrame(), so we implement specialized inline
      * methods for accessing 'this'. When the caller has static knowledge that
-     * a frame is a function, 'functionThis' allows more efficient access.
+     * a frame is a function or global frame, 'functionThis' and 'globalThis',
+     * respectively, allow more efficient access.
      */
 
     Value &functionThis() const {
         JS_ASSERT(isFunctionFrame());
         if (isEvalFrame())
             return ((Value *)this)[-1];
-        return formals()[-1];
+        return formalArgs()[-1];
     }
 
     JSObject &constructorThis() const {
         JS_ASSERT(hasArgs());
-        return formals()[-1].toObject();
+        return formalArgs()[-1].toObject();
+    }
+
+    Value &globalThis() const {
+        JS_ASSERT(isGlobalFrame());
+        return ((Value *)this)[-1];
     }
 
     Value &thisValue() const {
         if (flags_ & (EVAL | GLOBAL))
             return ((Value *)this)[-1];
-        return formals()[-1];
+        return formalArgs()[-1];
     }
 
     /*
      * Callee
      *
      * Only function frames have a callee. An eval frame in a function has the
      * same callee as its containing function frame. maybeCalleev can be used
      * to return a value that is either the callee object (for function frames) or
@@ -809,32 +792,136 @@ class StackFrame
         JS_ASSERT(isFunctionFrame());
         return mutableCalleev();
     }
 
     const Value &maybeCalleev() const {
         JS_ASSERT(isScriptFrame());
         Value &calleev = flags_ & (EVAL | GLOBAL)
                          ? ((Value *)this)[-2]
-                         : formals()[-2];
+                         : formalArgs()[-2];
         JS_ASSERT(calleev.isObjectOrNull());
         return calleev;
     }
 
     Value &mutableCalleev() const {
         JS_ASSERT(isFunctionFrame());
         if (isEvalFrame())
             return ((Value *)this)[-2];
-        return formals()[-2];
+        return formalArgs()[-2];
     }
 
     CallReceiver callReceiver() const {
-        return CallReceiverFromArgv(formals());
+        return CallReceiverFromArgv(formalArgs());
+    }
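
A small stand-alone model of the layout implied by the accessors above (FrameView is a hypothetical viewer type, not the engine's StackFrame): for a non-eval function frame the callee sits at formalArgs()[-2] and 'this' at formalArgs()[-1], immediately below the formals.

    // Layout modeled:  ... | callee | this | formal0 .. formalN-1 | frame header | slots ...
    //                                                              ^-- (Value *)fp
    #include <cassert>

    struct ModelValue { int tag = 0; };

    struct FrameView {
        ModelValue *framePtr;   // where the StackFrame header would start
        unsigned nargs;         // number of declared formals

        ModelValue *formalArgs() const { return framePtr - nargs; }
        ModelValue &thisValue()  const { return formalArgs()[-1]; }
        ModelValue &calleev()    const { return formalArgs()[-2]; }
    };

    int main() {
        ModelValue buf[6] = {};          // [callee][this][f0][f1] then "frame"
        FrameView view{buf + 4, 2};
        assert(&view.calleev()   == &buf[0]);
        assert(&view.thisValue() == &buf[1]);
        assert(view.formalArgs() == &buf[2]);
        return 0;
    }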
+
+    /*
+     * Scope chain
+     *
+     * Every frame has a scopeChain which, when traversed via the 'parent' link
+     * to the root, indicates the current global object. A 'call object' is a
+     * node on a scope chain representing a function's activation record. A
+     * call object is used for dynamically-scoped name lookup and lexically-
+     * scoped upvar access. The call object holds the values of locals and
+     * arguments when a function returns (and its stack frame is popped). For
+     * performance reasons, call objects are created lazily for 'lightweight'
+     * functions, i.e., functions which are not statically known to require a
+     * call object. Thus, a given function frame may or may not have a call
+     * object. When a function does have a call object, it is found by walking
+     * up the scope chain until the first call object. Thus, it is important,
+     * when setting the scope chain, to indicate whether the new scope chain
+     * contains a new call object and thus changes the 'hasCallObj' state.
+     *
+     * The method JIT requires that HAS_SCOPECHAIN be set for all frames which
+     * use NAME or related opcodes that can access the scope chain (so it does
+     * not have to test the bit). To ensure this, we always initialize the
+     * scope chain when pushing frames in the VM, and only initialize it when
+     * pushing frames in JIT code when the above situation applies.
+     *
+     * NB: 'fp->hasCallObj()' implies that fp->callObj() needs to be 'put' when
+     * the frame is popped. Since the scope chain of a non-strict eval frame
+     * contains the call object of the parent (function) frame, it is possible
+     * to have:
+     *   !fp->hasCallObj() && fp->scopeChain().isCall()
+     */
+
+    inline HandleObject scopeChain() const;
+    inline GlobalObject &global() const;
+
+    bool hasCallObj() const {
+        bool ret = !!(flags_ & HAS_CALL_OBJ);
+        JS_ASSERT_IF(ret, !isNonStrictEvalFrame());
+        return ret;
     }
 
+    inline CallObject &callObj() const;
+    inline void initScopeChain(CallObject &callobj);
+    inline void setScopeChain(JSObject &obj);
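
The "walk up the scope chain until the first call object" lookup mentioned above can be pictured with a toy linked structure (ModelScope and FindCallObj are illustrative stand-ins, not engine types):

    // Walks parent links toward the global object and returns the innermost
    // call-object node, or nullptr for a lightweight function without one.
    #include <cstdio>

    struct ModelScope {
        ModelScope *parent;   // next enclosing scope, ending at the global
        bool isCall;          // true if this node plays the call-object role
    };

    ModelScope *FindCallObj(ModelScope *scopeChain) {
        for (ModelScope *s = scopeChain; s; s = s->parent) {
            if (s->isCall)
                return s;
        }
        return nullptr;
    }

    int main() {
        ModelScope global = { nullptr, false };
        ModelScope call   = { &global, true };    // heavyweight function's call obj
        ModelScope block  = { &call,   false };   // e.g. a with/let scope on top
        std::printf("call obj found: %s\n",
                    FindCallObj(&block) == &call ? "yes" : "no");
        return 0;
    }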
+
+    /*
+     * Variables object
+     *
+     * Given that a (non-dummy) StackFrame corresponds roughly to an ES5
+     * Execution Context (ES5 10.3), StackFrame::varObj corresponds to the
+     * VariableEnvironment component of an Execution Context. Intuitively, the
+     * variables object is where new bindings (variables and functions) are
+     * stored. One might expect that this is either the callObj or
+     * scopeChain.globalObj for function or global code, respectively; however,
+     * the JSAPI allows calls of Execute to specify a variables object on the
+     * scope chain other than the call/global object. This allows embeddings to
+     * run multiple scripts under the same global, each time using a new
+     * variables object to collect and discard the script's global variables.
+     */
+
+    inline JSObject &varObj();
+
+    /* Block chain */
+
+    bool hasBlockChain() const {
+        return (flags_ & HAS_BLOCKCHAIN) && blockChain_;
+    }
+
+    StaticBlockObject *maybeBlockChain() {
+        return (flags_ & HAS_BLOCKCHAIN) ? blockChain_ : NULL;
+    }
+
+    StaticBlockObject &blockChain() const {
+        JS_ASSERT(hasBlockChain());
+        return *blockChain_;
+    }
+
+    /* Enter/exit execution of a lexical block. */
+    bool pushBlock(JSContext *cx, StaticBlockObject &block);
+    void popBlock(JSContext *cx);
+
+    /* Exits (via execution or exception) a with block. */
+    void popWith(JSContext *cx);
+
+    /*
+     * Prologue for function frames: make a call object for heavyweight
+     * functions, and maintain type nesting invariants.
+     */
+    inline bool functionPrologue(JSContext *cx);
+
+    /*
+     * Epilogue for function frames: put any args or call object for the frame
+     * which may still be live, and maintain type nesting invariants. Note:
+     * this does mark the epilogue as having been completed, since the frame is
+     * about to be popped. Use updateEpilogueFlags for this.
+     */
+    inline void functionEpilogue(JSContext *cx);
+
+    /*
+     * If callObj() or argsObj() have already been put, update our flags
+     * accordingly. This call must be followed by a later functionEpilogue.
+     */
+    inline void updateEpilogueFlags();
+
+    inline bool maintainNestingState() const;
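
The intended ordering of the hooks above can be sketched with a toy frame (the flags and bodies here are assumptions for illustration, not the real prologue/epilogue logic): prologue before executing, epilogue exactly once when popping, and updateEpilogueFlags only to record an early put that a later functionEpilogue will observe.

    // Minimal sequencing sketch; real code also handles type nesting, the
    // arguments object, and error paths.
    #include <cassert>

    struct ModelFrame {
        bool heavyweight = true;
        bool hasCallObj = false;
        bool callObjPut = false;

        bool functionPrologue() {
            if (heavyweight)
                hasCallObj = true;     // create the call object up front
            return true;               // (allocation failure omitted)
        }

        void updateEpilogueFlags() {
            if (callObjPut)
                hasCallObj = false;    // record that the put already happened
        }

        void functionEpilogue() {
            if (hasCallObj) {
                callObjPut = true;     // 'put' the still-live call object
                hasCallObj = false;
            }
        }
    };

    int main() {
        ModelFrame fp;
        if (!fp.functionPrologue())
            return 1;
        /* ... execute the function body ... */
        fp.functionEpilogue();         // runs once, as the frame is popped
        assert(fp.callObjPut && !fp.hasCallObj);
        return 0;
    }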
+
     /*
      * Frame compartment
      *
      * A stack frame's compartment is the frame's containing context's
      * compartment when the frame was pushed.
      */
 
     inline JSCompartment *compartment() const;
@@ -847,21 +934,21 @@ class StackFrame
 
     void setAnnotation(void *annot) {
         flags_ |= HAS_ANNOTATION;
         annotation_ = annot;
     }
 
     /* JIT rejoin state */
 
-    FrameRejoinState rejoin() const {
+    JSRejoinState rejoin() const {
         return rejoin_;
     }
 
-    void setRejoin(FrameRejoinState state) {
+    void setRejoin(JSRejoinState state) {
         rejoin_ = state;
     }
 
     /* Down frame expansion state */
 
     void setDownFramesExpanded() {
         flags_ |= DOWN_FRAMES_EXPANDED;
     }
@@ -926,62 +1013,44 @@ class StackFrame
         ncode_ = addr;
     }
 
     void **addressOfNativeReturnAddress() {
         return &ncode_;
     }
 
     /*
-     * A "generator" frame is a function frame associated with a generator.
-     * Since generators are not executed LIFO, the VM copies a single abstract
-     * generator frame back and forth between the LIFO VM stack (when the
-     * generator is active) and a snapshot stored in JSGenerator (when the
-     * generator is inactive). A generator frame is comprised of a StackFrame
-     * structure and the values that make up the arguments, locals, and
-     * expression stack. The layout in the JSGenerator snapshot matches the
-     * layout on the stack (see the "VM stack layout" comment above).
+     * Generator-specific members
+     *
+     * A non-eval function frame may optionally be the activation of a
+     * generator. For the most part, generator frames act like ordinary frames.
+     * For exceptions, see js_FloatingFrameIfGenerator.
      */
 
     bool isGeneratorFrame() const {
-        bool ret = flags_ & GENERATOR;
-        JS_ASSERT_IF(ret, isNonEvalFunctionFrame());
-        return ret;
+        return !!(flags_ & GENERATOR);
     }
 
-    void initGeneratorFrame() const {
-        JS_ASSERT(!isGeneratorFrame());
-        JS_ASSERT(isNonEvalFunctionFrame());
-        flags_ |= GENERATOR;
-    }
-
-    Value *generatorArgsSnapshotBegin() const {
-        JS_ASSERT(isGeneratorFrame());
-        return actuals() - 2;
+    bool isFloatingGenerator() const {
+        JS_ASSERT_IF(flags_ & FLOATING_GENERATOR, isGeneratorFrame());
+        return !!(flags_ & FLOATING_GENERATOR);
     }
 
-    Value *generatorArgsSnapshotEnd() const {
-        JS_ASSERT(isGeneratorFrame());
-        return (Value *)this;
-    }
-
-    Value *generatorSlotsSnapshotBegin() const {
-        JS_ASSERT(isGeneratorFrame());
-        return (Value *)(this + 1);
+    void initFloatingGenerator() {
+        JS_ASSERT(!(flags_ & GENERATOR));
+        flags_ |= (GENERATOR | FLOATING_GENERATOR);
     }
 
-    enum TriggerPostBarriers {
-        DoPostBarrier = true,
-        NoPostBarrier = false
-    };
-    template <class T, class U, TriggerPostBarriers doPostBarrier>
-    void copyFrameAndValues(JSContext *cx, StackFrame *fp, T *vp,
-                            StackFrame *otherfp, U *othervp, Value *othersp);
+    void unsetFloatingGenerator() {
+        flags_ &= ~FLOATING_GENERATOR;
+    }
 
-    JSGenerator *maybeSuspendedGenerator(JSRuntime *rt);
+    void setFloatingGenerator() {
+        flags_ |= FLOATING_GENERATOR;
+    }
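
Since the comment above only hints at it, here is a hedged sketch of the floating-generator idea: the same logical frame is byte-copied between the VM stack (active) and a heap snapshot (floating), with a flag marking which copy is current. The types and copy sizes are illustrative assumptions, not JSGenerator's real layout.

    // Toy copy-in/copy-out model; a real generator also rebases pointers and
    // copies the args, locals, and expression-stack values around the frame.
    #include <cstring>
    #include <vector>

    struct ModelFrame {
        enum Flags : unsigned { GENERATOR = 0x1, FLOATING_GENERATOR = 0x2 };
        unsigned flags = GENERATOR;
        int someLocal = 0;
    };

    struct ModelGenerator {
        std::vector<unsigned char> snapshot;   // heap-resident copy of the frame

        void suspend(const ModelFrame &stackfp) {
            ModelFrame copy = stackfp;
            copy.flags |= ModelFrame::FLOATING_GENERATOR;   // heap copy is current
            const unsigned char *p = reinterpret_cast<const unsigned char *>(&copy);
            snapshot.assign(p, p + sizeof(ModelFrame));
        }

        void resume(ModelFrame &stackfp) const {
            std::memcpy(&stackfp, snapshot.data(), sizeof(ModelFrame));
            stackfp.flags &= ~unsigned(ModelFrame::FLOATING_GENERATOR);  // stack copy live
        }
    };

    int main() {
        ModelFrame onStack;
        onStack.someLocal = 7;
        ModelGenerator gen;
        gen.suspend(onStack);    // yield: state moves to the heap snapshot
        ModelFrame resumed;
        gen.resume(resumed);     // next(): state copied back onto the stack
        return resumed.someLocal == 7 ? 0 : 1;
    }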
 
     /*
      * js::Execute pushes both global and function frames (since eval() in a
      * function pushes a frame with isFunctionFrame() && isEvalFrame()). Most
      * code should not care where a frame was pushed, but if it is necessary to
      * pick out frames pushed by js::Execute, this is the right query:
      */
 
@@ -1001,42 +1070,32 @@ class StackFrame
         JS_ASSERT((flags_ & mask) != mask);
         return InitialFrameFlags(flags_ & mask);
     }
 
     bool isConstructing() const {
         return !!(flags_ & CONSTRUCTING);
     }
 
-    bool beforeHeavyweightPrologue() const {
-        JS_ASSERT(isNonEvalFunctionFrame());
-        JS_ASSERT(fun()->isHeavyweight());
-        return !(flags_ & HAS_CALL_OBJ);
-    }
-
     /*
      * The method JIT call/apply optimization can erase Function.{call,apply}
      * invocations from the stack and push the callee frame directly. The base
      * of these frames will be offset by one value, however, which the
      * interpreter needs to account for if it ends up popping the frame.
      */
     bool loweredCallOrApply() const {
         return !!(flags_ & LOWERED_CALL_APPLY);
     }
 
     bool isDebuggerFrame() const {
         return !!(flags_ & DEBUGGER);
     }
 
-    bool prevUpToDate() const {
-        return !!(flags_ & PREV_UP_TO_DATE);
-    }
-
-    void setPrevUpToDate() {
-        flags_ |= PREV_UP_TO_DATE;
+    bool hasOverflowArgs() const {
+        return !!(flags_ & OVERFLOW_ARGS);
     }
 
     bool isYielding() {
         return !!(flags_ & YIELDING);
     }
 
     void setYielding() {
         flags_ |= YIELDING;
@@ -1052,19 +1111,16 @@ class StackFrame
 
     bool finishedInInterpreter() const {
         return !!(flags_ & FINISHED_IN_INTERP);
     }
 
   public:
     /* Public, but only for JIT use: */
 
-    inline void resetInlinePrev(StackFrame *prevfp, jsbytecode *prevpc);
-    inline void initInlineFrame(JSFunction *fun, StackFrame *prevfp, jsbytecode *prevpc);
-
     static size_t offsetOfFlags() {
         return offsetof(StackFrame, flags_);
     }
 
     static size_t offsetOfExec() {
         return offsetof(StackFrame, exec);
     }
 
@@ -1079,24 +1135,24 @@ class StackFrame
     static size_t offsetOfPrev() {
         return offsetof(StackFrame, prev_);
     }
 
     static size_t offsetOfReturnValue() {
         return offsetof(StackFrame, rval_);
     }
 
+    static size_t offsetOfArgsObj() {
+        return offsetof(StackFrame, argsObj_);
+    }
+
     static ptrdiff_t offsetOfNcode() {
         return offsetof(StackFrame, ncode_);
     }
 
-    static ptrdiff_t offsetOfArgsObj() {
-        return offsetof(StackFrame, argsObj_);
-    }
-
     static ptrdiff_t offsetOfCallee(JSFunction *fun) {
         JS_ASSERT(fun != NULL);
         return -(fun->nargs + 2) * sizeof(Value);
     }
 
     static ptrdiff_t offsetOfThis(JSFunction *fun) {
         return fun == NULL
                ? -1 * ptrdiff_t(sizeof(Value))
@@ -1160,41 +1216,31 @@ static inline JSStackFrame * Jsvalify(St
 /*****************************************************************************/
 
 class FrameRegs
 {
   public:
     Value *sp;
     jsbytecode *pc;
   private:
-    InlinedSite *inlined_;
+    JSInlinedSite *inlined_;
     StackFrame *fp_;
   public:
     StackFrame *fp() const { return fp_; }
-    InlinedSite *inlined() const { return inlined_; }
+    JSInlinedSite *inlined() const { return inlined_; }
 
     /* For jit use (need constant): */
     static const size_t offsetOfFp = 3 * sizeof(void *);
     static const size_t offsetOfInlined = 2 * sizeof(void *);
     static void staticAssert() {
         JS_STATIC_ASSERT(offsetOfFp == offsetof(FrameRegs, fp_));
         JS_STATIC_ASSERT(offsetOfInlined == offsetof(FrameRegs, inlined_));
     }
     void clearInlined() { inlined_ = NULL; }
 
-    unsigned stackDepth() const {
-        JS_ASSERT(sp >= fp_->base());
-        return sp - fp_->base();
-    }
-
-    Value *spForStackDepth(unsigned depth) const {
-        JS_ASSERT(fp_->script()->nfixed + depth <= fp_->script()->nslots);
-        return fp_->base() + depth;
-    }
-
     /* For generator: */
     void rebaseFromTo(const FrameRegs &from, StackFrame &to) {
         fp_ = &to;
         sp = to.slots() + (from.sp - from.fp_->slots());
         pc = from.pc;
         inlined_ = from.inlined_;
         JS_ASSERT(fp_);
     }
@@ -1431,20 +1477,16 @@ class StackSpace
                             Value *from, ptrdiff_t nvals,
                             JSCompartment *dest = (JSCompartment *)CX_COMPARTMENT) const;
     JS_FRIEND_API(bool) ensureSpaceSlow(JSContext *cx, MaybeReportError report,
                                         Value *from, ptrdiff_t nvals,
                                         JSCompartment *dest) const;
 
     StackSegment &findContainingSegment(const StackFrame *target) const;
 
-    bool containsFast(StackFrame *fp) {
-        return (Value *)fp >= base_ && (Value *)fp <= trustedEnd_;
-    }
-
   public:
     StackSpace();
     bool init();
     ~StackSpace();
 
     /*
      * Maximum supported value of arguments.length. This bounds the maximum
      * number of arguments that can be supplied to Function.prototype.apply.
@@ -1487,17 +1529,17 @@ class StackSpace
      * does indeed have this required space and reports an error and returns
      * NULL if this reserve space cannot be allocated.
      */
     inline Value *getStackLimit(JSContext *cx, MaybeReportError report);
     bool tryBumpLimit(JSContext *cx, Value *from, unsigned nvals, Value **limit);
 
     /* Called during GC: mark segments, frames, and slots under firstUnused. */
     void mark(JSTracer *trc);
-    void markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
+    void markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
 
     /* Called during GC: sets active flag on compartments with active frames. */
     void markActiveCompartments();
 
     /* We only report the committed size;  uncommitted size is uninteresting. */
     JS_FRIEND_API(size_t) sizeOfCommitted();
 
 #ifdef DEBUG