Rename js::CodeGenerator to js::BytecodeEmitter. Bug 696876, part 1 of 3. r=dvander.
author Jason Orendorff <jorendorff@mozilla.com>
date Thu, 27 Oct 2011 09:15:40 -0500
changeset 80665 c479d9c16a807481e61afae05427fe3832541ae6
parent 80664 f7a30d961569b5b6d3132f2783b963d6e084471c
child 80666 2518a604fb45a480dec031e4d725637be0416233
push id unknown
push user unknown
push date unknown
reviewers dvander
bugs 696876
milestone 10.0a1
js/src/Makefile.in
js/src/frontend/BytecodeCompiler.cpp
js/src/frontend/BytecodeEmitter.cpp
js/src/frontend/BytecodeEmitter.h
js/src/frontend/BytecodeGenerator.cpp
js/src/frontend/BytecodeGenerator.h
js/src/frontend/FoldConstants.cpp
js/src/frontend/ParseNode-inl.h
js/src/frontend/ParseNode.h
js/src/frontend/Parser.cpp
js/src/frontend/Parser.h
js/src/frontend/TokenStream.cpp
js/src/jsapi.cpp
js/src/jsdbgapi.cpp
js/src/jsfun.cpp
js/src/jsinterp.cpp
js/src/jsobj.cpp
js/src/jsopcode.cpp
js/src/jsopcode.h
js/src/jsprvtd.h
js/src/jsreflect.cpp
js/src/jsscript.cpp
js/src/jsscript.h
js/src/jstracer.cpp
js/src/methodjit/Compiler.cpp
js/src/methodjit/FastOps.cpp
js/src/shell/js.cpp
js/src/vm/Debugger.cpp
js/src/vm/GlobalObject.cpp
--- a/js/src/Makefile.in
+++ b/js/src/Makefile.in
@@ -158,17 +158,17 @@ CPPSRCS		= \
 		prmjtime.cpp \
 		sharkctl.cpp \
 		CallObject.cpp \
 		Debugger.cpp \
 		GlobalObject.cpp \
 		Stack.cpp \
 		String.cpp \
 		BytecodeCompiler.cpp \
-		BytecodeGenerator.cpp \
+		BytecodeEmitter.cpp \
 		FoldConstants.cpp \
 		ParseMaps.cpp \
 		ParseNode.cpp \
 		Parser.cpp \
 		TokenStream.cpp \
 		LifoAlloc.cpp \
 		RegExpObject.cpp \
 		RegExpStatics.cpp \
--- a/js/src/frontend/BytecodeCompiler.cpp
+++ b/js/src/frontend/BytecodeCompiler.cpp
@@ -37,17 +37,17 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #include "frontend/BytecodeCompiler.h"
 
 #include "jsprobes.h"
 
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/FoldConstants.h"
 #include "vm/GlobalObject.h"
 
 #include "jsinferinlines.h"
 
 using namespace js;
 using namespace js::frontend;
 
@@ -83,18 +83,18 @@ BytecodeCompiler::compileScript(JSContex
 
     BytecodeCompiler compiler(cx, principals, callerFrame);
     if (!compiler.init(chars, length, filename, lineno, version))
         return NULL;
 
     Parser &parser = compiler.parser;
     TokenStream &tokenStream = parser.tokenStream;
 
-    CodeGenerator cg(&parser, tokenStream.getLineno());
-    if (!cg.init(cx, TreeContext::USED_AS_TREE_CONTEXT))
+    BytecodeEmitter bce(&parser, tokenStream.getLineno());
+    if (!bce.init(cx, TreeContext::USED_AS_TREE_CONTEXT))
         return NULL;
 
     Probes::compileScriptBegin(cx, filename, lineno);
 
     MUST_FLOW_THROUGH("out");
 
     // We can specialize a bit for the given scope chain if that scope chain is the global object.
     JSObject *globalObj = scopeChain && scopeChain == scopeChain->getGlobal()
@@ -102,73 +102,73 @@ BytecodeCompiler::compileScript(JSContex
                         : NULL;
 
     JS_ASSERT_IF(globalObj, globalObj->isNative());
     JS_ASSERT_IF(globalObj, JSCLASS_HAS_GLOBAL_FLAG_AND_SLOTS(globalObj->getClass()));
 
     /* Null script early in case of error, to reduce our code footprint. */
     script = NULL;
 
-    GlobalScope globalScope(cx, globalObj, &cg);
-    cg.flags |= tcflags;
-    cg.setScopeChain(scopeChain);
+    GlobalScope globalScope(cx, globalObj, &bce);
+    bce.flags |= tcflags;
+    bce.setScopeChain(scopeChain);
     compiler.globalScope = &globalScope;
-    if (!SetStaticLevel(&cg, staticLevel))
+    if (!SetStaticLevel(&bce, staticLevel))
         goto out;
 
     /* If this is a direct call to eval, inherit the caller's strictness.  */
     if (callerFrame &&
         callerFrame->isScriptFrame() &&
         callerFrame->script()->strictModeCode) {
-        cg.flags |= TCF_STRICT_MODE_CODE;
+        bce.flags |= TCF_STRICT_MODE_CODE;
         tokenStream.setStrictMode();
     }
 
 #ifdef DEBUG
     bool savedCallerFun;
     savedCallerFun = false;
 #endif
     if (tcflags & TCF_COMPILE_N_GO) {
         if (source) {
             /*
              * Save eval program source in script->atoms[0] for the
              * eval cache (see EvalCacheLookup in jsobj.cpp).
              */
             JSAtom *atom = js_AtomizeString(cx, source);
             jsatomid _;
-            if (!atom || !cg.makeAtomIndex(atom, &_))
+            if (!atom || !bce.makeAtomIndex(atom, &_))
                 goto out;
         }
 
         if (callerFrame && callerFrame->isFunctionFrame()) {
             /*
              * An eval script in a caller frame needs to have its enclosing
              * function captured in case it refers to an upvar, and someone
              * wishes to decompile it while it's running.
              */
             ObjectBox *funbox = parser.newObjectBox(callerFrame->fun());
             if (!funbox)
                 goto out;
-            funbox->emitLink = cg.objectList.lastbox;
-            cg.objectList.lastbox = funbox;
-            cg.objectList.length++;
+            funbox->emitLink = bce.objectList.lastbox;
+            bce.objectList.lastbox = funbox;
+            bce.objectList.length++;
 #ifdef DEBUG
             savedCallerFun = true;
 #endif
         }
     }
 
     /*
      * Inline this->statements to emit as we go to save AST space. We must
      * generate our script-body blockid since we aren't calling Statements.
      */
     uint32 bodyid;
-    if (!GenerateBlockId(&cg, bodyid))
+    if (!GenerateBlockId(&bce, bodyid))
         goto out;
-    cg.bodyid = bodyid;
+    bce.bodyid = bodyid;
 
 #if JS_HAS_XML_SUPPORT
     pn = NULL;
     bool onlyXML;
     onlyXML = true;
 #endif
 
     inDirectivePrologue = true;
@@ -180,36 +180,36 @@ BytecodeCompiler::compileScript(JSContex
                 break;
             JS_ASSERT(tt == TOK_ERROR);
             goto out;
         }
 
         pn = parser.statement();
         if (!pn)
             goto out;
-        JS_ASSERT(!cg.blockNode);
+        JS_ASSERT(!bce.blockNode);
 
         if (inDirectivePrologue && !parser.recognizeDirectivePrologue(pn, &inDirectivePrologue))
             goto out;
 
-        if (!FoldConstants(cx, pn, &cg))
+        if (!FoldConstants(cx, pn, &bce))
             goto out;
 
-        if (!parser.analyzeFunctions(&cg))
+        if (!parser.analyzeFunctions(&bce))
             goto out;
-        cg.functionList = NULL;
+        bce.functionList = NULL;
 
-        if (!EmitTree(cx, &cg, pn))
+        if (!EmitTree(cx, &bce, pn))
             goto out;
 
 #if JS_HAS_XML_SUPPORT
         if (!pn->isKind(TOK_SEMI) || !pn->pn_kid || !TreeTypeIsXML(pn->pn_kid->getKind()))
             onlyXML = false;
 #endif
-        cg.freeTree(pn);
+        bce.freeTree(pn);
     }
 
 #if JS_HAS_XML_SUPPORT
     /*
      * Prevent XML data theft via <script src="http://victim.com/foo.xml">.
      * For background, see:
      *
      * https://bugzilla.mozilla.org/show_bug.cgi?id=336551
@@ -220,55 +220,55 @@ BytecodeCompiler::compileScript(JSContex
     }
 #endif
 
     /*
      * Global variables (gvars) share the atom index space with locals. Due to
      * incremental code generation we need to patch the bytecode to adjust the
      * local references to skip the globals.
      */
-    if (cg.hasSharps()) {
+    if (bce.hasSharps()) {
         jsbytecode *code, *end;
         JSOp op;
         const JSCodeSpec *cs;
         uintN len, slot;
 
-        code = CG_BASE(&cg);
-        for (end = code + CG_OFFSET(&cg); code != end; code += len) {
+        code = CG_BASE(&bce);
+        for (end = code + CG_OFFSET(&bce); code != end; code += len) {
             JS_ASSERT(code < end);
             op = (JSOp) *code;
             cs = &js_CodeSpec[op];
             len = (cs->length > 0)
                   ? (uintN) cs->length
                   : js_GetVariableBytecodeLength(code);
             if ((cs->format & JOF_SHARPSLOT) ||
                 JOF_TYPE(cs->format) == JOF_LOCAL ||
                 (JOF_TYPE(cs->format) == JOF_SLOTATOM)) {
                 JS_ASSERT_IF(!(cs->format & JOF_SHARPSLOT),
                              JOF_TYPE(cs->format) != JOF_SLOTATOM);
                 slot = GET_SLOTNO(code);
                 if (!(cs->format & JOF_SHARPSLOT))
-                    slot += cg.sharpSlots();
+                    slot += bce.sharpSlots();
                 if (slot >= SLOTNO_LIMIT)
                     goto too_many_slots;
                 SET_SLOTNO(code, slot);
             }
         }
     }
 
     /*
      * Nowadays the threaded interpreter needs a stop instruction, so we
      * do have to emit that here.
      */
-    if (Emit1(cx, &cg, JSOP_STOP) < 0)
+    if (Emit1(cx, &bce, JSOP_STOP) < 0)
         goto out;
 
-    JS_ASSERT(cg.version() == version);
+    JS_ASSERT(bce.version() == version);
 
-    script = JSScript::NewScriptFromCG(cx, &cg);
+    script = JSScript::NewScriptFromCG(cx, &bce);
     if (!script)
         goto out;
 
     JS_ASSERT(script->savedCallerFun == savedCallerFun);
 
     if (!defineGlobals(cx, globalScope, script))
         script = NULL;
 
@@ -396,81 +396,81 @@ BytecodeCompiler::compileFunctionBody(JS
     BytecodeCompiler compiler(cx, principals);
 
     if (!compiler.init(chars, length, filename, lineno, version))
         return false;
 
     Parser &parser = compiler.parser;
     TokenStream &tokenStream = parser.tokenStream;
 
-    CodeGenerator funcg(&parser, tokenStream.getLineno());
-    if (!funcg.init(cx, TreeContext::USED_AS_TREE_CONTEXT))
+    BytecodeEmitter funbce(&parser, tokenStream.getLineno());
+    if (!funbce.init(cx, TreeContext::USED_AS_TREE_CONTEXT))
         return false;
 
-    funcg.flags |= TCF_IN_FUNCTION;
-    funcg.setFunction(fun);
-    funcg.bindings.transfer(cx, bindings);
-    fun->setArgCount(funcg.bindings.countArgs());
-    if (!GenerateBlockId(&funcg, funcg.bodyid))
+    funbce.flags |= TCF_IN_FUNCTION;
+    funbce.setFunction(fun);
+    funbce.bindings.transfer(cx, bindings);
+    fun->setArgCount(funbce.bindings.countArgs());
+    if (!GenerateBlockId(&funbce, funbce.bodyid))
         return false;
 
     /* FIXME: make Function format the source for a function definition. */
     tokenStream.mungeCurrentToken(TOK_NAME);
-    ParseNode *fn = FunctionNode::create(&funcg);
+    ParseNode *fn = FunctionNode::create(&funbce);
     if (fn) {
         fn->pn_body = NULL;
         fn->pn_cookie.makeFree();
 
         uintN nargs = fun->nargs;
         if (nargs) {
             /*
              * NB: do not use AutoLocalNameArray because it will release space
              * allocated from cx->tempLifoAlloc by DefineArg.
              */
             Vector<JSAtom *> names(cx);
-            if (!funcg.bindings.getLocalNameArray(cx, &names)) {
+            if (!funbce.bindings.getLocalNameArray(cx, &names)) {
                 fn = NULL;
             } else {
                 for (uintN i = 0; i < nargs; i++) {
-                    if (!DefineArg(fn, names[i], i, &funcg)) {
+                    if (!DefineArg(fn, names[i], i, &funbce)) {
                         fn = NULL;
                         break;
                     }
                 }
             }
         }
     }
 
     /*
      * Farble the body so that it looks like a block statement to EmitTree,
-     * which is called from EmitFunctionBody (see BytecodeGenerator.cpp).
+     * which is called from EmitFunctionBody (see BytecodeEmitter.cpp).
      * After we're done parsing, we must fold constants, analyze any nested
      * functions, and generate code for this function, including a stop opcode
      * at the end.
      */
     tokenStream.mungeCurrentToken(TOK_LC);
     ParseNode *pn = fn ? parser.functionBody() : NULL;
     if (pn) {
-        if (!CheckStrictParameters(cx, &funcg)) {
+        if (!CheckStrictParameters(cx, &funbce)) {
             pn = NULL;
         } else if (!tokenStream.matchToken(TOK_EOF)) {
             parser.reportErrorNumber(NULL, JSREPORT_ERROR, JSMSG_SYNTAX_ERROR);
             pn = NULL;
-        } else if (!FoldConstants(cx, pn, &funcg)) {
+        } else if (!FoldConstants(cx, pn, &funbce)) {
             /* FoldConstants reported the error already. */
             pn = NULL;
-        } else if (!parser.analyzeFunctions(&funcg)) {
+        } else if (!parser.analyzeFunctions(&funbce)) {
             pn = NULL;
         } else {
             if (fn->pn_body) {
                 JS_ASSERT(fn->pn_body->isKind(TOK_ARGSBODY));
                 fn->pn_body->append(pn);
                 fn->pn_body->pn_pos = pn->pn_pos;
                 pn = fn->pn_body;
             }
 
-            if (!EmitFunctionScript(cx, &funcg, pn))
+            if (!EmitFunctionScript(cx, &funbce, pn))
                 pn = NULL;
         }
     }
 
     return pn != NULL;
 }
rename from js/src/frontend/BytecodeGenerator.cpp
rename to js/src/frontend/BytecodeEmitter.cpp
--- a/js/src/frontend/BytecodeGenerator.cpp
+++ b/js/src/frontend/BytecodeEmitter.cpp
@@ -60,17 +60,17 @@
 #include "jsnum.h"
 #include "jsopcode.h"
 #include "jsscope.h"
 #include "jsscript.h"
 #include "jsautooplen.h"        // generated headers last
 
 #include "ds/LifoAlloc.h"
 #include "frontend/BytecodeCompiler.h"
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/Parser.h"
 #include "frontend/TokenStream.h"
 #include "vm/RegExpObject.h"
 
 #include "jsatominlines.h"
 #include "jsobjinlines.h"
 #include "jsscopeinlines.h"
 #include "jsscriptinlines.h"
@@ -89,35 +89,35 @@ using namespace js;
 using namespace js::gc;
 using namespace js::frontend;
 
 #ifdef JS_TRACER
 extern uint8 js_opcode2extra[];
 #endif
 
 static JSBool
-NewTryNote(JSContext *cx, CodeGenerator *cg, JSTryNoteKind kind, uintN stackDepth,
+NewTryNote(JSContext *cx, BytecodeEmitter *bce, JSTryNoteKind kind, uintN stackDepth,
            size_t start, size_t end);
 
 static bool
-EmitIndexOp(JSContext *cx, JSOp op, uintN index, CodeGenerator *cg, JSOp *psuffix = NULL);
+EmitIndexOp(JSContext *cx, JSOp op, uintN index, BytecodeEmitter *bce, JSOp *psuffix = NULL);
 
 static JSBool
-EmitLeaveBlock(JSContext *cx, CodeGenerator *cg, JSOp op, ObjectBox *box);
+EmitLeaveBlock(JSContext *cx, BytecodeEmitter *bce, JSOp op, ObjectBox *box);
 
 static JSBool
-SetSrcNoteOffset(JSContext *cx, CodeGenerator *cg, uintN index, uintN which, ptrdiff_t offset);
+SetSrcNoteOffset(JSContext *cx, BytecodeEmitter *bce, uintN index, uintN which, ptrdiff_t offset);
 
 void
 TreeContext::trace(JSTracer *trc)
 {
     bindings.trace(trc);
 }
 
-CodeGenerator::CodeGenerator(Parser *parser, uintN lineno)
+BytecodeEmitter::BytecodeEmitter(Parser *parser, uintN lineno)
   : TreeContext(parser),
     atomIndices(parser->context),
     stackDepth(0), maxStackDepth(0),
     ntrynotes(0), lastTryNode(NULL),
     spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
     numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
     arrayCompDepth(0),
     emitLevel(0),
@@ -135,43 +135,43 @@ CodeGenerator::CodeGenerator(Parser *par
     flags = TCF_COMPILING;
     memset(&prolog, 0, sizeof prolog);
     memset(&main, 0, sizeof main);
     current = &main;
     firstLine = prolog.currentLine = main.currentLine = lineno;
 }
 
 bool
-CodeGenerator::init(JSContext *cx, TreeContext::InitBehavior ib)
+BytecodeEmitter::init(JSContext *cx, TreeContext::InitBehavior ib)
 {
     roLexdeps.init();
     return TreeContext::init(cx, ib) && constMap.init() && atomIndices.ensureMap(cx);
 }
 
-CodeGenerator::~CodeGenerator()
+BytecodeEmitter::~BytecodeEmitter()
 {
     JSContext *cx = parser->context;
 
     cx->free_(prolog.base);
     cx->free_(prolog.notes);
     cx->free_(main.base);
     cx->free_(main.notes);
 
     /* NB: non-null only after OOM. */
     if (spanDeps)
         cx->free_(spanDeps);
 }
 
 static ptrdiff_t
-EmitCheck(JSContext *cx, CodeGenerator *cg, ptrdiff_t delta)
-{
-    jsbytecode *base = CG_BASE(cg);
+EmitCheck(JSContext *cx, BytecodeEmitter *bce, ptrdiff_t delta)
+{
+    jsbytecode *base = CG_BASE(bce);
     jsbytecode *newbase;
-    jsbytecode *next = CG_NEXT(cg);
-    jsbytecode *limit = CG_LIMIT(cg);
+    jsbytecode *next = CG_NEXT(bce);
+    jsbytecode *limit = CG_LIMIT(bce);
     ptrdiff_t offset = next - base;
     size_t minlength = offset + delta;
 
     if (next + delta > limit) {
         size_t newlength;
         if (!base) {
             JS_ASSERT(!next && !limit);
             newlength = BYTECODE_CHUNK_LENGTH;
@@ -185,170 +185,170 @@ EmitCheck(JSContext *cx, CodeGenerator *
                 newlength = RoundUpPow2(minlength);
             newbase = (jsbytecode *) cx->realloc_(base, BYTECODE_SIZE(newlength));
         }
         if (!newbase) {
             js_ReportOutOfMemory(cx);
             return -1;
         }
         JS_ASSERT(newlength >= size_t(offset + delta));
-        CG_BASE(cg) = newbase;
-        CG_LIMIT(cg) = newbase + newlength;
-        CG_NEXT(cg) = newbase + offset;
+        CG_BASE(bce) = newbase;
+        CG_LIMIT(bce) = newbase + newlength;
+        CG_NEXT(bce) = newbase + offset;
     }
     return offset;
 }
 
 static void
-UpdateDepth(JSContext *cx, CodeGenerator *cg, ptrdiff_t target)
+UpdateDepth(JSContext *cx, BytecodeEmitter *bce, ptrdiff_t target)
 {
     jsbytecode *pc;
     JSOp op;
     const JSCodeSpec *cs;
     uintN extra, nuses;
     intN ndefs;
 
-    pc = CG_CODE(cg, target);
+    pc = CG_CODE(bce, target);
     op = (JSOp) *pc;
     cs = &js_CodeSpec[op];
 #ifdef JS_TRACER
     extra = js_opcode2extra[op];
 #else
     extra = 0;
 #endif
     if ((cs->format & JOF_TMPSLOT_MASK) || extra) {
-        uintN depth = (uintN) cg->stackDepth +
+        uintN depth = (uintN) bce->stackDepth +
                       ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT) +
                       extra;
-        if (depth > cg->maxStackDepth)
-            cg->maxStackDepth = depth;
+        if (depth > bce->maxStackDepth)
+            bce->maxStackDepth = depth;
     }
 
     nuses = js_GetStackUses(cs, op, pc);
-    cg->stackDepth -= nuses;
-    JS_ASSERT(cg->stackDepth >= 0);
-    if (cg->stackDepth < 0) {
+    bce->stackDepth -= nuses;
+    JS_ASSERT(bce->stackDepth >= 0);
+    if (bce->stackDepth < 0) {
         char numBuf[12];
         TokenStream *ts;
 
         JS_snprintf(numBuf, sizeof numBuf, "%d", target);
-        ts = &cg->parser->tokenStream;
+        ts = &bce->parser->tokenStream;
         JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
                                      js_GetErrorMessage, NULL,
                                      JSMSG_STACK_UNDERFLOW,
                                      ts->getFilename() ? ts->getFilename() : "stdin",
                                      numBuf);
     }
     ndefs = cs->ndefs;
     if (ndefs < 0) {
         JSObject *blockObj;
 
         /* We just executed IndexParsedObject */
         JS_ASSERT(op == JSOP_ENTERBLOCK);
         JS_ASSERT(nuses == 0);
-        blockObj = cg->objectList.lastbox->object;
+        blockObj = bce->objectList.lastbox->object;
         JS_ASSERT(blockObj->isStaticBlock());
         JS_ASSERT(blockObj->getSlot(JSSLOT_BLOCK_DEPTH).isUndefined());
 
-        OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
+        OBJ_SET_BLOCK_DEPTH(cx, blockObj, bce->stackDepth);
         ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
     }
-    cg->stackDepth += ndefs;
-    if ((uintN)cg->stackDepth > cg->maxStackDepth)
-        cg->maxStackDepth = cg->stackDepth;
+    bce->stackDepth += ndefs;
+    if ((uintN)bce->stackDepth > bce->maxStackDepth)
+        bce->maxStackDepth = bce->stackDepth;
 }
 
 static inline void
-UpdateDecomposeLength(CodeGenerator *cg, uintN start)
-{
-    uintN end = CG_OFFSET(cg);
+UpdateDecomposeLength(BytecodeEmitter *bce, uintN start)
+{
+    uintN end = CG_OFFSET(bce);
     JS_ASSERT(uintN(end - start) < 256);
-    CG_CODE(cg, start)[-1] = end - start;
+    CG_CODE(bce, start)[-1] = end - start;
 }
 
 ptrdiff_t
-frontend::Emit1(JSContext *cx, CodeGenerator *cg, JSOp op)
-{
-    ptrdiff_t offset = EmitCheck(cx, cg, 1);
+frontend::Emit1(JSContext *cx, BytecodeEmitter *bce, JSOp op)
+{
+    ptrdiff_t offset = EmitCheck(cx, bce, 1);
 
     if (offset >= 0) {
-        *CG_NEXT(cg)++ = (jsbytecode)op;
-        UpdateDepth(cx, cg, offset);
+        *CG_NEXT(bce)++ = (jsbytecode)op;
+        UpdateDepth(cx, bce, offset);
     }
     return offset;
 }
 
 ptrdiff_t
-frontend::Emit2(JSContext *cx, CodeGenerator *cg, JSOp op, jsbytecode op1)
-{
-    ptrdiff_t offset = EmitCheck(cx, cg, 2);
+frontend::Emit2(JSContext *cx, BytecodeEmitter *bce, JSOp op, jsbytecode op1)
+{
+    ptrdiff_t offset = EmitCheck(cx, bce, 2);
 
     if (offset >= 0) {
-        jsbytecode *next = CG_NEXT(cg);
+        jsbytecode *next = CG_NEXT(bce);
         next[0] = (jsbytecode)op;
         next[1] = op1;
-        CG_NEXT(cg) = next + 2;
-        UpdateDepth(cx, cg, offset);
+        CG_NEXT(bce) = next + 2;
+        UpdateDepth(cx, bce, offset);
     }
     return offset;
 }
 
 ptrdiff_t
-frontend::Emit3(JSContext *cx, CodeGenerator *cg, JSOp op, jsbytecode op1,
+frontend::Emit3(JSContext *cx, BytecodeEmitter *bce, JSOp op, jsbytecode op1,
                     jsbytecode op2)
 {
-    ptrdiff_t offset = EmitCheck(cx, cg, 3);
+    ptrdiff_t offset = EmitCheck(cx, bce, 3);
 
     if (offset >= 0) {
-        jsbytecode *next = CG_NEXT(cg);
+        jsbytecode *next = CG_NEXT(bce);
         next[0] = (jsbytecode)op;
         next[1] = op1;
         next[2] = op2;
-        CG_NEXT(cg) = next + 3;
-        UpdateDepth(cx, cg, offset);
+        CG_NEXT(bce) = next + 3;
+        UpdateDepth(cx, bce, offset);
     }
     return offset;
 }
 
 ptrdiff_t
-frontend::Emit5(JSContext *cx, CodeGenerator *cg, JSOp op, uint16 op1, uint16 op2)
-{
-    ptrdiff_t offset = EmitCheck(cx, cg, 5);
+frontend::Emit5(JSContext *cx, BytecodeEmitter *bce, JSOp op, uint16 op1, uint16 op2)
+{
+    ptrdiff_t offset = EmitCheck(cx, bce, 5);
 
     if (offset >= 0) {
-        jsbytecode *next = CG_NEXT(cg);
+        jsbytecode *next = CG_NEXT(bce);
         next[0] = (jsbytecode)op;
         next[1] = UINT16_HI(op1);
         next[2] = UINT16_LO(op1);
         next[3] = UINT16_HI(op2);
         next[4] = UINT16_LO(op2);
-        CG_NEXT(cg) = next + 5;
-        UpdateDepth(cx, cg, offset);
+        CG_NEXT(bce) = next + 5;
+        UpdateDepth(cx, bce, offset);
     }
     return offset;
 }
 
 ptrdiff_t
-frontend::EmitN(JSContext *cx, CodeGenerator *cg, JSOp op, size_t extra)
+frontend::EmitN(JSContext *cx, BytecodeEmitter *bce, JSOp op, size_t extra)
 {
     ptrdiff_t length = 1 + (ptrdiff_t)extra;
-    ptrdiff_t offset = EmitCheck(cx, cg, length);
+    ptrdiff_t offset = EmitCheck(cx, bce, length);
 
     if (offset >= 0) {
-        jsbytecode *next = CG_NEXT(cg);
+        jsbytecode *next = CG_NEXT(bce);
         *next = (jsbytecode)op;
         memset(next + 1, 0, BYTECODE_SIZE(extra));
-        CG_NEXT(cg) = next + length;
+        CG_NEXT(bce) = next + length;
 
         /*
          * Don't UpdateDepth if op's use-count comes from the immediate
          * operand yet to be stored in the extra bytes after op.
          */
         if (js_CodeSpec[op].nuses >= 0)
-            UpdateDepth(cx, cg, offset);
+            UpdateDepth(cx, bce, offset);
     }
     return offset;
 }
 
 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
 const char js_with_statement_str[] = "with statement";
 const char js_finally_block_str[]  = "finally block";
 const char js_script_str[]         = "script";
@@ -369,28 +369,28 @@ static const char *statementName[] = {
     "for loop",              /* FOR_LOOP */
     "for/in loop",           /* FOR_IN_LOOP */
     "while loop",            /* WHILE_LOOP */
 };
 
 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);
 
 static const char *
-StatementName(CodeGenerator *cg)
-{
-    if (!cg->topStmt)
+StatementName(BytecodeEmitter *bce)
+{
+    if (!bce->topStmt)
         return js_script_str;
-    return statementName[cg->topStmt->type];
+    return statementName[bce->topStmt->type];
 }
 
 static void
-ReportStatementTooLarge(JSContext *cx, CodeGenerator *cg)
+ReportStatementTooLarge(JSContext *cx, BytecodeEmitter *bce)
 {
     JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
-                         StatementName(cg));
+                         StatementName(bce));
 }
 
 /**
   Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
   and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
   into unconditional (gotos and gosubs), and conditional jumps or branches
   (which pop a value, test it, and jump depending on its value).  Most jumps
   have just one immediate operand, a signed offset from the jump opcode's pc
@@ -405,34 +405,34 @@ ReportStatementTooLarge(JSContext *cx, C
   formats have 32-bit signed immediates.  The span-dependency problem consists
   of selecting as few long instructions as possible, or about as few -- since
   jumps can span other jumps, extending one jump may cause another to need to
   be extended.
 
   Most JS scripts are short, so need no extended jumps.  We optimize for this
   case by generating short jumps until we know a long jump is needed.  After
   that point, we keep generating short jumps, but each jump's 16-bit immediate
-  offset operand is actually an unsigned index into cg->spanDeps, an array of
+  offset operand is actually an unsigned index into bce->spanDeps, an array of
   js::SpanDep structs.  Each struct tells the top offset in the script of the
   opcode, the "before" offset of the jump (which will be the same as top for
   simplex jumps, but which will index further into the bytecode array for a
   non-initial jump offset in a lookup or table switch), the after "offset"
   adjusted during span-dependent instruction selection (initially the same
   value as the "before" offset), and the jump target (more below).
 
-  Since we generate cg->spanDeps lazily, from within SetJumpOffset, we must
+  Since we generate bce->spanDeps lazily, from within SetJumpOffset, we must
   ensure that all bytecode generated so far can be inspected to discover where
-  the jump offset immediate operands lie within CG_CODE(cg).  But the bonus is
+  the jump offset immediate operands lie within CG_CODE(bce).  But the bonus is
   that we generate span-dependency records sorted by their offsets, so we can
   binary-search when trying to find a SpanDep for a given bytecode offset, or
   the nearest SpanDep at or above a given pc.
 
-  To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
+  To avoid limiting scripts to 64K jumps, if the bce->spanDeps index overflows
   65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand.  This
-  tells us that we need to binary-search for the cg->spanDeps entry by the
+  tells us that we need to binary-search for the bce->spanDeps entry by the
   jump opcode's bytecode offset (sd->before).
 
   Jump targets need to be maintained in a data structure that lets us look
   up an already-known target by its address (jumps may have a common target),
   and that also lets us update the addresses (script-relative, a.k.a. absolute
   offsets) of targets that come after a jump target (for when a jump below
   that target needs to be extended).  We use an AVL tree, implemented using
   recursion, but with some tricky optimizations to its height-balancing code
@@ -442,17 +442,17 @@ ReportStatementTooLarge(JSContext *cx, C
   positive sign, even though they link "backward" (i.e., toward lower bytecode
   address).  We don't want to waste space and search time in the AVL tree for
   such temporary backpatch deltas, so we use a single-bit wildcard scheme to
   tag true JumpTarget pointers and encode untagged, signed (positive) deltas in
   SpanDep::target pointers, depending on whether the SpanDep has a known
   target, or is still awaiting backpatching.
 
   Note that backpatch chains would present a problem for BuildSpanDepTable,
-  which inspects bytecode to build cg->spanDeps on demand, when the first
+  which inspects bytecode to build bce->spanDeps on demand, when the first
   short jump offset overflows.  To solve this temporary problem, we emit a
   proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
   nuses/ndefs counts help keep the stack balanced, but whose opcode format
   distinguishes its backpatch delta immediate operand from a normal jump
   offset.
  */
 static int
 BalanceJumpTargets(JumpTarget **jtp)
@@ -498,42 +498,42 @@ BalanceJumpTargets(JumpTarget **jtp)
         jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
     }
 
     return heightChanged;
 }
 
 struct AddJumpTargetArgs {
     JSContext           *cx;
-    CodeGenerator       *cg;
+    BytecodeEmitter     *bce;
     ptrdiff_t           offset;
     JumpTarget          *node;
 };
 
 static int
 AddJumpTarget(AddJumpTargetArgs *args, JumpTarget **jtp)
 {
     JumpTarget *jt = *jtp;
     if (!jt) {
-        CodeGenerator *cg = args->cg;
-
-        jt = cg->jtFreeList;
+        BytecodeEmitter *bce = args->bce;
+
+        jt = bce->jtFreeList;
         if (jt) {
-            cg->jtFreeList = jt->kids[JT_LEFT];
+            bce->jtFreeList = jt->kids[JT_LEFT];
         } else {
             jt = args->cx->tempLifoAlloc().new_<JumpTarget>();
             if (!jt) {
                 js_ReportOutOfMemory(args->cx);
                 return 0;
             }
         }
         jt->offset = args->offset;
         jt->balance = 0;
         jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
-        cg->numJumpTargets++;
+        bce->numJumpTargets++;
         args->node = jt;
         *jtp = jt;
         return 1;
     }
 
     if (jt->offset == args->offset) {
         args->node = jt;
         return 0;
@@ -564,109 +564,109 @@ AVLCheck(JumpTarget *jt)
     lh = AVLCheck(jt->kids[JT_LEFT]);
     rh = AVLCheck(jt->kids[JT_RIGHT]);
     JS_ASSERT(jt->balance == rh - lh);
     return 1 + JS_MAX(lh, rh);
 }
 #endif
 
 static JSBool
-SetSpanDepTarget(JSContext *cx, CodeGenerator *cg, SpanDep *sd, ptrdiff_t off)
+SetSpanDepTarget(JSContext *cx, BytecodeEmitter *bce, SpanDep *sd, ptrdiff_t off)
 {
     AddJumpTargetArgs args;
 
     if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
-        ReportStatementTooLarge(cx, cg);
+        ReportStatementTooLarge(cx, bce);
         return JS_FALSE;
     }
 
     args.cx = cx;
-    args.cg = cg;
+    args.bce = bce;
     args.offset = sd->top + off;
     args.node = NULL;
-    AddJumpTarget(&args, &cg->jumpTargets);
+    AddJumpTarget(&args, &bce->jumpTargets);
     if (!args.node)
         return JS_FALSE;
 
 #ifdef DEBUG_brendan
-    AVLCheck(cg->jumpTargets);
+    AVLCheck(bce->jumpTargets);
 #endif
 
     SD_SET_TARGET(sd, args.node);
     return JS_TRUE;
 }
 
 #define SPANDEPS_MIN            256
 #define SPANDEPS_SIZE(n)        ((n) * sizeof(js::SpanDep))
 #define SPANDEPS_SIZE_MIN       SPANDEPS_SIZE(SPANDEPS_MIN)
 
 static JSBool
-AddSpanDep(JSContext *cx, CodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2, ptrdiff_t off)
-{
-    uintN index = cg->numSpanDeps;
+AddSpanDep(JSContext *cx, BytecodeEmitter *bce, jsbytecode *pc, jsbytecode *pc2, ptrdiff_t off)
+{
+    uintN index = bce->numSpanDeps;
     if (index + 1 == 0) {
-        ReportStatementTooLarge(cx, cg);
+        ReportStatementTooLarge(cx, bce);
         return JS_FALSE;
     }
 
     SpanDep *sdbase;
     if ((index & (index - 1)) == 0 &&
-        (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN))
+        (!(sdbase = bce->spanDeps) || index >= SPANDEPS_MIN))
     {
         size_t size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
         sdbase = (SpanDep *) cx->realloc_(sdbase, size + size);
         if (!sdbase)
             return JS_FALSE;
-        cg->spanDeps = sdbase;
-    }
-
-    cg->numSpanDeps = index + 1;
-    SpanDep *sd = cg->spanDeps + index;
-    sd->top = pc - CG_BASE(cg);
-    sd->offset = sd->before = pc2 - CG_BASE(cg);
+        bce->spanDeps = sdbase;
+    }
+
+    bce->numSpanDeps = index + 1;
+    SpanDep *sd = bce->spanDeps + index;
+    sd->top = pc - CG_BASE(bce);
+    sd->offset = sd->before = pc2 - CG_BASE(bce);
 
     if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
         /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
         if (off != 0) {
             JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
             if (off > BPDELTA_MAX) {
-                ReportStatementTooLarge(cx, cg);
+                ReportStatementTooLarge(cx, bce);
                 return JS_FALSE;
             }
         }
         SD_SET_BPDELTA(sd, off);
     } else if (off == 0) {
         /* Jump offset will be patched directly, without backpatch chaining. */
         SD_SET_TARGET(sd, 0);
     } else {
         /* The jump offset in off is non-zero, therefore it's already known. */
-        if (!SetSpanDepTarget(cx, cg, sd, off))
+        if (!SetSpanDepTarget(cx, bce, sd, off))
             return JS_FALSE;
     }
 
     if (index > SPANDEP_INDEX_MAX)
         index = SPANDEP_INDEX_HUGE;
     SET_SPANDEP_INDEX(pc2, index);
     return JS_TRUE;
 }
 
 static jsbytecode *
-AddSwitchSpanDeps(JSContext *cx, CodeGenerator *cg, jsbytecode *pc)
+AddSwitchSpanDeps(JSContext *cx, BytecodeEmitter *bce, jsbytecode *pc)
 {
     JSOp op;
     jsbytecode *pc2;
     ptrdiff_t off;
     jsint low, high;
     uintN njumps, indexlen;
 
     op = (JSOp) *pc;
     JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
     pc2 = pc;
     off = GET_JUMP_OFFSET(pc2);
-    if (!AddSpanDep(cx, cg, pc, pc2, off))
+    if (!AddSpanDep(cx, bce, pc, pc2, off))
         return NULL;
     pc2 += JUMP_OFFSET_LEN;
     if (op == JSOP_TABLESWITCH) {
         low = GET_JUMP_OFFSET(pc2);
         pc2 += JUMP_OFFSET_LEN;
         high = GET_JUMP_OFFSET(pc2);
         pc2 += JUMP_OFFSET_LEN;
         njumps = (uintN) (high - low + 1);
@@ -675,110 +675,110 @@ AddSwitchSpanDeps(JSContext *cx, CodeGen
         njumps = GET_UINT16(pc2);
         pc2 += UINT16_LEN;
         indexlen = INDEX_LEN;
     }
     while (njumps) {
         --njumps;
         pc2 += indexlen;
         off = GET_JUMP_OFFSET(pc2);
-        if (!AddSpanDep(cx, cg, pc, pc2, off))
+        if (!AddSpanDep(cx, bce, pc, pc2, off))
             return NULL;
         pc2 += JUMP_OFFSET_LEN;
     }
     return 1 + pc2;
 }
 
 static JSBool
-BuildSpanDepTable(JSContext *cx, CodeGenerator *cg)
+BuildSpanDepTable(JSContext *cx, BytecodeEmitter *bce)
 {
     jsbytecode *pc, *end;
     JSOp op;
     const JSCodeSpec *cs;
     ptrdiff_t off;
 
-    pc = CG_BASE(cg) + cg->spanDepTodo;
-    end = CG_NEXT(cg);
+    pc = CG_BASE(bce) + bce->spanDepTodo;
+    end = CG_NEXT(bce);
     while (pc != end) {
         JS_ASSERT(pc < end);
         op = (JSOp)*pc;
         cs = &js_CodeSpec[op];
 
         switch (JOF_TYPE(cs->format)) {
           case JOF_TABLESWITCH:
           case JOF_LOOKUPSWITCH:
-            pc = AddSwitchSpanDeps(cx, cg, pc);
+            pc = AddSwitchSpanDeps(cx, bce, pc);
             if (!pc)
                 return JS_FALSE;
             break;
 
           case JOF_JUMP:
             off = GET_JUMP_OFFSET(pc);
-            if (!AddSpanDep(cx, cg, pc, pc, off))
+            if (!AddSpanDep(cx, bce, pc, pc, off))
                 return JS_FALSE;
             /* FALL THROUGH */
           default:
             pc += cs->length;
             break;
         }
     }
 
     return JS_TRUE;
 }
 
 static SpanDep *
-GetSpanDep(CodeGenerator *cg, jsbytecode *pc)
+GetSpanDep(BytecodeEmitter *bce, jsbytecode *pc)
 {
     uintN index;
     ptrdiff_t offset;
     int lo, hi, mid;
     SpanDep *sd;
 
     index = GET_SPANDEP_INDEX(pc);
     if (index != SPANDEP_INDEX_HUGE)
-        return cg->spanDeps + index;
-
-    offset = pc - CG_BASE(cg);
+        return bce->spanDeps + index;
+
+    offset = pc - CG_BASE(bce);
     lo = 0;
-    hi = cg->numSpanDeps - 1;
+    hi = bce->numSpanDeps - 1;
     while (lo <= hi) {
         mid = (lo + hi) / 2;
-        sd = cg->spanDeps + mid;
+        sd = bce->spanDeps + mid;
         if (sd->before == offset)
             return sd;
         if (sd->before < offset)
             lo = mid + 1;
         else
             hi = mid - 1;
     }
 
     JS_ASSERT(0);
     return NULL;
 }
 
 static JSBool
-SetBackPatchDelta(JSContext *cx, CodeGenerator *cg, jsbytecode *pc, ptrdiff_t delta)
+SetBackPatchDelta(JSContext *cx, BytecodeEmitter *bce, jsbytecode *pc, ptrdiff_t delta)
 {
     SpanDep *sd;
 
     JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
-    if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
+    if (!bce->spanDeps && delta < JUMP_OFFSET_MAX) {
         SET_JUMP_OFFSET(pc, delta);
         return JS_TRUE;
     }
 
     if (delta > BPDELTA_MAX) {
-        ReportStatementTooLarge(cx, cg);
+        ReportStatementTooLarge(cx, bce);
         return JS_FALSE;
     }
 
-    if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
+    if (!bce->spanDeps && !BuildSpanDepTable(cx, bce))
         return JS_FALSE;
 
-    sd = GetSpanDep(cg, pc);
+    sd = GetSpanDep(bce, pc);
     JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
     SD_SET_BPDELTA(sd, delta);
     return JS_TRUE;
 }
 
 static void
 UpdateJumpTargets(JumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
 {
@@ -787,22 +787,22 @@ UpdateJumpTargets(JumpTarget *jt, ptrdif
         if (jt->kids[JT_LEFT])
             UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
     }
     if (jt->kids[JT_RIGHT])
         UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
 }
 
 static SpanDep *
-FindNearestSpanDep(CodeGenerator *cg, ptrdiff_t offset, int lo, SpanDep *guard)
-{
-    int num = cg->numSpanDeps;
+FindNearestSpanDep(BytecodeEmitter *bce, ptrdiff_t offset, int lo, SpanDep *guard)
+{
+    int num = bce->numSpanDeps;
     JS_ASSERT(num > 0);
     int hi = num - 1;
-    SpanDep *sdbase = cg->spanDeps;
+    SpanDep *sdbase = bce->spanDeps;
     while (lo <= hi) {
         int mid = (lo + hi) / 2;
         SpanDep *sd = sdbase + mid;
         if (sd->before == offset)
             return sd;
         if (sd->before < offset)
             lo = mid + 1;
         else
@@ -811,45 +811,45 @@ FindNearestSpanDep(CodeGenerator *cg, pt
     if (lo == num)
         return guard;
     SpanDep *sd = sdbase + lo;
     JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
     return sd;
 }
 
 static void
-FreeJumpTargets(CodeGenerator *cg, JumpTarget *jt)
+FreeJumpTargets(BytecodeEmitter *bce, JumpTarget *jt)
 {
     if (jt->kids[JT_LEFT])
-        FreeJumpTargets(cg, jt->kids[JT_LEFT]);
+        FreeJumpTargets(bce, jt->kids[JT_LEFT]);
     if (jt->kids[JT_RIGHT])
-        FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
-    jt->kids[JT_LEFT] = cg->jtFreeList;
-    cg->jtFreeList = jt;
+        FreeJumpTargets(bce, jt->kids[JT_RIGHT]);
+    jt->kids[JT_LEFT] = bce->jtFreeList;
+    bce->jtFreeList = jt;
 }
 
 static JSBool
-OptimizeSpanDeps(JSContext *cx, CodeGenerator *cg)
+OptimizeSpanDeps(JSContext *cx, BytecodeEmitter *bce)
 {
     jsbytecode *pc, *oldpc, *base, *limit, *next;
     SpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
     ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
     JSBool done;
     JSOp op;
     uint32 type;
     jssrcnote *sn, *snlimit;
     JSSrcNoteSpec *spec;
     uintN i, n, noteIndex;
     TryNode *tryNode;
     DebugOnly<int> passes = 0;
 
-    base = CG_BASE(cg);
-    sdbase = cg->spanDeps;
-    sdlimit = sdbase + cg->numSpanDeps;
-    offset = CG_OFFSET(cg);
+    base = CG_BASE(bce);
+    sdbase = bce->spanDeps;
+    sdlimit = sdbase + bce->numSpanDeps;
+    offset = CG_OFFSET(bce);
     growth = 0;
 
     do {
         done = JS_TRUE;
         delta = 0;
         top = pivot = -1;
         sdtop = NULL;
         pc = NULL;
@@ -871,17 +871,17 @@ OptimizeSpanDeps(JSContext *cx, CodeGene
                 pc = base + top;
                 op = (JSOp) *pc;
                 type = JOF_OPTYPE(op);
                 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                     /*
                      * We already extended all the jump offset operands for
                      * the opcode at sd->top.  Jumps and branches have only
                      * one jump offset operand, but switches have many, all
-                     * of which are adjacent in cg->spanDeps.
+                     * of which are adjacent in bce->spanDeps.
                      */
                     continue;
                 }
 
                 JS_ASSERT(type == JOF_JUMP ||
                           type == JOF_TABLESWITCH ||
                           type == JOF_LOOKUPSWITCH);
             }
@@ -900,17 +900,17 @@ OptimizeSpanDeps(JSContext *cx, CodeGene
                       case JSOP_OR:           op = JSOP_ORX; break;
                       case JSOP_AND:          op = JSOP_ANDX; break;
                       case JSOP_GOSUB:        op = JSOP_GOSUBX; break;
                       case JSOP_CASE:         op = JSOP_CASEX; break;
                       case JSOP_DEFAULT:      op = JSOP_DEFAULTX; break;
                       case JSOP_TABLESWITCH:  op = JSOP_TABLESWITCHX; break;
                       case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
                       default:
-                        ReportStatementTooLarge(cx, cg);
+                        ReportStatementTooLarge(cx, bce);
                         return JS_FALSE;
                     }
                     *pc = (jsbytecode) op;
 
                     for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
                         if (sd2 <= sd) {
                             /*
                              * sd2->offset already includes delta as it stood
@@ -930,54 +930,54 @@ OptimizeSpanDeps(JSContext *cx, CodeGene
                              * sd2 comes after sd, and won't be revisited by
                              * the outer for loop, so we have to increase its
                              * offset by delta, not merely by deltaFromTop.
                              */
                             sd2->offset += delta;
                         }
 
                         delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
-                        UpdateJumpTargets(cg->jumpTargets, sd2->offset,
+                        UpdateJumpTargets(bce->jumpTargets, sd2->offset,
                                           JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                     }
                     sd = sd2 - 1;
                 }
             }
         }
 
         growth += delta;
     } while (!done);
 
     if (growth) {
 #ifdef DEBUG_brendan
-        TokenStream *ts = &cg->parser->tokenStream;
+        TokenStream *ts = &bce->parser->tokenStream;
 
         printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
-               ts->filename ? ts->filename : "stdin", cg->firstLine,
-               growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
+               ts->filename ? ts->filename : "stdin", bce->firstLine,
+               growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), bce->numSpanDeps,
                passes, offset + growth, offset, growth);
 #endif
 
         /*
          * Ensure that we have room for the extended jumps, but don't round up
          * to a power of two -- we're done generating code, so we cut to fit.
          */
-        limit = CG_LIMIT(cg);
+        limit = CG_LIMIT(bce);
         length = offset + growth;
         next = base + length;
         if (next > limit) {
             base = (jsbytecode *) cx->realloc_(base, BYTECODE_SIZE(length));
             if (!base) {
                 js_ReportOutOfMemory(cx);
                 return JS_FALSE;
             }
-            CG_BASE(cg) = base;
-            CG_LIMIT(cg) = next = base + length;
-        }
-        CG_NEXT(cg) = next;
+            CG_BASE(bce) = base;
+            CG_LIMIT(bce) = next = base + length;
+        }
+        CG_NEXT(bce) = next;
 
         /*
          * Set up a fake span dependency record to guard the end of the code
          * being generated.  This guard record is returned as a fencepost by
          * FindNearestSpanDep if there is no real spandep at or above a given
          * unextended code offset.
          */
         guard.top = -1;
@@ -1009,17 +1009,17 @@ OptimizeSpanDeps(JSContext *cx, CodeGene
             JS_ASSERT(top == sd2->before);
         }
 
         oldpc = base + sd->before;
         span = SD_SPAN(sd, pivot);
 
         /*
          * If this jump didn't need to be extended, restore its span immediate
-         * offset operand now, overwriting the index of sd within cg->spanDeps
+         * offset operand now, overwriting the index of sd within bce->spanDeps
          * that was stored temporarily after *pc when BuildSpanDepTable ran.
          *
          * Note that span might fit in 16 bits even for an extended jump op,
          * if the op has multiple span operands, not all of which overflowed
          * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
          * range for a short jump, but others are not).
          */
         if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
@@ -1062,17 +1062,17 @@ OptimizeSpanDeps(JSContext *cx, CodeGene
          * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
          * at each sd that moved.  The future may bring different offset sizes
          * for span-dependent instruction operands.  However, we fix only main
          * notes here, not prolog notes -- we know that prolog opcodes are not
          * span-dependent, and aren't likely ever to be.
          */
         offset = growth = 0;
         sd = sdbase;
-        for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
+        for (sn = bce->main.notes, snlimit = sn + bce->main.noteCount;
              sn < snlimit;
              sn = SN_NEXT(sn)) {
             /*
              * Recall that the offset of a given note includes its delta, and
              * tells the offset of the annotated bytecode from the main entry
              * point of the script.
              */
             offset += SN_DELTA(sn);
@@ -1083,20 +1083,20 @@ OptimizeSpanDeps(JSContext *cx, CodeGene
                  * how many bytes sd's instruction grew.
                  */
                 sd2 = sd + 1;
                 if (sd2 == sdlimit)
                     sd2 = &guard;
                 delta = sd2->offset - (sd2->before + growth);
                 if (delta > 0) {
                     JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
-                    sn = AddToSrcNoteDelta(cx, cg, sn, delta);
+                    sn = AddToSrcNoteDelta(cx, bce, sn, delta);
                     if (!sn)
                         return JS_FALSE;
-                    snlimit = cg->main.notes + cg->main.noteCount;
+                    snlimit = bce->main.notes + bce->main.noteCount;
                     growth += delta;
                 }
                 sd++;
             }
 
             /*
              * If sn has span-dependent offset operands, check whether each
              * covers further span-dependencies, and increase those operands
@@ -1109,62 +1109,62 @@ OptimizeSpanDeps(JSContext *cx, CodeGene
             if (spec->isSpanDep) {
                 pivot = offset + spec->offsetBias;
                 n = spec->arity;
                 for (i = 0; i < n; i++) {
                     span = js_GetSrcNoteOffset(sn, i);
                     if (span == 0)
                         continue;
                     target = pivot + span * spec->isSpanDep;
-                    sd2 = FindNearestSpanDep(cg, target,
+                    sd2 = FindNearestSpanDep(bce, target,
                                              (target >= pivot)
                                              ? sd - sdbase
                                              : 0,
                                              &guard);
 
                     /*
                      * Increase target by sd2's before-vs-after offset delta,
                      * which is absolute (i.e., relative to start of script,
                      * as is target).  Recompute the span by subtracting its
                      * adjusted pivot from target.
                      */
                     target += sd2->offset - sd2->before;
                     span = target - (pivot + growth);
                     span *= spec->isSpanDep;
-                    noteIndex = sn - cg->main.notes;
-                    if (!SetSrcNoteOffset(cx, cg, noteIndex, i, span))
+                    noteIndex = sn - bce->main.notes;
+                    if (!SetSrcNoteOffset(cx, bce, noteIndex, i, span))
                         return JS_FALSE;
-                    sn = cg->main.notes + noteIndex;
-                    snlimit = cg->main.notes + cg->main.noteCount;
+                    sn = bce->main.notes + noteIndex;
+                    snlimit = bce->main.notes + bce->main.noteCount;
                 }
             }
         }
-        cg->main.lastNoteOffset += growth;
+        bce->main.lastNoteOffset += growth;
 
         /*
          * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
          * not clear how we can beat that).
          */
-        for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
+        for (tryNode = bce->lastTryNode; tryNode; tryNode = tryNode->prev) {
             /*
              * First, look for the nearest span dependency at/above tn->start.
              * There may not be any such spandep, in which case the guard will
              * be returned.
              */
             offset = tryNode->note.start;
-            sd = FindNearestSpanDep(cg, offset, 0, &guard);
+            sd = FindNearestSpanDep(bce, offset, 0, &guard);
             delta = sd->offset - sd->before;
             tryNode->note.start = offset + delta;
 
             /*
              * Next, find the nearest spandep at/above tn->start + tn->length.
              * Use its delta minus tn->start's delta to increase tn->length.
              */
             length = tryNode->note.length;
-            sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
+            sd2 = FindNearestSpanDep(bce, offset + length, sd - sdbase, &guard);
             if (sd2 != sd) {
                 tryNode->note.length =
                     length + sd2->offset - sd2->before - delta;
             }
         }
     }
 
 #ifdef DEBUG_brendan
@@ -1207,81 +1207,81 @@ OptimizeSpanDeps(JSContext *cx, CodeGene
         }
         JS_ASSERT(SD_SPAN(sd, pivot) == span);
     }
     JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
   }
 #endif
 
     /*
-     * Reset so we optimize at most once -- cg may be used for further code
+     * Reset so we optimize at most once -- bce may be used for further code
      * generation of successive, independent, top-level statements.  No jump
      * can span top-level statements, because JS lacks goto.
      */
-    cx->free_(cg->spanDeps);
-    cg->spanDeps = NULL;
-    FreeJumpTargets(cg, cg->jumpTargets);
-    cg->jumpTargets = NULL;
-    cg->numSpanDeps = cg->numJumpTargets = 0;
-    cg->spanDepTodo = CG_OFFSET(cg);
+    cx->free_(bce->spanDeps);
+    bce->spanDeps = NULL;
+    FreeJumpTargets(bce, bce->jumpTargets);
+    bce->jumpTargets = NULL;
+    bce->numSpanDeps = bce->numJumpTargets = 0;
+    bce->spanDepTodo = CG_OFFSET(bce);
     return JS_TRUE;
 }
 
 static ptrdiff_t
-EmitJump(JSContext *cx, CodeGenerator *cg, JSOp op, ptrdiff_t off)
+EmitJump(JSContext *cx, BytecodeEmitter *bce, JSOp op, ptrdiff_t off)
 {
     JSBool extend;
     ptrdiff_t jmp;
     jsbytecode *pc;
 
     extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
-    if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
+    if (extend && !bce->spanDeps && !BuildSpanDepTable(cx, bce))
         return -1;
 
-    jmp = Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
-    if (jmp >= 0 && (extend || cg->spanDeps)) {
-        pc = CG_CODE(cg, jmp);
-        if (!AddSpanDep(cx, cg, pc, pc, off))
+    jmp = Emit3(cx, bce, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
+    if (jmp >= 0 && (extend || bce->spanDeps)) {
+        pc = CG_CODE(bce, jmp);
+        if (!AddSpanDep(cx, bce, pc, pc, off))
             return -1;
     }
     return jmp;
 }
 
 static ptrdiff_t
-GetJumpOffset(CodeGenerator *cg, jsbytecode *pc)
-{
-    if (!cg->spanDeps)
+GetJumpOffset(BytecodeEmitter *bce, jsbytecode *pc)
+{
+    if (!bce->spanDeps)
         return GET_JUMP_OFFSET(pc);
 
-    SpanDep *sd = GetSpanDep(cg, pc);
+    SpanDep *sd = GetSpanDep(bce, pc);
     JumpTarget *jt = sd->target;
     if (!JT_HAS_TAG(jt))
         return JT_TO_BPDELTA(jt);
 
     ptrdiff_t top = sd->top;
-    while (--sd >= cg->spanDeps && sd->top == top)
+    while (--sd >= bce->spanDeps && sd->top == top)
         continue;
     sd++;
     return JT_CLR_TAG(jt)->offset - sd->offset;
 }
 
 JSBool
-frontend::SetJumpOffset(JSContext *cx, CodeGenerator *cg, jsbytecode *pc, ptrdiff_t off)
-{
-    if (!cg->spanDeps) {
+frontend::SetJumpOffset(JSContext *cx, BytecodeEmitter *bce, jsbytecode *pc, ptrdiff_t off)
+{
+    if (!bce->spanDeps) {
         if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
             SET_JUMP_OFFSET(pc, off);
             return JS_TRUE;
         }
 
-        if (!BuildSpanDepTable(cx, cg))
+        if (!BuildSpanDepTable(cx, bce))
             return JS_FALSE;
     }
 
-    return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
+    return SetSpanDepTarget(cx, bce, GetSpanDep(bce, pc), off);
 }
 
 bool
 TreeContext::inStatement(StmtType type)
 {
     for (StmtInfo *stmt = topStmt; stmt; stmt = stmt->down) {
         if (stmt->type == type)
             return true;
@@ -1399,185 +1399,185 @@ frontend::PushBlockScope(TreeContext *tc
     stmt->blockBox = blockBox;
 }
 
 /*
  * Emit a backpatch op with offset pointing to the previous jump of this type,
  * so that we can walk back up the chain fixing up the op and jump offset.
  */
 static ptrdiff_t
-EmitBackPatchOp(JSContext *cx, CodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
+EmitBackPatchOp(JSContext *cx, BytecodeEmitter *bce, JSOp op, ptrdiff_t *lastp)
 {
     ptrdiff_t offset, delta;
 
-    offset = CG_OFFSET(cg);
+    offset = CG_OFFSET(bce);
     delta = offset - *lastp;
     *lastp = offset;
     JS_ASSERT(delta > 0);
-    return EmitJump(cx, cg, op, delta);
+    return EmitJump(cx, bce, op, delta);
 }
 
 /* A macro for inlining at the top of EmitTree (whence it came). */
-#define UPDATE_LINE_NUMBER_NOTES(cx, cg, line)                                \
+#define UPDATE_LINE_NUMBER_NOTES(cx, bce, line)                               \
     JS_BEGIN_MACRO                                                            \
         uintN line_ = (line);                                                 \
-        uintN delta_ = line_ - CG_CURRENT_LINE(cg);                           \
+        uintN delta_ = line_ - CG_CURRENT_LINE(bce);                          \
         if (delta_ != 0) {                                                    \
             /*                                                                \
              * Encode any change in the current source line number by using   \
              * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
              * whichever consumes less space.                                 \
              *                                                                \
              * NB: We handle backward line number deltas (possible with for   \
              * loops where the update part is emitted after the body, but its \
              * line number is <= any line number in the body) here by letting \
              * unsigned delta_ wrap to a very large number, which triggers a  \
              * SRC_SETLINE.                                                   \
              */                                                               \
-            CG_CURRENT_LINE(cg) = line_;                                      \
+            CG_CURRENT_LINE(bce) = line_;                                     \
             if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
-                if (NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)   \
+                if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)line_) < 0)  \
                     return JS_FALSE;                                          \
             } else {                                                          \
                 do {                                                          \
-                    if (NewSrcNote(cx, cg, SRC_NEWLINE) < 0)                  \
+                    if (NewSrcNote(cx, bce, SRC_NEWLINE) < 0)                 \
                         return JS_FALSE;                                      \
                 } while (--delta_ != 0);                                      \
             }                                                                 \
         }                                                                     \
     JS_END_MACRO
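
// Illustrative sketch, not part of this patch: the size heuristic in the macro
// above, which prefers one SRC_SETLINE note once emitting `delta` SRC_NEWLINE
// notes would cost more. The mask constant below is an assumed stand-in for
// SN_3BYTE_OFFSET_MASK.
#include <cstdio>

static const unsigned kToy3ByteOffsetMask = 0x7f;  // assumption for the demo

static bool useSetLine(unsigned currentLine, unsigned newLine) {
    unsigned delta = newLine - currentLine;        // wraps for backward deltas
    unsigned threshold = 2 + ((newLine > kToy3ByteOffsetMask) << 1);
    return delta >= threshold;
}

int main() {
    std::printf("%d\n", useSetLine(10, 11));  // 0: one NEWLINE note is cheaper
    std::printf("%d\n", useSetLine(10, 14));  // 1: four NEWLINE notes would cost more
    std::printf("%d\n", useSetLine(14, 10));  // 1: backward delta wraps, forcing SETLINE
    return 0;
}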
 
 /* A function, so that we avoid macro-bloating all the other callsites. */
 static JSBool
-UpdateLineNumberNotes(JSContext *cx, CodeGenerator *cg, uintN line)
-{
-    UPDATE_LINE_NUMBER_NOTES(cx, cg, line);
+UpdateLineNumberNotes(JSContext *cx, BytecodeEmitter *bce, uintN line)
+{
+    UPDATE_LINE_NUMBER_NOTES(cx, bce, line);
     return JS_TRUE;
 }
 
 static ptrdiff_t
-EmitTraceOp(JSContext *cx, CodeGenerator *cg, ParseNode *nextpn)
+EmitTraceOp(JSContext *cx, BytecodeEmitter *bce, ParseNode *nextpn)
 {
     if (nextpn) {
         /*
          * Try to give the JSOP_TRACE the same line number as the next
          * instruction. nextpn is often a block, in which case the next
          * instruction typically comes from the first statement inside.
          */
         if (nextpn->isKind(TOK_LC) && nextpn->isArity(PN_LIST) && nextpn->pn_head)
             nextpn = nextpn->pn_head;
-        if (!UpdateLineNumberNotes(cx, cg, nextpn->pn_pos.begin.lineno))
+        if (!UpdateLineNumberNotes(cx, bce, nextpn->pn_pos.begin.lineno))
             return -1;
     }
 
-    uint32 index = cg->traceIndex;
+    uint32 index = bce->traceIndex;
     if (index < UINT16_MAX)
-        cg->traceIndex++;
-    return Emit3(cx, cg, JSOP_TRACE, UINT16_HI(index), UINT16_LO(index));
+        bce->traceIndex++;
+    return Emit3(cx, bce, JSOP_TRACE, UINT16_HI(index), UINT16_LO(index));
 }
 
 /*
  * If op is JOF_TYPESET (see the type barriers comment in jsinfer.h), reserve
  * a type set to store its result.
  */
 static inline void
-CheckTypeSet(JSContext *cx, CodeGenerator *cg, JSOp op)
+CheckTypeSet(JSContext *cx, BytecodeEmitter *bce, JSOp op)
 {
     if (js_CodeSpec[op].format & JOF_TYPESET) {
-        if (cg->typesetCount < UINT16_MAX)
-            cg->typesetCount++;
+        if (bce->typesetCount < UINT16_MAX)
+            bce->typesetCount++;
     }
 }
 
 /*
  * Macro to emit a bytecode followed by a uint16 immediate operand stored in
  * big-endian order, used for arg and var numbers as well as for atomIndexes.
- * NB: We use cx and cg from our caller's lexical environment, and return
+ * NB: We use cx and bce from our caller's lexical environment, and return
  * false on error.
  */
 #define EMIT_UINT16_IMM_OP(op, i)                                             \
     JS_BEGIN_MACRO                                                            \
-        if (Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0)                \
+        if (Emit3(cx, bce, op, UINT16_HI(i), UINT16_LO(i)) < 0)               \
             return JS_FALSE;                                                  \
-        CheckTypeSet(cx, cg, op);                                             \
+        CheckTypeSet(cx, bce, op);                                            \
     JS_END_MACRO
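
// Illustrative sketch, not part of this patch: the big-endian split behind
// UINT16_HI/UINT16_LO in EMIT_UINT16_IMM_OP, and how the two immediate bytes
// read back as the original index.
#include <cassert>
#include <cstdint>

static uint8_t hi(uint16_t i) { return uint8_t(i >> 8); }
static uint8_t lo(uint16_t i) { return uint8_t(i & 0xff); }

int main() {
    uint16_t atomIndex = 0x1234;
    uint8_t code[3] = { 0x00 /* opcode */, hi(atomIndex), lo(atomIndex) };
    uint16_t decoded = uint16_t((code[1] << 8) | code[2]);
    assert(decoded == atomIndex);
    return 0;
}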
 
 #define EMIT_UINT16PAIR_IMM_OP(op, i, j)                                      \
     JS_BEGIN_MACRO                                                            \
-        ptrdiff_t off_ = EmitN(cx, cg, op, 2 * UINT16_LEN);                   \
+        ptrdiff_t off_ = EmitN(cx, bce, op, 2 * UINT16_LEN);                  \
         if (off_ < 0)                                                         \
             return JS_FALSE;                                                  \
-        jsbytecode *pc_ = CG_CODE(cg, off_);                                  \
+        jsbytecode *pc_ = CG_CODE(bce, off_);                                 \
         SET_UINT16(pc_, i);                                                   \
         pc_ += UINT16_LEN;                                                    \
         SET_UINT16(pc_, j);                                                   \
     JS_END_MACRO
 
 #define EMIT_UINT16_IN_PLACE(offset, op, i)                                   \
     JS_BEGIN_MACRO                                                            \
-        CG_CODE(cg, offset)[0] = op;                                          \
-        CG_CODE(cg, offset)[1] = UINT16_HI(i);                                \
-        CG_CODE(cg, offset)[2] = UINT16_LO(i);                                \
+        CG_CODE(bce, offset)[0] = op;                                         \
+        CG_CODE(bce, offset)[1] = UINT16_HI(i);                               \
+        CG_CODE(bce, offset)[2] = UINT16_LO(i);                               \
     JS_END_MACRO
 
 static JSBool
-FlushPops(JSContext *cx, CodeGenerator *cg, intN *npops)
+FlushPops(JSContext *cx, BytecodeEmitter *bce, intN *npops)
 {
     JS_ASSERT(*npops != 0);
-    if (NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
+    if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
         return JS_FALSE;
     EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
     *npops = 0;
     return JS_TRUE;
 }
 
 /*
  * Emit additional bytecode(s) for non-local jumps.
  */
 static JSBool
-EmitNonLocalJumpFixup(JSContext *cx, CodeGenerator *cg, StmtInfo *toStmt)
+EmitNonLocalJumpFixup(JSContext *cx, BytecodeEmitter *bce, StmtInfo *toStmt)
 {
     /*
-     * The non-local jump fixup we emit will unbalance cg->stackDepth, because
+     * The non-local jump fixup we emit will unbalance bce->stackDepth, because
      * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
-     * end of a with statement, so we save cg->stackDepth here and restore it
+     * end of a with statement, so we save bce->stackDepth here and restore it
      * just before a successful return.
      */
-    intN depth = cg->stackDepth;
+    intN depth = bce->stackDepth;
     intN npops = 0;
 
-#define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE
-
-    for (StmtInfo *stmt = cg->topStmt; stmt != toStmt; stmt = stmt->down) {
+#define FLUSH_POPS() if (npops && !FlushPops(cx, bce, &npops)) return JS_FALSE
+
+    for (StmtInfo *stmt = bce->topStmt; stmt != toStmt; stmt = stmt->down) {
         switch (stmt->type) {
           case STMT_FINALLY:
             FLUSH_POPS();
-            if (NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
+            if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
                 return JS_FALSE;
-            if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
+            if (EmitBackPatchOp(cx, bce, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
                 return JS_FALSE;
             break;
 
           case STMT_WITH:
             /* There's a With object on the stack that we need to pop. */
             FLUSH_POPS();
-            if (NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
+            if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
                 return JS_FALSE;
-            if (Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
+            if (Emit1(cx, bce, JSOP_LEAVEWITH) < 0)
                 return JS_FALSE;
             break;
 
           case STMT_FOR_IN_LOOP:
             /*
              * The iterator and the object being iterated need to be popped.
              */
             FLUSH_POPS();
-            if (NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
+            if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
                 return JS_FALSE;
-            if (Emit1(cx, cg, JSOP_ENDITER) < 0)
+            if (Emit1(cx, bce, JSOP_ENDITER) < 0)
                 return JS_FALSE;
             break;
 
           case STMT_SUBROUTINE:
             /*
              * There's a [exception or hole, retsub pc-index] pair on the
              * stack that we need to pop.
              */
@@ -1585,86 +1585,86 @@ EmitNonLocalJumpFixup(JSContext *cx, Cod
             break;
 
           default:;
         }
 
         if (stmt->flags & SIF_SCOPE) {
             /* There is a Block object with locals on the stack to pop. */
             FLUSH_POPS();
-            if (NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
+            if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
                 return JS_FALSE;
-            if (!EmitLeaveBlock(cx, cg, JSOP_LEAVEBLOCK, stmt->blockBox))
+            if (!EmitLeaveBlock(cx, bce, JSOP_LEAVEBLOCK, stmt->blockBox))
                 return JS_FALSE;
         }
     }
 
     FLUSH_POPS();
-    cg->stackDepth = depth;
+    bce->stackDepth = depth;
     return JS_TRUE;
 
 #undef FLUSH_POPS
 }
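
// Illustrative sketch, not part of this patch: the cleanup a non-local jump
// replicates while walking the enclosing-statement stack, as the loop over
// bce->topStmt does above. Only the opcode names come from the code; the toy
// statement stack and enum are invented for the example.
#include <cstdio>

enum ToyStmtType { TOY_WITH, TOY_FOR_IN_LOOP, TOY_BLOCK_SCOPE, TOY_LOOP };

int main() {
    // Enclosing statements for `outer: for (;;) for (x in o) with (m) break outer;`
    // listed innermost first, the way bce->topStmt is walked.
    ToyStmtType stack[] = { TOY_WITH, TOY_FOR_IN_LOOP, TOY_LOOP };
    unsigned toStmt = 2;   // index of the labeled loop the break targets
    for (unsigned i = 0; i < toStmt; i++) {
        switch (stack[i]) {
          case TOY_WITH:        std::puts("emit JSOP_LEAVEWITH");  break;  // pop the With object
          case TOY_FOR_IN_LOOP: std::puts("emit JSOP_ENDITER");    break;  // close the iterator
          case TOY_BLOCK_SCOPE: std::puts("emit JSOP_LEAVEBLOCK"); break;  // pop block locals
          default:              break;                                     // target reached
        }
    }
    return 0;
}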
 
 static JSBool
-EmitKnownBlockChain(JSContext *cx, CodeGenerator *cg, ObjectBox *box)
+EmitKnownBlockChain(JSContext *cx, BytecodeEmitter *bce, ObjectBox *box)
 {
     if (box)
-        return EmitIndexOp(cx, JSOP_BLOCKCHAIN, box->index, cg);
-    return Emit1(cx, cg, JSOP_NULLBLOCKCHAIN) >= 0;
+        return EmitIndexOp(cx, JSOP_BLOCKCHAIN, box->index, bce);
+    return Emit1(cx, bce, JSOP_NULLBLOCKCHAIN) >= 0;
 }
 
 static JSBool
-EmitBlockChain(JSContext *cx, CodeGenerator *cg)
-{
-    return EmitKnownBlockChain(cx, cg, cg->blockChainBox);
+EmitBlockChain(JSContext *cx, BytecodeEmitter *bce)
+{
+    return EmitKnownBlockChain(cx, bce, bce->blockChainBox);
 }
 
 static const jsatomid INVALID_ATOMID = -1;
 
 static ptrdiff_t
-EmitGoto(JSContext *cx, CodeGenerator *cg, StmtInfo *toStmt, ptrdiff_t *lastp,
+EmitGoto(JSContext *cx, BytecodeEmitter *bce, StmtInfo *toStmt, ptrdiff_t *lastp,
          jsatomid labelIndex = INVALID_ATOMID, SrcNoteType noteType = SRC_NULL)
 {
     intN index;
 
-    if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
+    if (!EmitNonLocalJumpFixup(cx, bce, toStmt))
         return -1;
 
     if (labelIndex != INVALID_ATOMID)
-        index = NewSrcNote2(cx, cg, noteType, ptrdiff_t(labelIndex));
+        index = NewSrcNote2(cx, bce, noteType, ptrdiff_t(labelIndex));
     else if (noteType != SRC_NULL)
-        index = NewSrcNote(cx, cg, noteType);
+        index = NewSrcNote(cx, bce, noteType);
     else
         index = 0;
     if (index < 0)
         return -1;
 
-    ptrdiff_t result = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
+    ptrdiff_t result = EmitBackPatchOp(cx, bce, JSOP_BACKPATCH, lastp);
     if (result < 0)
         return result;
 
-    if (!EmitBlockChain(cx, cg))
+    if (!EmitBlockChain(cx, bce))
         return -1;
 
     return result;
 }
 
 static JSBool
-BackPatch(JSContext *cx, CodeGenerator *cg, ptrdiff_t last, jsbytecode *target, jsbytecode op)
+BackPatch(JSContext *cx, BytecodeEmitter *bce, ptrdiff_t last, jsbytecode *target, jsbytecode op)
 {
     jsbytecode *pc, *stop;
     ptrdiff_t delta, span;
 
-    pc = CG_CODE(cg, last);
-    stop = CG_CODE(cg, -1);
+    pc = CG_CODE(bce, last);
+    stop = CG_CODE(bce, -1);
     while (pc != stop) {
-        delta = GetJumpOffset(cg, pc);
+        delta = GetJumpOffset(bce, pc);
         span = target - pc;
-        CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);
+        CHECK_AND_SET_JUMP_OFFSET(cx, bce, pc, span);
 
         /*
          * Set *pc after jump offset in case bpdelta didn't overflow, but span
          * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
          * and need to see the JSOP_BACKPATCH* op at *pc).
          */
         *pc = op;
         pc -= delta;
@@ -1681,35 +1681,35 @@ frontend::PopStatementTC(TreeContext *tc
         tc->topScopeStmt = stmt->downScope;
         if (stmt->flags & SIF_SCOPE) {
             tc->blockChainBox = stmt->blockBox->parent;
         }
     }
 }
 
 JSBool
-frontend::PopStatementCG(JSContext *cx, CodeGenerator *cg)
-{
-    StmtInfo *stmt = cg->topStmt;
+frontend::PopStatementCG(JSContext *cx, BytecodeEmitter *bce)
+{
+    StmtInfo *stmt = bce->topStmt;
     if (!STMT_IS_TRYING(stmt) &&
-        (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
-         !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
+        (!BackPatch(cx, bce, stmt->breaks, CG_NEXT(bce), JSOP_GOTO) ||
+         !BackPatch(cx, bce, stmt->continues, CG_CODE(bce, stmt->update),
                     JSOP_GOTO))) {
         return JS_FALSE;
     }
-    PopStatementTC(cg);
+    PopStatementTC(bce);
     return JS_TRUE;
 }
 
 JSBool
-frontend::DefineCompileTimeConstant(JSContext *cx, CodeGenerator *cg, JSAtom *atom, ParseNode *pn)
+frontend::DefineCompileTimeConstant(JSContext *cx, BytecodeEmitter *bce, JSAtom *atom, ParseNode *pn)
 {
     /* XXX just do numbers for now */
     if (pn->isKind(TOK_NUMBER)) {
-        if (!cg->constMap.put(atom, NumberValue(pn->pn_dval)))
+        if (!bce->constMap.put(atom, NumberValue(pn->pn_dval)))
             return JS_FALSE;
     }
     return JS_TRUE;
 }
 
 StmtInfo *
 frontend::LexicalLookup(TreeContext *tc, JSAtom *atom, jsint *slotp, StmtInfo *stmt)
 {
@@ -1743,50 +1743,50 @@ frontend::LexicalLookup(TreeContext *tc,
     return stmt;
 }
 
 /*
  * The function sets *constp to NO_CONSTANT when the atom does not correspond to a
  * name defining a constant.
  */
 static JSBool
-LookupCompileTimeConstant(JSContext *cx, CodeGenerator *cg, JSAtom *atom, Value *constp)
+LookupCompileTimeConstant(JSContext *cx, BytecodeEmitter *bce, JSAtom *atom, Value *constp)
 {
     /*
-     * Chase down the cg stack, but only until we reach the outermost cg.
+     * Chase down the bce stack, but only until we reach the outermost bce.
      * This enables propagating consts from top-level into switch cases in a
      * function compiled along with the top-level script.
      */
     constp->setMagic(JS_NO_CONSTANT);
     do {
-        if (cg->inFunction() || cg->compileAndGo()) {
+        if (bce->inFunction() || bce->compileAndGo()) {
             /* XXX this will need revising if 'const' becomes block-scoped. */
-            StmtInfo *stmt = LexicalLookup(cg, atom, NULL);
+            StmtInfo *stmt = LexicalLookup(bce, atom, NULL);
             if (stmt)
                 return JS_TRUE;
 
-            if (CodeGenerator::ConstMap::Ptr p = cg->constMap.lookup(atom)) {
+            if (BytecodeEmitter::ConstMap::Ptr p = bce->constMap.lookup(atom)) {
                 JS_ASSERT(!p->value.isMagic(JS_NO_CONSTANT));
                 *constp = p->value;
                 return JS_TRUE;
             }
 
             /*
              * Try looking in the variable object for a direct property that
              * is readonly and permanent.  We know such a property can't be
              * shadowed by another property on obj's prototype chain, or a
              * with object or catch variable; nor can prop's value be changed,
              * nor can prop be deleted.
              */
-            if (cg->inFunction()) {
-                if (cg->bindings.hasBinding(cx, atom))
+            if (bce->inFunction()) {
+                if (bce->bindings.hasBinding(cx, atom))
                     break;
             } else {
-                JS_ASSERT(cg->compileAndGo());
-                JSObject *obj = cg->scopeChain();
+                JS_ASSERT(bce->compileAndGo());
+                JSObject *obj = bce->scopeChain();
 
                 const Shape *shape = obj->nativeLookup(cx, ATOM_TO_JSID(atom));
                 if (shape) {
                     /*
                      * We're compiling code that will be executed immediately,
                      * not re-executed against a different scope chain and/or
                      * variable object.  Therefore we can get constant values
                      * from our variable object here.
@@ -1796,205 +1796,205 @@ LookupCompileTimeConstant(JSContext *cx,
                         *constp = obj->getSlot(shape->slot);
                     }
                 }
 
                 if (shape)
                     break;
             }
         }
-    } while (cg->parent && (cg = cg->parent->asCodeGenerator()));
+    } while (bce->parent && (bce = bce->parent->asBytecodeEmitter()));
     return JS_TRUE;
 }
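
// Illustrative sketch, not part of this patch: chasing the emitter's parent
// chain for a previously folded compile-time constant, as
// LookupCompileTimeConstant does over bce->parent above. The ToyEmitter type
// and its std::map are stand-ins for the real BytecodeEmitter/ConstMap.
#include <cstdio>
#include <map>
#include <string>

struct ToyEmitter {
    std::map<std::string, double> constMap;
    ToyEmitter *parent;
};

static bool lookupConst(ToyEmitter *bce, const std::string &name, double *vp) {
    do {
        std::map<std::string, double>::const_iterator it = bce->constMap.find(name);
        if (it != bce->constMap.end()) {
            *vp = it->second;
            return true;
        }
    } while ((bce = bce->parent) != NULL);
    return false;
}

int main() {
    ToyEmitter top;   top.parent = NULL;
    ToyEmitter inner; inner.parent = &top;
    top.constMap["K"] = 42;        // `const K = 42;` folded at top level
    double v;
    if (lookupConst(&inner, "K", &v))
        std::printf("K folds to %g inside the nested function\n", v);
    return 0;
}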
 
 static inline bool
 FitsWithoutBigIndex(uintN index)
 {
     return index < JS_BIT(16);
 }
 
 /*
  * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
  * reset instruction is necessary, JSOP_FALSE to indicate an error or either
  * JSOP_RESETBASE0 or JSOP_RESETBASE1 to indicate the reset bytecode to issue
  * after the main bytecode sequence.
  */
 static JSOp
-EmitBigIndexPrefix(JSContext *cx, CodeGenerator *cg, uintN index)
+EmitBigIndexPrefix(JSContext *cx, BytecodeEmitter *bce, uintN index)
 {
     uintN indexBase;
 
     /*
      * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
      * for big indexes.
      */
     JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24));
     JS_STATIC_ASSERT(INDEX_LIMIT >=
                      (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16);
 
     if (FitsWithoutBigIndex(index))
         return JSOP_NOP;
     indexBase = index >> 16;
     if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) {
-        if (Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
+        if (Emit1(cx, bce, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
             return JSOP_FALSE;
         return JSOP_RESETBASE0;
     }
 
     if (index >= INDEX_LIMIT) {
         JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                              JSMSG_TOO_MANY_LITERALS);
         return JSOP_FALSE;
     }
 
-    if (Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
+    if (Emit2(cx, bce, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
         return JSOP_FALSE;
     return JSOP_RESETBASE;
 }
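
// Illustrative sketch, not part of this patch: how an atom/object index picks
// its prefix and suffix ops in EmitBigIndexPrefix. The opcode names and the
// three one-byte INDEXBASE ops come from the code above; the INDEX_LIMIT
// overflow check is omitted here.
#include <cstdio>

static const char *prefixFor(unsigned index) {
    if (index < (1u << 16))
        return "no prefix (suffix JSOP_NOP)";
    unsigned indexBase = index >> 16;
    if (indexBase <= 3)
        return "JSOP_INDEXBASE1..3, suffix JSOP_RESETBASE0";
    return "JSOP_INDEXBASE with imm8 operand, suffix JSOP_RESETBASE";
}

int main() {
    std::printf("%u -> %s\n", 40000u,  prefixFor(40000u));   // fits in 16 bits
    std::printf("%u -> %s\n", 70000u,  prefixFor(70000u));   // segment 1
    std::printf("%u -> %s\n", 300000u, prefixFor(300000u));  // segment 4, general form
    return 0;
}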
 
 /*
  * Emit a bytecode and its 2-byte constant index immediate operand. If the
  * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
  * operand effectively extends the 16-bit immediate of the prefixed opcode,
  * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
  * with single-byte JSOP_INDEXBASE[123] codes.
  *
  * Such prefixing currently requires a suffix to restore the "zero segment"
  * register setting, but this could be optimized further.
  */
 static bool
-EmitIndexOp(JSContext *cx, JSOp op, uintN index, CodeGenerator *cg, JSOp *psuffix)
+EmitIndexOp(JSContext *cx, JSOp op, uintN index, BytecodeEmitter *bce, JSOp *psuffix)
 {
     JSOp bigSuffix;
 
-    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
+    bigSuffix = EmitBigIndexPrefix(cx, bce, index);
     if (bigSuffix == JSOP_FALSE)
         return false;
     EMIT_UINT16_IMM_OP(op, index);
 
     /*
      * For decomposed ops, the suffix needs to go after the decomposed version.
      * This means the suffix will run in the interpreter in both the base
      * and decomposed paths, which works as suffix ops are idempotent.
      */
     JS_ASSERT(!!(js_CodeSpec[op].format & JOF_DECOMPOSE) == (psuffix != NULL));
     if (psuffix) {
         *psuffix = bigSuffix;
         return true;
     }
 
-    return bigSuffix == JSOP_NOP || Emit1(cx, cg, bigSuffix) >= 0;
+    return bigSuffix == JSOP_NOP || Emit1(cx, bce, bigSuffix) >= 0;
 }
 
 /*
- * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
+ * Slight sugar for EmitIndexOp, again accessing cx and bce from the macro
  * caller's lexical environment, and embedding a false return on error.
  */
 #define EMIT_INDEX_OP(op, index)                                              \
     JS_BEGIN_MACRO                                                            \
-        if (!EmitIndexOp(cx, op, index, cg))                                  \
+        if (!EmitIndexOp(cx, op, index, bce))                                 \
             return JS_FALSE;                                                  \
     JS_END_MACRO
 
 static bool
-EmitAtomOp(JSContext *cx, ParseNode *pn, JSOp op, CodeGenerator *cg, JSOp *psuffix = NULL)
+EmitAtomOp(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce, JSOp *psuffix = NULL)
 {
     JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
 
     if (op == JSOP_GETPROP &&
         pn->pn_atom == cx->runtime->atomState.lengthAtom) {
         /* Specialize length accesses for the interpreter. */
         op = JSOP_LENGTH;
     }
 
     jsatomid index;
-    if (!cg->makeAtomIndex(pn->pn_atom, &index))
+    if (!bce->makeAtomIndex(pn->pn_atom, &index))
         return false;
 
-    return EmitIndexOp(cx, op, index, cg, psuffix);
+    return EmitIndexOp(cx, op, index, bce, psuffix);
 }
 
 static JSBool
-EmitObjectOp(JSContext *cx, ObjectBox *objbox, JSOp op, CodeGenerator *cg)
+EmitObjectOp(JSContext *cx, ObjectBox *objbox, JSOp op, BytecodeEmitter *bce)
 {
     JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
-    return EmitIndexOp(cx, op, cg->objectList.index(objbox), cg);
+    return EmitIndexOp(cx, op, bce->objectList.index(objbox), bce);
 }
 
 /*
  * What good are ARGNO_LEN and SLOTNO_LEN, you ask?  The answer is that, apart
  * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
  * other parts of the code there's no necessary relationship between the two.
  * The abstraction cracks here in order to share EmitSlotIndexOp code among
  * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
  */
 JS_STATIC_ASSERT(ARGNO_LEN == 2);
 JS_STATIC_ASSERT(SLOTNO_LEN == 2);
 
 static JSBool
-EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index, CodeGenerator *cg)
+EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index, BytecodeEmitter *bce)
 {
     JSOp bigSuffix;
     ptrdiff_t off;
     jsbytecode *pc;
 
     JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM ||
               JOF_OPTYPE(op) == JOF_SLOTOBJECT);
-    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
+    bigSuffix = EmitBigIndexPrefix(cx, bce, index);
     if (bigSuffix == JSOP_FALSE)
         return JS_FALSE;
 
     /* Emit [op, slot, index]. */
-    off = EmitN(cx, cg, op, 2 + INDEX_LEN);
+    off = EmitN(cx, bce, op, 2 + INDEX_LEN);
     if (off < 0)
         return JS_FALSE;
-    pc = CG_CODE(cg, off);
+    pc = CG_CODE(bce, off);
     SET_UINT16(pc, slot);
     pc += 2;
     SET_INDEX(pc, index);
-    return bigSuffix == JSOP_NOP || Emit1(cx, cg, bigSuffix) >= 0;
+    return bigSuffix == JSOP_NOP || Emit1(cx, bce, bigSuffix) >= 0;
 }
 
 bool
-CodeGenerator::shouldNoteClosedName(ParseNode *pn)
+BytecodeEmitter::shouldNoteClosedName(ParseNode *pn)
 {
     return !callsEval() && pn->isDefn() && pn->isClosed();
 }
 
 /*
  * Adjust the slot for a block local to account for the number of variables
  * that share the same index space with locals. Due to the incremental code
  * generation for top-level script, we do the adjustment via code patching in
  * BytecodeCompiler::compileScript; see comments there.
  *
  * The function returns -1 on failures.
  */
 static jsint
-AdjustBlockSlot(JSContext *cx, CodeGenerator *cg, jsint slot)
-{
-    JS_ASSERT((jsuint) slot < cg->maxStackDepth);
-    if (cg->inFunction()) {
-        slot += cg->bindings.countVars();
+AdjustBlockSlot(JSContext *cx, BytecodeEmitter *bce, jsint slot)
+{
+    JS_ASSERT((jsuint) slot < bce->maxStackDepth);
+    if (bce->inFunction()) {
+        slot += bce->bindings.countVars();
         if ((uintN) slot >= SLOTNO_LIMIT) {
-            ReportCompileErrorNumber(cx, CG_TS(cg), NULL, JSREPORT_ERROR, JSMSG_TOO_MANY_LOCALS);
+            ReportCompileErrorNumber(cx, CG_TS(bce), NULL, JSREPORT_ERROR, JSMSG_TOO_MANY_LOCALS);
             slot = -1;
         }
     }
     return slot;
 }
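
// Illustrative sketch, not part of this patch: the slot arithmetic in
// AdjustBlockSlot above. The counts are made up for the example, and
// kToySlotLimit is an assumed stand-in for SLOTNO_LIMIT.
#include <cassert>

static const unsigned kToySlotLimit = 1u << 16;  // assumed stand-in

static int adjustBlockSlot(int slot, bool inFunction, unsigned countVars) {
    if (inFunction) {
        slot += int(countVars);
        if (unsigned(slot) >= kToySlotLimit)
            return -1;  // the "too many locals" error path
    }
    return slot;
}

int main() {
    // A block local at depth slot 2 inside a function with 5 vars lands at 7.
    assert(adjustBlockSlot(2, true, 5) == 7);
    // Top-level code leaves the slot alone; the adjustment happens later by
    // code patching in BytecodeCompiler::compileScript, per the comment above.
    assert(adjustBlockSlot(2, false, 5) == 2);
    return 0;
}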
 
 static bool
-EmitEnterBlock(JSContext *cx, ParseNode *pn, CodeGenerator *cg)
+EmitEnterBlock(JSContext *cx, ParseNode *pn, BytecodeEmitter *bce)
 {
     JS_ASSERT(pn->isKind(TOK_LEXICALSCOPE));
-    if (!EmitObjectOp(cx, pn->pn_objbox, JSOP_ENTERBLOCK, cg))
+    if (!EmitObjectOp(cx, pn->pn_objbox, JSOP_ENTERBLOCK, bce))
         return false;
 
     JSObject *blockObj = pn->pn_objbox->object;
-    jsint depth = AdjustBlockSlot(cx, cg, OBJ_BLOCK_DEPTH(cx, blockObj));
+    jsint depth = AdjustBlockSlot(cx, bce, OBJ_BLOCK_DEPTH(cx, blockObj));
     if (depth < 0)
         return false;
 
     uintN base = JSSLOT_FREE(&BlockClass);
     for (uintN slot = base, limit = base + OBJ_BLOCK_COUNT(cx, blockObj); slot < limit; slot++) {
         const Value &v = blockObj->getSlot(slot);
 
         /* Beware the empty destructuring dummy. */
@@ -2015,44 +2015,44 @@ EmitEnterBlock(JSContext *cx, ParseNode 
         }
 #endif
 
         /*
          * If this variable is closed over, and |eval| is not present,
          * then set a bit in dslots so the Method JIT can deoptimize this
          * slot.
          */
-        bool isClosed = cg->shouldNoteClosedName(dn);
+        bool isClosed = bce->shouldNoteClosedName(dn);
         blockObj->setSlot(slot, BooleanValue(isClosed));
     }
 
     /*
      * If clones of this block will have any extensible parents, then the
      * clones must get unique shapes; see the comments for
      * js::Bindings::extensibleParents.
      */
-    if ((cg->flags & TCF_FUN_EXTENSIBLE_SCOPE) ||
-        cg->bindings.extensibleParents())
+    if ((bce->flags & TCF_FUN_EXTENSIBLE_SCOPE) ||
+        bce->bindings.extensibleParents())
         blockObj->setBlockOwnShape(cx);
 
     return true;
 }
 
 static JSBool
-EmitLeaveBlock(JSContext *cx, CodeGenerator *cg, JSOp op, ObjectBox *box)
+EmitLeaveBlock(JSContext *cx, BytecodeEmitter *bce, JSOp op, ObjectBox *box)
 {
     JSOp bigSuffix;
     uintN count = OBJ_BLOCK_COUNT(cx, box->object);
-
-    bigSuffix = EmitBigIndexPrefix(cx, cg, box->index);
+
+    bigSuffix = EmitBigIndexPrefix(cx, bce, box->index);
     if (bigSuffix == JSOP_FALSE)
         return JS_FALSE;
-    if (Emit5(cx, cg, op, count, box->index) < 0)
+    if (Emit5(cx, bce, op, count, box->index) < 0)
         return JS_FALSE;
-    return bigSuffix == JSOP_NOP || Emit1(cx, cg, bigSuffix) >= 0;
+    return bigSuffix == JSOP_NOP || Emit1(cx, bce, bigSuffix) >= 0;
 }
 
 /*
  * Try to convert a *NAME op to a *GNAME op, which optimizes access to
  * undeclared globals. Return true if a conversion was made.
  *
  * This conversion is not made if we are in strict mode.  In eval code nested
  * within (strict mode) eval code, access to an undeclared "global" might
@@ -2067,24 +2067,23 @@ EmitLeaveBlock(JSContext *cx, CodeGenera
  *   "use strict";
  *   function foo()
  *   {
  *     undeclared = 17; // throws ReferenceError
  *   }
  *   foo();
  */
 static bool
-TryConvertToGname(CodeGenerator *cg, ParseNode *pn, JSOp *op)
-{
-    if (cg->compileAndGo() &&
-        cg->compiler()->globalScope->globalObj &&
-        !cg->mightAliasLocals() &&
+TryConvertToGname(BytecodeEmitter *bce, ParseNode *pn, JSOp *op)
+{
+    if (bce->compileAndGo() &&
+        bce->compiler()->globalScope->globalObj &&
+        !bce->mightAliasLocals() &&
         !pn->isDeoptimized() &&
-        !(cg->flags & TCF_STRICT_MODE_CODE))
-    {
+        !(bce->flags & TCF_STRICT_MODE_CODE)) {
         switch (*op) {
           case JSOP_NAME:     *op = JSOP_GETGNAME; break;
           case JSOP_SETNAME:  *op = JSOP_SETGNAME; break;
           case JSOP_INCNAME:  *op = JSOP_INCGNAME; break;
           case JSOP_NAMEINC:  *op = JSOP_GNAMEINC; break;
           case JSOP_DECNAME:  *op = JSOP_DECGNAME; break;
           case JSOP_NAMEDEC:  *op = JSOP_GNAMEDEC; break;
           case JSOP_SETCONST:
@@ -2097,99 +2096,99 @@ TryConvertToGname(CodeGenerator *cg, Par
     }
     return false;
 }
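
// Illustrative sketch, not part of this patch: the gate in TryConvertToGname
// as a standalone predicate over toy flags. Each field mirrors one condition
// tested above; the struct and field names are invented for the example.
struct ToyGnameGate {
    bool compileAndGo;
    bool haveGlobalObj;
    bool mightAliasLocals;
    bool deoptimized;
    bool strictModeCode;
};

static bool canUseGname(const ToyGnameGate &g) {
    return g.compileAndGo && g.haveGlobalObj && !g.mightAliasLocals &&
           !g.deoptimized && !g.strictModeCode;
}

int main() {
    ToyGnameGate ok     = { true, true, false, false, false };
    ToyGnameGate strict = { true, true, false, false, true };
    return (canUseGname(ok) && !canUseGname(strict)) ? 0 : 1;
}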
 
 // Binds a global, given a |dn| that is known to have the PND_GVAR bit, and a pn
 // that is |dn| or whose definition is |dn|. |pn->pn_cookie| is an outparam
 // that will be free (meaning no binding), or a slot number.
 static bool
-BindKnownGlobal(JSContext *cx, CodeGenerator *cg, ParseNode *dn, ParseNode *pn, JSAtom *atom)
+BindKnownGlobal(JSContext *cx, BytecodeEmitter *bce, ParseNode *dn, ParseNode *pn, JSAtom *atom)
 {
     // Cookie is an outparam; make sure caller knew to clear it.
     JS_ASSERT(pn->pn_cookie.isFree());
 
-    if (cg->mightAliasLocals())
+    if (bce->mightAliasLocals())
         return true;
 
-    GlobalScope *globalScope = cg->compiler()->globalScope;
+    GlobalScope *globalScope = bce->compiler()->globalScope;
 
     jsatomid index;
     if (dn->pn_cookie.isFree()) {
         // The definition wasn't bound, so find its atom's index in the
         // mapping of defined globals.
         AtomIndexPtr p = globalScope->names.lookup(atom);
         JS_ASSERT(!!p);
         index = p.value();
     } else {
-        CodeGenerator *globalcg = globalScope->cg;
-
-        // If the definition is bound, and we're in the same cg, we can re-use
+        BytecodeEmitter *globalbce = globalScope->bce;
+
+        // If the definition is bound, and we're in the same bce, we can re-use
         // its cookie.
-        if (globalcg == cg) {
+        if (globalbce == bce) {
             pn->pn_cookie = dn->pn_cookie;
             pn->pn_dflags |= PND_BOUND;
             return true;
         }
 
-        // Otherwise, find the atom's index by using the originating cg's
+        // Otherwise, find the atom's index by using the originating bce's
         // global use table.
-        index = globalcg->globalUses[dn->pn_cookie.asInteger()].slot;
-    }
-
-    if (!cg->addGlobalUse(atom, index, &pn->pn_cookie))
+        index = globalbce->globalUses[dn->pn_cookie.asInteger()].slot;
+    }
+
+    if (!bce->addGlobalUse(atom, index, &pn->pn_cookie))
         return false;
 
     if (!pn->pn_cookie.isFree())
         pn->pn_dflags |= PND_BOUND;
 
     return true;
 }
 
 // See BindKnownGlobal()'s comment.
 static bool
-BindGlobal(JSContext *cx, CodeGenerator *cg, ParseNode *pn, JSAtom *atom)
+BindGlobal(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSAtom *atom)
 {
     pn->pn_cookie.makeFree();
 
     Definition *dn;
     if (pn->isUsed()) {
         dn = pn->pn_lexdef;
     } else {
         if (!pn->isDefn())
             return true;
         dn = (Definition *)pn;
     }
 
     // Only optimize for defined globals.
     if (!dn->isGlobal())
         return true;
 
-    return BindKnownGlobal(cx, cg, dn, pn, atom);
+    return BindKnownGlobal(cx, bce, dn, pn, atom);
 }
 
 /*
  * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
- * and stores, given the compile-time information in cg and a TOK_NAME node pn.
+ * and stores, given the compile-time information in bce and a TOK_NAME node pn.
  * It returns false on error, true on success.
  *
  * The caller can inspect pn->pn_cookie for FREE_UPVAR_COOKIE to tell whether
  * optimization occurred, in which case BindNameToSlot also updated pn->pn_op.
  * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
  * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE.  Whether or not
  * pn->pn_op was modified, if this function finds an argument or local variable
  * name, PND_CONST will be set in pn_dflags for read-only properties after a
  * successful return.
  *
  * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
  * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
  * in EmitTree.
  */
 static JSBool
-BindNameToSlot(JSContext *cx, CodeGenerator *cg, ParseNode *pn)
+BindNameToSlot(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
 {
     Definition *dn;
     JSOp op;
     JSAtom *atom;
     Definition::Kind dn_kind;
 
     JS_ASSERT(pn->isKind(TOK_NAME));
 
@@ -2236,30 +2235,30 @@ BindNameToSlot(JSContext *cx, CodeGenera
      * declaration originates at top level in eval code.
      */
     switch (op) {
       case JSOP_NAME:
       case JSOP_SETCONST:
         break;
       case JSOP_DELNAME:
         if (dn_kind != Definition::UNKNOWN) {
-            if (cg->parser->callerFrame && dn->isTopLevel())
-                JS_ASSERT(cg->compileAndGo());
+            if (bce->parser->callerFrame && dn->isTopLevel())
+                JS_ASSERT(bce->compileAndGo());
             else
                 pn->setOp(JSOP_FALSE);
             pn->pn_dflags |= PND_BOUND;
             return JS_TRUE;
         }
         break;
       default:
         if (pn->isConst()) {
-            if (cg->needStrictChecks()) {
+            if (bce->needStrictChecks()) {
                 JSAutoByteString name;
                 if (!js_AtomToPrintableString(cx, atom, &name) ||
-                    !ReportStrictModeError(cx, CG_TS(cg), cg, pn, JSMSG_READ_ONLY, name.ptr())) {
+                    !ReportStrictModeError(cx, CG_TS(bce), bce, pn, JSMSG_READ_ONLY, name.ptr())) {
                     return JS_FALSE;
                 }
             }
             pn->setOp(op = JSOP_NAME);
         }
     }
 
     if (dn->isGlobal()) {
@@ -2281,106 +2280,106 @@ BindNameToSlot(JSContext *cx, CodeGenera
          * |dn|. For example, we could have a SETGNAME op's lexdef be a
          * GETGNAME op, and their cookies have very different meanings. As
          * a workaround, just make the cookie free.
          */
         cookie.makeFree();
     }
 
     if (cookie.isFree()) {
-        StackFrame *caller = cg->parser->callerFrame;
+        StackFrame *caller = bce->parser->callerFrame;
         if (caller) {
-            JS_ASSERT(cg->compileAndGo());
+            JS_ASSERT(bce->compileAndGo());
 
             /*
              * Don't generate upvars on the left side of a for loop. See
              * bug 470758.
              */
-            if (cg->flags & TCF_IN_FOR_INIT)
+            if (bce->flags & TCF_IN_FOR_INIT)
                 return JS_TRUE;
 
             JS_ASSERT(caller->isScriptFrame());
 
             /*
              * If this is an eval in the global scope, then unbound variables
              * must be globals, so try to use GNAME ops.
              */
-            if (caller->isGlobalFrame() && TryConvertToGname(cg, pn, &op)) {
+            if (caller->isGlobalFrame() && TryConvertToGname(bce, pn, &op)) {
                 jsatomid _;
-                if (!cg->makeAtomIndex(atom, &_))
+                if (!bce->makeAtomIndex(atom, &_))
                     return JS_FALSE;
 
                 pn->setOp(op);
                 pn->pn_dflags |= PND_BOUND;
                 return JS_TRUE;
             }
 
             /*
              * Out of tricks, so we must rely on PICs to optimize named
              * accesses from direct eval called from function code.
              */
             return JS_TRUE;
         }
 
         /* Optimize accesses to undeclared globals. */
-        if (!cg->mightAliasLocals() && !TryConvertToGname(cg, pn, &op))
+        if (!bce->mightAliasLocals() && !TryConvertToGname(bce, pn, &op))
             return JS_TRUE;
 
         jsatomid _;
-        if (!cg->makeAtomIndex(atom, &_))
+        if (!bce->makeAtomIndex(atom, &_))
             return JS_FALSE;
 
         pn->setOp(op);
         pn->pn_dflags |= PND_BOUND;
 
         return JS_TRUE;
     }
 
     uint16 level = cookie.level();
-    JS_ASSERT(cg->staticLevel >= level);
-
-    const uintN skip = cg->staticLevel - level;
+    JS_ASSERT(bce->staticLevel >= level);
+
+    const uintN skip = bce->staticLevel - level;
     if (skip != 0) {
-        JS_ASSERT(cg->inFunction());
-        JS_ASSERT_IF(cookie.slot() != UpvarCookie::CALLEE_SLOT, cg->roLexdeps->lookup(atom));
+        JS_ASSERT(bce->inFunction());
+        JS_ASSERT_IF(cookie.slot() != UpvarCookie::CALLEE_SLOT, bce->roLexdeps->lookup(atom));
         JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
-        JS_ASSERT(cg->fun()->u.i.skipmin <= skip);
+        JS_ASSERT(bce->fun()->u.i.skipmin <= skip);
 
         /*
          * If op is a mutating opcode, this upvar's lookup skips too many levels,
          * or the function is heavyweight, we fall back on JSOP_*NAME*.
          */
         if (op != JSOP_NAME)
             return JS_TRUE;
         if (skip >= UpvarCookie::UPVAR_LEVEL_LIMIT)
             return JS_TRUE;
-        if (cg->flags & TCF_FUN_HEAVYWEIGHT)
+        if (bce->flags & TCF_FUN_HEAVYWEIGHT)
             return JS_TRUE;
 
-        if (!cg->fun()->isFlatClosure())
+        if (!bce->fun()->isFlatClosure())
             return JS_TRUE;
 
-        if (!cg->upvarIndices.ensureMap(cx))
+        if (!bce->upvarIndices.ensureMap(cx))
             return JS_FALSE;
 
-        AtomIndexAddPtr p = cg->upvarIndices->lookupForAdd(atom);
+        AtomIndexAddPtr p = bce->upvarIndices->lookupForAdd(atom);
         jsatomid index;
         if (p) {
             index = p.value();
         } else {
-            if (!cg->bindings.addUpvar(cx, atom))
+            if (!bce->bindings.addUpvar(cx, atom))
                 return JS_FALSE;
 
-            index = cg->upvarIndices->count();
-            if (!cg->upvarIndices->add(p, atom, index))
+            index = bce->upvarIndices->count();
+            if (!bce->upvarIndices->add(p, atom, index))
                 return JS_FALSE;
 
-            UpvarCookies &upvarMap = cg->upvarMap;
+            UpvarCookies &upvarMap = bce->upvarMap;
             /* upvarMap should have the same number of UpvarCookies as there are lexdeps. */
-            size_t lexdepCount = cg->roLexdeps->count();
+            size_t lexdepCount = bce->roLexdeps->count();
 
             JS_ASSERT_IF(!upvarMap.empty(), lexdepCount == upvarMap.length());
             if (upvarMap.empty()) {
                 /* Lazily initialize the upvar map with exactly the necessary capacity. */
                 if (lexdepCount <= upvarMap.sMaxInlineStorage) {
                     JS_ALWAYS_TRUE(upvarMap.growByUninitialized(lexdepCount));
                 } else {
                     void *buf = upvarMap.allocPolicy().malloc_(lexdepCount * sizeof(UpvarCookie));
@@ -2389,17 +2388,17 @@ BindNameToSlot(JSContext *cx, CodeGenera
                     upvarMap.replaceRawBuffer(static_cast<UpvarCookie *>(buf), lexdepCount);
                 }
                 for (size_t i = 0; i < lexdepCount; ++i)
                     upvarMap[i] = UpvarCookie();
             }
 
             uintN slot = cookie.slot();
             if (slot != UpvarCookie::CALLEE_SLOT && dn_kind != Definition::ARG) {
-                TreeContext *tc = cg;
+                TreeContext *tc = bce;
                 do {
                     tc = tc->parent;
                 } while (tc->staticLevel != level);
                 if (tc->inFunction())
                     slot += tc->fun()->nargs;
             }
 
             JS_ASSERT(index < upvarMap.length());
@@ -2445,20 +2444,20 @@ BindNameToSlot(JSContext *cx, CodeGenera
           default: JS_NOT_REACHED("arg");
         }
         JS_ASSERT(!pn->isConst());
         break;
 
       case Definition::VAR:
         if (dn->isOp(JSOP_CALLEE)) {
             JS_ASSERT(op != JSOP_CALLEE);
-            JS_ASSERT((cg->fun()->flags & JSFUN_LAMBDA) && atom == cg->fun()->atom);
+            JS_ASSERT((bce->fun()->flags & JSFUN_LAMBDA) && atom == bce->fun()->atom);
 
             /*
-             * Leave pn->isOp(JSOP_NAME) if cg->fun is heavyweight to
+             * Leave pn->isOp(JSOP_NAME) if bce->fun is heavyweight to
              * address two cases: a new binding introduced by eval, and
              * assignment to the name in strict mode.
              *
              *   var fun = (function f(s) { eval(s); return f; });
              *   assertEq(fun("var f = 42"), 42);
              *
              * ECMAScript specifies that a function expression's name is bound
              * in a lexical environment distinct from that used to bind its
@@ -2472,17 +2471,17 @@ BindNameToSlot(JSContext *cx, CodeGenera
              * Outside strict mode, assignment to a function expression's name
              * has no effect.  But in strict mode, this attempt to mutate an
              * immutable binding must throw a TypeError.  We implement this by
              * not optimizing such assignments and by marking such functions as
              * heavyweight, ensuring that the function name is represented in
              * the scope chain so that assignment will throw a TypeError.
              */
             JS_ASSERT(op != JSOP_DELNAME);
-            if (!(cg->flags & TCF_FUN_HEAVYWEIGHT)) {
+            if (!(bce->flags & TCF_FUN_HEAVYWEIGHT)) {
                 op = JSOP_CALLEE;
                 pn->pn_dflags |= PND_CONST;
             }
 
             pn->setOp(op);
             pn->pn_dflags |= PND_BOUND;
             return JS_TRUE;
         }
@@ -2509,17 +2508,17 @@ BindNameToSlot(JSContext *cx, CodeGenera
     JS_ASSERT(!pn->isOp(op));
     pn->setOp(op);
     pn->pn_cookie.set(0, cookie.slot());
     pn->pn_dflags |= PND_BOUND;
     return JS_TRUE;
 }
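
// Illustrative sketch, not part of this patch: the level/skip arithmetic used
// above when deciding whether an upvar access can stay optimized. The
// two-field struct is a toy stand-in for the real UpvarCookie encoding.
#include <cassert>
#include <cstdint>

struct ToyCookie {
    uint16_t level;
    uint16_t slot;
};

int main() {
    // A name defined two static levels out from the code being emitted:
    ToyCookie cookie = { 3, 5 };
    unsigned staticLevel = 5;                 // the emitter's own static level
    unsigned skip = staticLevel - cookie.level;
    assert(skip == 2 && cookie.slot == 5);
    // BindNameToSlot keeps the upvar fast path only for plain JSOP_NAME reads
    // with a small skip, in non-heavyweight flat closures; otherwise it leaves
    // the general *NAME ops in place.
    return 0;
}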
 
 bool
-CodeGenerator::addGlobalUse(JSAtom *atom, uint32 slot, UpvarCookie *cookie)
+BytecodeEmitter::addGlobalUse(JSAtom *atom, uint32 slot, UpvarCookie *cookie)
 {
     if (!globalMap.ensureMap(context()))
         return false;
 
     AtomIndexAddPtr p = globalMap->lookupForAdd(atom);
     if (p) {
         jsatomid index = p.value();
         cookie->set(0, index);
@@ -2555,17 +2554,17 @@ CodeGenerator::addGlobalUse(JSAtom *atom
  * The caller should initialize *answer to false and invoke this function on
  * an expression statement or similar subtree to decide whether the tree could
  * produce code that has any side effects.  For an expression statement, we
  * define useless code as code with no side effects, because the main effect,
  * the value left on the stack after the code executes, will be discarded by a
  * pop bytecode.
  */
 static JSBool
-CheckSideEffects(JSContext *cx, CodeGenerator *cg, ParseNode *pn, JSBool *answer)
+CheckSideEffects(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSBool *answer)
 {
     JSBool ok;
     ParseNode *pn2;
 
     ok = JS_TRUE;
     if (!pn || *answer)
         return ok;
 
@@ -2584,17 +2583,17 @@ CheckSideEffects(JSContext *cx, CodeGene
       case PN_LIST:
         if (pn->isOp(JSOP_NOP) || pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) ||
             pn->isOp(JSOP_STRICTEQ) || pn->isOp(JSOP_STRICTNE)) {
             /*
              * Non-operators along with ||, &&, ===, and !== never invoke
              * toString or valueOf.
              */
             for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
-                ok &= CheckSideEffects(cx, cg, pn2, answer);
+                ok &= CheckSideEffects(cx, bce, pn2, answer);
         } else {
             /*
              * All invocation operations (construct: TOK_NEW, call: TOK_LP)
              * are presumed to be useful, because they may have side effects
              * even if their main effect (their return value) is discarded.
              *
              * TOK_LB binary trees of 3 or more nodes are flattened into lists
              * to avoid too much recursion.  All such lists must be presumed
@@ -2607,19 +2606,19 @@ CheckSideEffects(JSContext *cx, CodeGene
              * on Array.prototype create this hazard). Initialiser list nodes
              * have JSOP_NEWINIT in their pn_op.
              */
             *answer = JS_TRUE;
         }
         break;
 
       case PN_TERNARY:
-        ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
-             CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
-             CheckSideEffects(cx, cg, pn->pn_kid3, answer);
+        ok = CheckSideEffects(cx, bce, pn->pn_kid1, answer) &&
+             CheckSideEffects(cx, bce, pn->pn_kid2, answer) &&
+             CheckSideEffects(cx, bce, pn->pn_kid3, answer);
         break;
 
       case PN_BINARY:
         if (pn->isKind(TOK_ASSIGN)) {
             /*
              * Assignment is presumed to be useful, even if the next operation
              * is another assignment overwriting this one's ostensible effect,
              * because the left operand may be a property with a setter that
@@ -2627,77 +2626,77 @@ CheckSideEffects(JSContext *cx, CodeGene
              *
              * The only exception is assignment of a useless value to a const
              * declared in the function currently being compiled.
              */
             pn2 = pn->pn_left;
             if (!pn2->isKind(TOK_NAME)) {
                 *answer = JS_TRUE;
             } else {
-                if (!BindNameToSlot(cx, cg, pn2))
+                if (!BindNameToSlot(cx, bce, pn2))
                     return JS_FALSE;
-                if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
+                if (!CheckSideEffects(cx, bce, pn->pn_right, answer))
                     return JS_FALSE;
                 if (!*answer && (!pn->isOp(JSOP_NOP) || !pn2->isConst()))
                     *answer = JS_TRUE;
             }
         } else {
             if (pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) || pn->isOp(JSOP_STRICTEQ) ||
                 pn->isOp(JSOP_STRICTNE)) {
                 /*
                  * ||, &&, ===, and !== do not convert their operands via
                  * toString or valueOf method calls.
                  */
-                ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
-                     CheckSideEffects(cx, cg, pn->pn_right, answer);
+                ok = CheckSideEffects(cx, bce, pn->pn_left, answer) &&
+                     CheckSideEffects(cx, bce, pn->pn_right, answer);
             } else {
                 /*
                  * We can't easily prove that neither operand ever denotes an
                  * object with a toString or valueOf method.
                  */
                 *answer = JS_TRUE;
             }
         }
         break;
 
       case PN_UNARY:
         switch (pn->getKind()) {
           case TOK_DELETE:
             pn2 = pn->pn_kid;
             switch (pn2->getKind()) {
               case TOK_NAME:
-                if (!BindNameToSlot(cx, cg, pn2))
+                if (!BindNameToSlot(cx, bce, pn2))
                     return JS_FALSE;
                 if (pn2->isConst()) {
                     *answer = JS_FALSE;
                     break;
                 }
                 /* FALL THROUGH */
               case TOK_DOT:
 #if JS_HAS_XML_SUPPORT
               case TOK_DBLDOT:
-                JS_ASSERT_IF(pn2->getKind() == TOK_DBLDOT, !cg->inStrictMode());
+                JS_ASSERT_IF(pn2->getKind() == TOK_DBLDOT, !bce->inStrictMode());
                 /* FALL THROUGH */
 
 #endif
               case TOK_LP:
               case TOK_LB:
                 /* All these delete addressing modes have effects too. */
                 *answer = JS_TRUE;
                 break;
               default:
-                ok = CheckSideEffects(cx, cg, pn2, answer);
+                ok = CheckSideEffects(cx, bce, pn2, answer);
                 break;
             }
             break;
 
           case TOK_UNARYOP:
             if (pn->isOp(JSOP_NOT)) {
                 /* ! does not convert its operand via toString or valueOf. */
-                ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
+                ok = CheckSideEffects(cx, bce, pn->pn_kid, answer);
                 break;
             }
             /* FALL THROUGH */
 
           default:
             /*
              * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
              * have direct effects. Of the remaining unary-arity node types,
@@ -2711,62 +2710,62 @@ CheckSideEffects(JSContext *cx, CodeGene
 
       case PN_NAME:
         /*
          * Take care to avoid trying to bind a label name (labels, both for
          * statements and property values in object initialisers, have pn_op
          * defaulted to JSOP_NOP).
          */
         if (pn->isKind(TOK_NAME) && !pn->isOp(JSOP_NOP)) {
-            if (!BindNameToSlot(cx, cg, pn))
+            if (!BindNameToSlot(cx, bce, pn))
                 return JS_FALSE;
             if (!pn->isOp(JSOP_ARGUMENTS) && !pn->isOp(JSOP_CALLEE) &&
                 pn->pn_cookie.isFree()) {
                 /*
                  * Not an argument or local variable use, and not a use of a
                  * unshadowed named function expression's given name, so this
                  * expression could invoke a getter that has side effects.
                  */
                 *answer = JS_TRUE;
             }
         }
         pn2 = pn->maybeExpr();
         if (pn->isKind(TOK_DOT)) {
-            if (pn2->isKind(TOK_NAME) && !BindNameToSlot(cx, cg, pn2))
+            if (pn2->isKind(TOK_NAME) && !BindNameToSlot(cx, bce, pn2))
                 return JS_FALSE;
             if (!(pn2->isOp(JSOP_ARGUMENTS) &&
                   pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
                 /*
                  * Any dotted property reference could call a getter, except
                  * for arguments.length where arguments is unambiguous.
                  */
                 *answer = JS_TRUE;
             }
         }
-        ok = CheckSideEffects(cx, cg, pn2, answer);
+        ok = CheckSideEffects(cx, bce, pn2, answer);
         break;
 
       case PN_NAMESET:
-        ok = CheckSideEffects(cx, cg, pn->pn_tree, answer);
+        ok = CheckSideEffects(cx, bce, pn->pn_tree, answer);
         break;
 
       case PN_NULLARY:
         if (pn->isKind(TOK_DEBUGGER))
             *answer = JS_TRUE;
         break;
     }
     return ok;
 }
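
// Illustrative sketch, not part of this patch: the caller protocol for
// CheckSideEffects over a toy expression tree. The node kind and traversal
// are stand-ins; only the "initialize *answer to false, drop the statement if
// it stays false" protocol comes from the comment above.
#include <cstdio>

struct ToyNode {
    bool hasEffect;          // e.g. a call or an assignment
    const ToyNode *left;
    const ToyNode *right;
};

static void checkSideEffects(const ToyNode *n, bool *answer) {
    if (!n || *answer)
        return;
    if (n->hasEffect)
        *answer = true;
    checkSideEffects(n->left, answer);
    checkSideEffects(n->right, answer);
}

int main() {
    // `localVar;` where the name was bound to a stack slot: no effect.
    ToyNode boundName = { false, NULL, NULL };
    bool useful = false;
    checkSideEffects(&boundName, &useful);
    std::printf("can drop statement: %s\n", useful ? "no" : "yes");

    // `f();` -- calls are always presumed useful.
    ToyNode call = { true, NULL, NULL };
    useful = false;
    checkSideEffects(&call, &useful);
    std::printf("can drop statement: %s\n", useful ? "no" : "yes");
    return 0;
}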
 
 static JSBool
-EmitNameOp(JSContext *cx, CodeGenerator *cg, ParseNode *pn, JSBool callContext)
+EmitNameOp(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSBool callContext)
 {
     JSOp op;
 
-    if (!BindNameToSlot(cx, cg, pn))
+    if (!BindNameToSlot(cx, bce, pn))
         return JS_FALSE;
     op = pn->getOp();
 
     if (callContext) {
         switch (op) {
           case JSOP_NAME:
             op = JSOP_CALLNAME;
             break;
@@ -2784,319 +2783,319 @@ EmitNameOp(JSContext *cx, CodeGenerator 
             break;
           default:
             JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
             break;
         }
     }
 
     if (op == JSOP_ARGUMENTS || op == JSOP_CALLEE) {
-        if (Emit1(cx, cg, op) < 0)
+        if (Emit1(cx, bce, op) < 0)
             return JS_FALSE;
-        if (callContext && Emit1(cx, cg, JSOP_PUSH) < 0)
+        if (callContext && Emit1(cx, bce, JSOP_PUSH) < 0)
             return JS_FALSE;
     } else {
         if (!pn->pn_cookie.isFree()) {
             EMIT_UINT16_IMM_OP(op, pn->pn_cookie.asInteger());
         } else {
-            if (!EmitAtomOp(cx, pn, op, cg))
+            if (!EmitAtomOp(cx, pn, op, bce))
                 return JS_FALSE;
         }
     }
 
     return JS_TRUE;
 }
 
 #if JS_HAS_XML_SUPPORT
 static bool
-EmitXMLName(JSContext *cx, ParseNode *pn, JSOp op, CodeGenerator *cg)
-{
-    JS_ASSERT(!cg->inStrictMode());
+EmitXMLName(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
+{
+    JS_ASSERT(!bce->inStrictMode());
     JS_ASSERT(pn->isKind(TOK_UNARYOP));
     JS_ASSERT(pn->isOp(JSOP_XMLNAME));
     JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);
 
     ParseNode *pn2 = pn->pn_kid;
-    uintN oldflags = cg->flags;
-    cg->flags &= ~TCF_IN_FOR_INIT;
-    if (!EmitTree(cx, cg, pn2))
+    uintN oldflags = bce->flags;
+    bce->flags &= ~TCF_IN_FOR_INIT;
+    if (!EmitTree(cx, bce, pn2))
         return false;
-    cg->flags |= oldflags & TCF_IN_FOR_INIT;
-    if (NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - pn2->pn_offset) < 0)
+    bce->flags |= oldflags & TCF_IN_FOR_INIT;
+    if (NewSrcNote2(cx, bce, SRC_PCBASE, CG_OFFSET(bce) - pn2->pn_offset) < 0)
         return false;
 
-    return Emit1(cx, cg, op) >= 0;
+    return Emit1(cx, bce, op) >= 0;
 }
 #endif
 
 static inline bool
-EmitElemOpBase(JSContext *cx, CodeGenerator *cg, JSOp op)
-{
-    if (Emit1(cx, cg, op) < 0)
+EmitElemOpBase(JSContext *cx, BytecodeEmitter *bce, JSOp op)
+{
+    if (Emit1(cx, bce, op) < 0)
         return false;
-    CheckTypeSet(cx, cg, op);
+    CheckTypeSet(cx, bce, op);
     return true;
 }
 
 static bool
-EmitSpecialPropOp(JSContext *cx, ParseNode *pn, JSOp op, CodeGenerator *cg)
+EmitSpecialPropOp(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
 {
     /*
      * Special case for obj.__proto__ to deoptimize away from fast paths in the
      * interpreter and trace recorder, which skip dense array instances by
      * going up to Array.prototype before looking up the property name.
      */
     jsatomid index;
-    if (!cg->makeAtomIndex(pn->pn_atom, &index))
+    if (!bce->makeAtomIndex(pn->pn_atom, &index))
         return false;
-    if (!EmitIndexOp(cx, JSOP_QNAMEPART, index, cg))
+    if (!EmitIndexOp(cx, JSOP_QNAMEPART, index, bce))
         return false;
-    return EmitElemOpBase(cx, cg, op);
+    return EmitElemOpBase(cx, bce, op);
 }
 
 static bool
-EmitPropOp(JSContext *cx, ParseNode *pn, JSOp op, CodeGenerator *cg,
+EmitPropOp(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce,
            JSBool callContext, JSOp *psuffix = NULL)
 {
     ParseNode *pn2, *pndot, *pnup, *pndown;
     ptrdiff_t top;
 
     JS_ASSERT(pn->isArity(PN_NAME));
     pn2 = pn->maybeExpr();
 
     /* Special case deoptimization for __proto__. */
     if ((op == JSOP_GETPROP || op == JSOP_CALLPROP) &&
         pn->pn_atom == cx->runtime->atomState.protoAtom) {
-        if (pn2 && !EmitTree(cx, cg, pn2))
+        if (pn2 && !EmitTree(cx, bce, pn2))
             return false;
-        return EmitSpecialPropOp(cx, pn, callContext ? JSOP_CALLELEM : JSOP_GETELEM, cg);
+        return EmitSpecialPropOp(cx, pn, callContext ? JSOP_CALLELEM : JSOP_GETELEM, bce);
     }
 
     if (callContext) {
         JS_ASSERT(pn->isKind(TOK_DOT));
         JS_ASSERT(op == JSOP_GETPROP);
         op = JSOP_CALLPROP;
     } else if (op == JSOP_GETPROP && pn->isKind(TOK_DOT)) {
         if (pn2->isKind(TOK_NAME)) {
             /*
              * Try to optimize arguments.length into JSOP_ARGCNT. If type
              * inference is enabled this is optimized separately.
              */
-            if (!BindNameToSlot(cx, cg, pn2))
+            if (!BindNameToSlot(cx, bce, pn2))
                 return false;
             if (!cx->typeInferenceEnabled() &&
                 pn->pn_atom == cx->runtime->atomState.lengthAtom) {
                 if (pn2->isOp(JSOP_ARGUMENTS))
-                    return Emit1(cx, cg, JSOP_ARGCNT) >= 0;
+                    return Emit1(cx, bce, JSOP_ARGCNT) >= 0;
             }
         }
     }
 
     /*
      * If the object operand is also a dotted property reference, reverse the
      * list linked via pn_expr temporarily so we can iterate over it from the
      * bottom up (reversing again as we go), to avoid excessive recursion.
      */
     if (pn2->isKind(TOK_DOT)) {
         pndot = pn2;
         pnup = NULL;
-        top = CG_OFFSET(cg);
+        top = CG_OFFSET(bce);
         for (;;) {
             /* Reverse pndot->pn_expr to point up, not down. */
             pndot->pn_offset = top;
             JS_ASSERT(!pndot->isUsed());
             pndown = pndot->pn_expr;
             pndot->pn_expr = pnup;
             if (!pndown->isKind(TOK_DOT))
                 break;
             pnup = pndot;
             pndot = pndown;
         }
 
         /* pndown is a primary expression, not a dotted property reference. */
-        if (!EmitTree(cx, cg, pndown))
+        if (!EmitTree(cx, bce, pndown))
             return false;
 
         do {
             /* Walk back up the list, emitting annotated name ops. */
-            if (NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - pndown->pn_offset) < 0)
+            if (NewSrcNote2(cx, bce, SRC_PCBASE, CG_OFFSET(bce) - pndown->pn_offset) < 0)
                 return false;
 
             /* Special case deoptimization on __proto__, as above. */
             if (pndot->isArity(PN_NAME) && pndot->pn_atom == cx->runtime->atomState.protoAtom) {
-                if (!EmitSpecialPropOp(cx, pndot, JSOP_GETELEM, cg))
+                if (!EmitSpecialPropOp(cx, pndot, JSOP_GETELEM, bce))
                     return false;
-            } else if (!EmitAtomOp(cx, pndot, pndot->getOp(), cg)) {
+            } else if (!EmitAtomOp(cx, pndot, pndot->getOp(), bce)) {
                 return false;
             }
 
             /* Reverse the pn_expr link again. */
             pnup = pndot->pn_expr;
             pndot->pn_expr = pndown;
             pndown = pndot;
         } while ((pndot = pnup) != NULL);
     } else {
-        if (!EmitTree(cx, cg, pn2))
+        if (!EmitTree(cx, bce, pn2))
             return false;
     }
 
-    if (NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - pn2->pn_offset) < 0)
+    if (NewSrcNote2(cx, bce, SRC_PCBASE, CG_OFFSET(bce) - pn2->pn_offset) < 0)
         return false;
 
-    return EmitAtomOp(cx, pn, op, cg, psuffix);
+    return EmitAtomOp(cx, pn, op, bce, psuffix);
 }
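
// Illustrative sketch, not part of this patch: the in-place reversal trick
// EmitPropOp uses on the pn_expr chain of `a.b.c.d`, so emission can walk up
// from the primary expression without recursion and then restore the links.
// The node type is a stand-in for ParseNode.
#include <cassert>
#include <cstdio>

struct ToyDotNode {
    const char *name;
    bool isDot;           // stands in for isKind(TOK_DOT)
    ToyDotNode *down;     // plays the role of pn_expr
};

int main() {
    ToyDotNode a = { "a", false, NULL };
    ToyDotNode b = { "b", true, &a };
    ToyDotNode c = { "c", true, &b };
    ToyDotNode d = { "d", true, &c };   // pn is the outermost `.d`; pn2 is &c

    // First pass: point the links up instead of down (the pndot/pnup/pndown walk).
    ToyDotNode *pndot = &c, *pnup = NULL, *pndown;
    for (;;) {
        pndown = pndot->down;
        pndot->down = pnup;
        if (!pndown->isDot)             // pndown is the primary expression
            break;
        pnup = pndot;
        pndot = pndown;
    }

    // Second pass: emit bottom-up and restore the original links.
    std::printf("emit %s\n", pndown->name);
    do {
        std::printf("emit getprop .%s\n", pndot->name);
        pnup = pndot->down;
        pndot->down = pndown;
        pndown = pndot;
    } while ((pndot = pnup) != NULL);
    std::printf("emit getprop .%s\n", d.name);  // the outermost dot, via EmitAtomOp above

    assert(b.down == &a && c.down == &b && d.down == &c);   // links restored
    return 0;
}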
 
 static bool
-EmitPropIncDec(JSContext *cx, ParseNode *pn, JSOp op, CodeGenerator *cg)
+EmitPropIncDec(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
 {
     JSOp suffix = JSOP_NOP;
-    if (!EmitPropOp(cx, pn, op, cg, false, &suffix))
+    if (!EmitPropOp(cx, pn, op, bce, false, &suffix))
         return false;
-    if (Emit1(cx, cg, JSOP_NOP) < 0)
+    if (Emit1(cx, bce, JSOP_NOP) < 0)
         return false;
 
     /*
      * The stack is the same depth before/after INCPROP, so no balancing to do
      * before the decomposed version.
      */
-    int start = CG_OFFSET(cg);
-
-    if (suffix != JSOP_NOP && Emit1(cx, cg, suffix) < 0)
+    int start = CG_OFFSET(bce);
+
+    if (suffix != JSOP_NOP && Emit1(cx, bce, suffix) < 0)
         return false;
 
     const JSCodeSpec *cs = &js_CodeSpec[op];
     JS_ASSERT(cs->format & JOF_PROP);
     JS_ASSERT(cs->format & (JOF_INC | JOF_DEC));
 
     bool post = (cs->format & JOF_POST);
     JSOp binop = (cs->format & JOF_INC) ? JSOP_ADD : JSOP_SUB;
 
-                                                   // OBJ
-    if (Emit1(cx, cg, JSOP_DUP) < 0)               // OBJ OBJ
+                                                    // OBJ
+    if (Emit1(cx, bce, JSOP_DUP) < 0)               // OBJ OBJ
         return false;
-    if (!EmitAtomOp(cx, pn, JSOP_GETPROP, cg))     // OBJ V
+    if (!EmitAtomOp(cx, pn, JSOP_GETPROP, bce))     // OBJ V
         return false;
-    if (Emit1(cx, cg, JSOP_POS) < 0)               // OBJ N
+    if (Emit1(cx, bce, JSOP_POS) < 0)               // OBJ N
         return false;
-    if (post && Emit1(cx, cg, JSOP_DUP) < 0)       // OBJ N? N
+    if (post && Emit1(cx, bce, JSOP_DUP) < 0)       // OBJ N? N
         return false;
-    if (Emit1(cx, cg, JSOP_ONE) < 0)               // OBJ N? N 1
+    if (Emit1(cx, bce, JSOP_ONE) < 0)               // OBJ N? N 1
         return false;
-    if (Emit1(cx, cg, binop) < 0)                  // OBJ N? N+1
+    if (Emit1(cx, bce, binop) < 0)                  // OBJ N? N+1
         return false;
 
     if (post) {
-        if (Emit2(cx, cg, JSOP_PICK, (jsbytecode)2) < 0) // N? N+1 OBJ
+        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N? N+1 OBJ
             return false;
-        if (Emit1(cx, cg, JSOP_SWAP) < 0)                // N? OBJ N+1
+        if (Emit1(cx, bce, JSOP_SWAP) < 0)                  // N? OBJ N+1
             return false;
     }
 
-    if (!EmitAtomOp(cx, pn, JSOP_SETPROP, cg))     // N? N+1
+    if (!EmitAtomOp(cx, pn, JSOP_SETPROP, bce))     // N? N+1
         return false;
-    if (post && Emit1(cx, cg, JSOP_POP) < 0)       // RESULT
+    if (post && Emit1(cx, bce, JSOP_POP) < 0)       // RESULT
         return false;
 
-    UpdateDecomposeLength(cg, start);
-
-    if (suffix != JSOP_NOP && Emit1(cx, cg, suffix) < 0)
+    UpdateDecomposeLength(bce, start);
+
+    if (suffix != JSOP_NOP && Emit1(cx, bce, suffix) < 0)
         return false;
 
     return true;
 }
 
 static bool
-EmitNameIncDec(JSContext *cx, ParseNode *pn, JSOp op, CodeGenerator *cg)
+EmitNameIncDec(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
 {
     JSOp suffix = JSOP_NOP;
-    if (!EmitAtomOp(cx, pn, op, cg, &suffix))
+    if (!EmitAtomOp(cx, pn, op, bce, &suffix))
         return false;
-    if (Emit1(cx, cg, JSOP_NOP) < 0)
+    if (Emit1(cx, bce, JSOP_NOP) < 0)
         return false;
 
     /* Remove the result to restore the stack depth before the INCNAME. */
-    cg->stackDepth--;
-
-    int start = CG_OFFSET(cg);
-
-    if (suffix != JSOP_NOP && Emit1(cx, cg, suffix) < 0)
+    bce->stackDepth--;
+
+    int start = CG_OFFSET(bce);
+
+    if (suffix != JSOP_NOP && Emit1(cx, bce, suffix) < 0)
         return false;
 
     const JSCodeSpec *cs = &js_CodeSpec[op];
     JS_ASSERT((cs->format & JOF_NAME) || (cs->format & JOF_GNAME));
     JS_ASSERT(cs->format & (JOF_INC | JOF_DEC));
 
     bool global = (cs->format & JOF_GNAME);
     bool post = (cs->format & JOF_POST);
     JSOp binop = (cs->format & JOF_INC) ? JSOP_ADD : JSOP_SUB;
 
-    if (!EmitAtomOp(cx, pn, global ? JSOP_BINDGNAME : JSOP_BINDNAME, cg))  // OBJ
+    if (!EmitAtomOp(cx, pn, global ? JSOP_BINDGNAME : JSOP_BINDNAME, bce))  // OBJ
         return false;
-    if (!EmitAtomOp(cx, pn, global ? JSOP_GETGNAME : JSOP_NAME, cg))       // OBJ V
+    if (!EmitAtomOp(cx, pn, global ? JSOP_GETGNAME : JSOP_NAME, bce))       // OBJ V
         return false;
-    if (Emit1(cx, cg, JSOP_POS) < 0)               // OBJ N
+    if (Emit1(cx, bce, JSOP_POS) < 0)               // OBJ N
         return false;
-    if (post && Emit1(cx, cg, JSOP_DUP) < 0)       // OBJ N? N
+    if (post && Emit1(cx, bce, JSOP_DUP) < 0)       // OBJ N? N
         return false;
-    if (Emit1(cx, cg, JSOP_ONE) < 0)               // OBJ N? N 1
+    if (Emit1(cx, bce, JSOP_ONE) < 0)               // OBJ N? N 1
         return false;
-    if (Emit1(cx, cg, binop) < 0)                  // OBJ N? N+1
+    if (Emit1(cx, bce, binop) < 0)                  // OBJ N? N+1
         return false;
 
     if (post) {
-        if (Emit2(cx, cg, JSOP_PICK, (jsbytecode)2) < 0)    // N? N+1 OBJ
+        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N? N+1 OBJ
             return false;
-        if (Emit1(cx, cg, JSOP_SWAP) < 0)                   // N? OBJ N+1
+        if (Emit1(cx, bce, JSOP_SWAP) < 0)                  // N? OBJ N+1
             return false;
     }
 
-    if (!EmitAtomOp(cx, pn, global ? JSOP_SETGNAME : JSOP_SETNAME, cg))     // N? N+1
+    if (!EmitAtomOp(cx, pn, global ? JSOP_SETGNAME : JSOP_SETNAME, bce))    // N? N+1
         return false;
-    if (post && Emit1(cx, cg, JSOP_POP) < 0)       // RESULT
+    if (post && Emit1(cx, bce, JSOP_POP) < 0)       // RESULT
         return false;
 
-    UpdateDecomposeLength(cg, start);
-
-    if (suffix != JSOP_NOP && Emit1(cx, cg, suffix) < 0)
+    UpdateDecomposeLength(bce, start);
+
+    if (suffix != JSOP_NOP && Emit1(cx, bce, suffix) < 0)
         return false;
 
     return true;
 }
 
 static JSBool
-EmitElemOp(JSContext *cx, ParseNode *pn, JSOp op, CodeGenerator *cg)
+EmitElemOp(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
 {
     ptrdiff_t top;
     ParseNode *left, *right, *next;
     int32_t slot;
 
-    top = CG_OFFSET(cg);
+    top = CG_OFFSET(bce);
     if (pn->isArity(PN_LIST)) {
         /* Left-associative operator chain to avoid too much recursion. */
         JS_ASSERT(pn->isOp(JSOP_GETELEM));
         JS_ASSERT(pn->pn_count >= 3);
         left = pn->pn_head;
         right = pn->last();
         next = left->pn_next;
         JS_ASSERT(next != right);
 
         /*
          * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
          * one or more index expression and JSOP_GETELEM op pairs. If type
          * inference is enabled this is optimized separately.
          */
         if (left->isKind(TOK_NAME) && next->isKind(TOK_NUMBER)) {
-            if (!BindNameToSlot(cx, cg, left))
+            if (!BindNameToSlot(cx, bce, left))
                 return false;
             if (left->isOp(JSOP_ARGUMENTS) &&
                 JSDOUBLE_IS_INT32(next->pn_dval, &slot) &&
                 jsuint(slot) < JS_BIT(16) &&
                 !cx->typeInferenceEnabled() &&
-                (!cg->inStrictMode() ||
-                 (!cg->mutatesParameter() && !cg->callsEval()))) {
+                (!bce->inStrictMode() ||
+                 (!bce->mutatesParameter() && !bce->callsEval()))) {
                 /*
                 * arguments[i]() requires the arguments object as "this".
                 * Check that we never generate a list for that usage.
                  */
                 JS_ASSERT(op != JSOP_CALLELEM || next->pn_next);
                 left->pn_offset = next->pn_offset = top;
                 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
                 left = next;
@@ -3108,48 +3107,48 @@ EmitElemOp(JSContext *cx, ParseNode *pn,
          * Check whether we generated JSOP_ARGSUB, just above, and have only
          * one more index expression to emit.  Given arguments[0][j], we must
          * skip the while loop altogether, falling through to emit code for j
          * (in the subtree referenced by right), followed by the annotated op,
          * at the bottom of this function.
          */
         JS_ASSERT(next != right || pn->pn_count == 3);
         if (left == pn->pn_head) {
-            if (!EmitTree(cx, cg, left))
+            if (!EmitTree(cx, bce, left))
                 return false;
         }
         while (next != right) {
-            if (!EmitTree(cx, cg, next))
+            if (!EmitTree(cx, bce, next))
                 return false;
-            if (NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
+            if (NewSrcNote2(cx, bce, SRC_PCBASE, CG_OFFSET(bce) - top) < 0)
                 return false;
-            if (!EmitElemOpBase(cx, cg, JSOP_GETELEM))
+            if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))
                 return false;
             next = next->pn_next;
         }
     } else {
         if (pn->isArity(PN_NAME)) {
             /*
              * Set left and right so pn appears to be a TOK_LB node, instead
              * of a TOK_DOT node.  See the TOK_FOR/IN case in EmitTree, and
              * EmitDestructuringOps nearer below.  In the destructuring case,
              * the base expression (pn_expr) of the name may be null, which
              * means we have to emit a JSOP_BINDNAME.
              */
             left = pn->maybeExpr();
             if (!left) {
-                left = NullaryNode::create(cg);
+                left = NullaryNode::create(bce);
                 if (!left)
                     return false;
                 left->setKind(TOK_STRING);
                 left->setOp(JSOP_BINDNAME);
                 left->pn_pos = pn->pn_pos;
                 left->pn_atom = pn->pn_atom;
             }
-            right = NullaryNode::create(cg);
+            right = NullaryNode::create(bce);
             if (!right)
                 return false;
             right->setKind(TOK_STRING);
             right->setOp(IsIdentifier(pn->pn_atom) ? JSOP_QNAMEPART : JSOP_STRING);
             right->pn_pos = pn->pn_pos;
             right->pn_atom = pn->pn_atom;
         } else {
             JS_ASSERT(pn->isArity(PN_BINARY));
@@ -3159,147 +3158,147 @@ EmitElemOp(JSContext *cx, ParseNode *pn,
 
         /*
          * Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. If type
          * inference is enabled this is optimized separately.
          */
         if (op == JSOP_GETELEM &&
             left->isKind(TOK_NAME) &&
             right->isKind(TOK_NUMBER)) {
-            if (!BindNameToSlot(cx, cg, left))
+            if (!BindNameToSlot(cx, bce, left))
                 return false;
             if (left->isOp(JSOP_ARGUMENTS) &&
                 JSDOUBLE_IS_INT32(right->pn_dval, &slot) &&
                 jsuint(slot) < JS_BIT(16) &&
                 !cx->typeInferenceEnabled() &&
-                (!cg->inStrictMode() ||
-                 (!cg->mutatesParameter() && !cg->callsEval()))) {
+                (!bce->inStrictMode() ||
+                 (!bce->mutatesParameter() && !bce->callsEval()))) {
                 left->pn_offset = right->pn_offset = top;
                 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
                 return true;
             }
         }
 
-        if (!EmitTree(cx, cg, left))
+        if (!EmitTree(cx, bce, left))
             return false;
     }
 
     /* The right side of the descendant operator is implicitly quoted. */
     JS_ASSERT(op != JSOP_DESCENDANTS || !right->isKind(TOK_STRING) ||
               right->isOp(JSOP_QNAMEPART));
-    if (!EmitTree(cx, cg, right))
+    if (!EmitTree(cx, bce, right))
         return false;
-    if (NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
+    if (NewSrcNote2(cx, bce, SRC_PCBASE, CG_OFFSET(bce) - top) < 0)
         return false;
-    return EmitElemOpBase(cx, cg, op);
+    return EmitElemOpBase(cx, bce, op);
 }
 
 static bool
-EmitElemIncDec(JSContext *cx, ParseNode *pn, JSOp op, CodeGenerator *cg)
+EmitElemIncDec(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
 {
     if (pn) {
-        if (!EmitElemOp(cx, pn, op, cg))
+        if (!EmitElemOp(cx, pn, op, bce))
             return false;
     } else {
-        if (!EmitElemOpBase(cx, cg, op))
+        if (!EmitElemOpBase(cx, bce, op))
             return false;
     }
-    if (Emit1(cx, cg, JSOP_NOP) < 0)
+    if (Emit1(cx, bce, JSOP_NOP) < 0)
         return false;
 
     /* INCELEM pops two values and pushes one, so restore the initial depth. */
-    cg->stackDepth++;
-
-    int start = CG_OFFSET(cg);
+    bce->stackDepth++;
+
+    int start = CG_OFFSET(bce);
 
     const JSCodeSpec *cs = &js_CodeSpec[op];
     JS_ASSERT(cs->format & JOF_ELEM);
     JS_ASSERT(cs->format & (JOF_INC | JOF_DEC));
 
     bool post = (cs->format & JOF_POST);
     JSOp binop = (cs->format & JOF_INC) ? JSOP_ADD : JSOP_SUB;
 
     /*
      * We need to convert the key to an object id first, so that we do not do
      * it inside both the GETELEM and the SETELEM.
      */
-                                                 // OBJ KEY*
-    if (Emit1(cx, cg, JSOP_TOID) < 0)            // OBJ KEY
+                                                    // OBJ KEY*
+    if (Emit1(cx, bce, JSOP_TOID) < 0)              // OBJ KEY
         return false;
-    if (Emit1(cx, cg, JSOP_DUP2) < 0)            // OBJ KEY OBJ KEY
+    if (Emit1(cx, bce, JSOP_DUP2) < 0)              // OBJ KEY OBJ KEY
         return false;
-    if (!EmitElemOpBase(cx, cg, JSOP_GETELEM))   // OBJ KEY V
+    if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))     // OBJ KEY V
         return false;
-    if (Emit1(cx, cg, JSOP_POS) < 0)             // OBJ KEY N
+    if (Emit1(cx, bce, JSOP_POS) < 0)               // OBJ KEY N
         return false;
-    if (post && Emit1(cx, cg, JSOP_DUP) < 0)     // OBJ KEY N? N
+    if (post && Emit1(cx, bce, JSOP_DUP) < 0)       // OBJ KEY N? N
         return false;
-    if (Emit1(cx, cg, JSOP_ONE) < 0)             // OBJ KEY N? N 1
+    if (Emit1(cx, bce, JSOP_ONE) < 0)               // OBJ KEY N? N 1
         return false;
-    if (Emit1(cx, cg, binop) < 0)                // OBJ KEY N? N+1
+    if (Emit1(cx, bce, binop) < 0)                  // OBJ KEY N? N+1
         return false;
 
     if (post) {
-        if (Emit2(cx, cg, JSOP_PICK, (jsbytecode)3) < 0)    // KEY N N+1 OBJ
+        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)   // KEY N N+1 OBJ
             return false;
-        if (Emit2(cx, cg, JSOP_PICK, (jsbytecode)3) < 0)    // N N+1 OBJ KEY
+        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)   // N N+1 OBJ KEY
             return false;
-        if (Emit2(cx, cg, JSOP_PICK, (jsbytecode)2) < 0)    // N OBJ KEY N+1
+        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N OBJ KEY N+1
             return false;
     }
 
-    if (!EmitElemOpBase(cx, cg, JSOP_SETELEM))   // N? N+1
+    if (!EmitElemOpBase(cx, bce, JSOP_SETELEM))     // N? N+1
         return false;
-    if (post && Emit1(cx, cg, JSOP_POP) < 0)     // RESULT
+    if (post && Emit1(cx, bce, JSOP_POP) < 0)       // RESULT
         return false;
 
-    UpdateDecomposeLength(cg, start);
+    UpdateDecomposeLength(bce, start);
 
     return true;
 }
 
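 /*
  * Push the numeric literal dval using the shortest form: JSOP_ZERO, JSOP_ONE,
  * JSOP_INT8, JSOP_UINT16, JSOP_UINT24 or JSOP_INT32 when dval fits in an
  * int32, otherwise JSOP_DOUBLE indexing an entry appended to the constant
  * list.
  */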
 static JSBool
-EmitNumberOp(JSContext *cx, jsdouble dval, CodeGenerator *cg)
+EmitNumberOp(JSContext *cx, jsdouble dval, BytecodeEmitter *bce)
 {
     int32_t ival;
     uint32 u;
     ptrdiff_t off;
     jsbytecode *pc;
 
     if (JSDOUBLE_IS_INT32(dval, &ival)) {
         if (ival == 0)
-            return Emit1(cx, cg, JSOP_ZERO) >= 0;
+            return Emit1(cx, bce, JSOP_ZERO) >= 0;
         if (ival == 1)
-            return Emit1(cx, cg, JSOP_ONE) >= 0;
+            return Emit1(cx, bce, JSOP_ONE) >= 0;
         if ((jsint)(int8)ival == ival)
-            return Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;
+            return Emit2(cx, bce, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;
 
         u = (uint32)ival;
         if (u < JS_BIT(16)) {
             EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
         } else if (u < JS_BIT(24)) {
-            off = EmitN(cx, cg, JSOP_UINT24, 3);
+            off = EmitN(cx, bce, JSOP_UINT24, 3);
             if (off < 0)
                 return JS_FALSE;
-            pc = CG_CODE(cg, off);
+            pc = CG_CODE(bce, off);
             SET_UINT24(pc, u);
         } else {
-            off = EmitN(cx, cg, JSOP_INT32, 4);
+            off = EmitN(cx, bce, JSOP_INT32, 4);
             if (off < 0)
                 return JS_FALSE;
-            pc = CG_CODE(cg, off);
+            pc = CG_CODE(bce, off);
             SET_INT32(pc, ival);
         }
         return JS_TRUE;
     }
 
-    if (!cg->constList.append(DoubleValue(dval)))
+    if (!bce->constList.append(DoubleValue(dval)))
         return JS_FALSE;
 
-    return EmitIndexOp(cx, JSOP_DOUBLE, cg->constList.length() - 1, cg);
+    return EmitIndexOp(cx, JSOP_DOUBLE, bce->constList.length() - 1, bce);
 }
 
 /*
  * To avoid bloating all parse nodes for the special case of switch, values are
  * allocated in the temp pool and pointed to by the parse node. These values
  * are not currently recycled (like parse nodes) and the temp pool is only
  * flushed at the end of compiling a script, so these values are technically
  * leaked. This would only be a problem for scripts containing a large number
@@ -3314,60 +3313,60 @@ AllocateSwitchConstant(JSContext *cx)
 /*
  * Sometimes, let-slots are pushed to the JS stack before we logically enter
  * the let scope. For example,
  *     let (x = EXPR) BODY
  * compiles to roughly {enterblock; EXPR; setlocal x; BODY; leaveblock} even
  * though EXPR is evaluated in the enclosing scope; it does not see x.
  *
  * In those cases we use TempPopScope around the code to emit EXPR. It
- * temporarily removes the let-scope from the CodeGenerator's scope stack and
+ * temporarily removes the let-scope from the BytecodeEmitter's scope stack and
  * emits extra bytecode to ensure that js::GetBlockChain also finds the correct
  * scope at run time.
  */
 class TempPopScope {
     StmtInfo *savedStmt;
     StmtInfo *savedScopeStmt;
     ObjectBox *savedBlockBox;
 
   public:
     TempPopScope() : savedStmt(NULL), savedScopeStmt(NULL), savedBlockBox(NULL) {}
 
-    bool popBlock(JSContext *cx, CodeGenerator *cg) {
-        savedStmt = cg->topStmt;
-        savedScopeStmt = cg->topScopeStmt;
-        savedBlockBox = cg->blockChainBox;
-
-        if (cg->topStmt->type == STMT_FOR_LOOP || cg->topStmt->type == STMT_FOR_IN_LOOP)
-            PopStatementTC(cg);
-        JS_ASSERT(STMT_LINKS_SCOPE(cg->topStmt));
-        JS_ASSERT(cg->topStmt->flags & SIF_SCOPE);
-        PopStatementTC(cg);
+    bool popBlock(JSContext *cx, BytecodeEmitter *bce) {
+        savedStmt = bce->topStmt;
+        savedScopeStmt = bce->topScopeStmt;
+        savedBlockBox = bce->blockChainBox;
+
+        if (bce->topStmt->type == STMT_FOR_LOOP || bce->topStmt->type == STMT_FOR_IN_LOOP)
+            PopStatementTC(bce);
+        JS_ASSERT(STMT_LINKS_SCOPE(bce->topStmt));
+        JS_ASSERT(bce->topStmt->flags & SIF_SCOPE);
+        PopStatementTC(bce);
 
         /*
          * Since we have changed the block chain, emit an instruction marking
          * the change for the benefit of dynamic GetScopeChain callers such as
          * the debugger.
          *
          * FIXME bug 671360 - The JSOP_NOP instruction should not be necessary.
          */
-        return Emit1(cx, cg, JSOP_NOP) >= 0 && EmitBlockChain(cx, cg);
-    }
-
-    bool repushBlock(JSContext *cx, CodeGenerator *cg) {
+        return Emit1(cx, bce, JSOP_NOP) >= 0 && EmitBlockChain(cx, bce);
+    }
+
+    bool repushBlock(JSContext *cx, BytecodeEmitter *bce) {
         JS_ASSERT(savedStmt);
-        cg->topStmt = savedStmt;
-        cg->topScopeStmt = savedScopeStmt;
-        cg->blockChainBox = savedBlockBox;
-        return Emit1(cx, cg, JSOP_NOP) >= 0 && EmitBlockChain(cx, cg);
+        bce->topStmt = savedStmt;
+        bce->topScopeStmt = savedScopeStmt;
+        bce->blockChainBox = savedBlockBox;
+        return Emit1(cx, bce, JSOP_NOP) >= 0 && EmitBlockChain(cx, bce);
     }
 };
 
 static JSBool
-EmitSwitch(JSContext *cx, CodeGenerator *cg, ParseNode *pn)
+EmitSwitch(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
 {
     JSOp switchOp;
     JSBool ok, hasDefault, constPropagated;
     ptrdiff_t top, off, defaultOffset;
     ParseNode *pn2, *pn3, *pn4;
     uint32 caseCount, tableLength;
     ParseNode **table;
     int32_t i, low, high;
@@ -3397,61 +3396,61 @@ EmitSwitch(JSContext *cx, CodeGenerator 
     if (pn2->isKind(TOK_LEXICALSCOPE)) {
         /*
          * Push the body's block scope before discriminant code-gen to reflect
          * the order of slots on the stack. The block's locals must lie under
          * the discriminant on the stack so that case-dispatch bytecodes can
          * find the discriminant on top of stack.
          */
         box = pn2->pn_objbox;
-        PushBlockScope(cg, &stmtInfo, box, -1);
+        PushBlockScope(bce, &stmtInfo, box, -1);
         stmtInfo.type = STMT_SWITCH;
 
         /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
-        if (!EmitEnterBlock(cx, pn2, cg))
+        if (!EmitEnterBlock(cx, pn2, bce))
             return JS_FALSE;
 
         /*
          * Pop the switch's statement info around discriminant code-gen, which
          * belongs in the enclosing scope.
          */
-        if (!tps.popBlock(cx, cg))
+        if (!tps.popBlock(cx, bce))
             return JS_FALSE;
     }
 #ifdef __GNUC__
     else {
         box = NULL;
     }
 #endif
 #endif
 
     /*
      * Emit code for the discriminant first (or nearly first, in the case of a
      * switch whose body is a block scope).
      */
-    if (!EmitTree(cx, cg, pn->pn_left))
+    if (!EmitTree(cx, bce, pn->pn_left))
         return JS_FALSE;
 
     /* Switch bytecodes run from here till end of final case. */
-    top = CG_OFFSET(cg);
+    top = CG_OFFSET(bce);
 #if !JS_HAS_BLOCK_SCOPE
-    PushStatement(cg, &stmtInfo, STMT_SWITCH, top);
+    PushStatement(bce, &stmtInfo, STMT_SWITCH, top);
 #else
     if (pn2->isKind(TOK_LC)) {
-        PushStatement(cg, &stmtInfo, STMT_SWITCH, top);
+        PushStatement(bce, &stmtInfo, STMT_SWITCH, top);
     } else {
         /* Re-push the switch's statement info record. */
-        if (!tps.repushBlock(cx, cg))
+        if (!tps.repushBlock(cx, bce))
             return JS_FALSE;
 
         /*
          * Set the statement info record's idea of top. Reset top too, since
          * repushBlock emits code.
          */
-        stmtInfo.update = top = CG_OFFSET(cg);
+        stmtInfo.update = top = CG_OFFSET(bce);
 
         /* Advance pn2 to refer to the switch case list. */
         pn2 = pn2->expr();
     }
 #endif
 
     caseCount = pn2->pn_count;
     tableLength = 0;
@@ -3492,17 +3491,17 @@ EmitSwitch(JSContext *cx, CodeGenerator 
               case TOK_NUMBER:
                 constVal.setNumber(pn4->pn_dval);
                 break;
               case TOK_STRING:
                 constVal.setString(pn4->pn_atom);
                 break;
               case TOK_NAME:
                 if (!pn4->maybeExpr()) {
-                    ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &constVal);
+                    ok = LookupCompileTimeConstant(cx, bce, pn4->pn_atom, &constVal);
                     if (!ok)
                         goto release;
                     if (!constVal.isMagic(JS_NO_CONSTANT)) {
                         if (constVal.isObject()) {
                             /*
                              * XXX JSOP_LOOKUPSWITCH does not support const-
                              * propagated object values, see bug 407186.
                              */
@@ -3606,26 +3605,26 @@ EmitSwitch(JSContext *cx, CodeGenerator 
                 switchOp = JSOP_LOOKUPSWITCH;
         } else if (switchOp == JSOP_LOOKUPSWITCH) {
             /*
              * Lookup switch supports only atom indexes below 64K limit.
              * Conservatively estimate the maximum possible index during
              * switch generation and use conditional switch if it exceeds
              * the limit.
              */
-            if (caseCount + cg->constList.length() > JS_BIT(16))
+            if (caseCount + bce->constList.length() > JS_BIT(16))
                 switchOp = JSOP_CONDSWITCH;
         }
     }
 
     /*
      * Emit a note with two offsets: first tells total switch code length,
      * second tells offset to first JSOP_CASE if condswitch.
      */
-    noteIndex = NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
+    noteIndex = NewSrcNote3(cx, bce, SRC_SWITCH, 0, 0);
     if (noteIndex < 0)
         return JS_FALSE;
 
     if (switchOp == JSOP_CONDSWITCH) {
         /*
          * 0 bytes of immediate for unoptimized ECMAv2 switch.
          */
         switchSize = 0;
@@ -3650,78 +3649,78 @@ EmitSwitch(JSContext *cx, CodeGenerator 
      * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
      * to emit the immediate operand(s) by which bytecode readers such as
      * BuildSpanDepTable discover the length of the switch opcode *before*
      * calling SetJumpOffset (which may call BuildSpanDepTable).  It's
      * also important to zero all unknown jump offset immediate operands,
      * so they can be converted to span dependencies with null targets to
      * be computed later (EmitN zeros switchSize bytes after switchOp).
      */
-    if (EmitN(cx, cg, switchOp, switchSize) < 0)
+    if (EmitN(cx, bce, switchOp, switchSize) < 0)
         return JS_FALSE;
 
     off = -1;
     if (switchOp == JSOP_CONDSWITCH) {
         intN caseNoteIndex = -1;
         JSBool beforeCases = JS_TRUE;
 
         /* Emit code for evaluating cases and jumping to case statements. */
         for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
             pn4 = pn3->pn_left;
-            if (pn4 && !EmitTree(cx, cg, pn4))
+            if (pn4 && !EmitTree(cx, bce, pn4))
                 return JS_FALSE;
             if (caseNoteIndex >= 0) {
                 /* off is the previous JSOP_CASE's bytecode offset. */
-                if (!SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0, CG_OFFSET(cg) - off))
+                if (!SetSrcNoteOffset(cx, bce, (uintN)caseNoteIndex, 0, CG_OFFSET(bce) - off))
                     return JS_FALSE;
             }
             if (!pn4) {
                 JS_ASSERT(pn3->isKind(TOK_DEFAULT));
                 continue;
             }
-            caseNoteIndex = NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
+            caseNoteIndex = NewSrcNote2(cx, bce, SRC_PCDELTA, 0);
             if (caseNoteIndex < 0)
                 return JS_FALSE;
-            off = EmitJump(cx, cg, JSOP_CASE, 0);
+            off = EmitJump(cx, bce, JSOP_CASE, 0);
             if (off < 0)
                 return JS_FALSE;
             pn3->pn_offset = off;
             if (beforeCases) {
                 uintN noteCount, noteCountDelta;
 
                 /* Switch note's second offset is to first JSOP_CASE. */
-                noteCount = CG_NOTE_COUNT(cg);
-                if (!SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1, off - top))
+                noteCount = CG_NOTE_COUNT(bce);
+                if (!SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 1, off - top))
                     return JS_FALSE;
-                noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
+                noteCountDelta = CG_NOTE_COUNT(bce) - noteCount;
                 if (noteCountDelta != 0)
                     caseNoteIndex += noteCountDelta;
                 beforeCases = JS_FALSE;
             }
         }
 
         /*
          * If we didn't have an explicit default (which could fall in between
          * cases, preventing us from fusing this SetSrcNoteOffset with the call
          * in the loop above), link the last case to the implicit default for
          * the decompiler.
          */
         if (!hasDefault &&
             caseNoteIndex >= 0 &&
-            !SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0, CG_OFFSET(cg) - off))
+            !SetSrcNoteOffset(cx, bce, (uintN)caseNoteIndex, 0, CG_OFFSET(bce) - off))
         {
             return JS_FALSE;
         }
 
         /* Emit default even if no explicit default statement. */
-        defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
+        defaultOffset = EmitJump(cx, bce, JSOP_DEFAULT, 0);
         if (defaultOffset < 0)
             return JS_FALSE;
     } else {
-        pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
+        pc = CG_CODE(bce, top + JUMP_OFFSET_LEN);
 
         if (switchOp == JSOP_TABLESWITCH) {
             /* Fill in switch bounds, which we know fit in 16-bit offsets. */
             SET_JUMP_OFFSET(pc, low);
             pc += JUMP_OFFSET_LEN;
             SET_JUMP_OFFSET(pc, high);
             pc += JUMP_OFFSET_LEN;
 
@@ -3754,360 +3753,360 @@ EmitSwitch(JSContext *cx, CodeGenerator 
         }
 
         /*
          * After this point, all control flow involving JSOP_TABLESWITCH
          * must set ok and goto out to exit this function.  To keep things
          * simple, all switchOp cases exit that way.
          */
         MUST_FLOW_THROUGH("out");
-        if (cg->spanDeps) {
+        if (bce->spanDeps) {
             /*
              * We have already generated at least one big jump so we must
              * explicitly add span dependencies for the switch jumps. When
              * called below, SetJumpOffset can only do it when patching the
-             * first big jump or when cg->spanDeps is null.
+             * first big jump or when bce->spanDeps is null.
              */
-            if (!AddSwitchSpanDeps(cx, cg, CG_CODE(cg, top)))
+            if (!AddSwitchSpanDeps(cx, bce, CG_CODE(bce, top)))
                 goto bad;
         }
 
         if (constPropagated) {
             /*
              * Skip switchOp, as we are not setting jump offsets in the two
-             * for loops below.  We'll restore CG_NEXT(cg) from savepc after,
+             * for loops below.  We'll restore CG_NEXT(bce) from savepc after,
              * unless there was an error.
              */
-            savepc = CG_NEXT(cg);
-            CG_NEXT(cg) = pc + 1;
+            savepc = CG_NEXT(bce);
+            CG_NEXT(bce) = pc + 1;
             if (switchOp == JSOP_TABLESWITCH) {
                 for (i = 0; i < (jsint)tableLength; i++) {
                     pn3 = table[i];
                     if (pn3 &&
                         (pn4 = pn3->pn_left) != NULL &&
                         pn4->isKind(TOK_NAME)) {
                         /* Note a propagated constant with the const's name. */
                         JS_ASSERT(!pn4->maybeExpr());
                         jsatomid index;
-                        if (!cg->makeAtomIndex(pn4->pn_atom, &index))
+                        if (!bce->makeAtomIndex(pn4->pn_atom, &index))
                             goto bad;
-                        CG_NEXT(cg) = pc;
-                        if (NewSrcNote2(cx, cg, SRC_LABEL, ptrdiff_t(index)) < 0)
+                        CG_NEXT(bce) = pc;
+                        if (NewSrcNote2(cx, bce, SRC_LABEL, ptrdiff_t(index)) < 0)
                             goto bad;
                     }
                     pc += JUMP_OFFSET_LEN;
                 }
             } else {
                 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
                     pn4 = pn3->pn_left;
                     if (pn4 && pn4->isKind(TOK_NAME)) {
                         /* Note a propagated constant with the const's name. */
                         JS_ASSERT(!pn4->maybeExpr());
                         jsatomid index;
-                        if (!cg->makeAtomIndex(pn4->pn_atom, &index))
+                        if (!bce->makeAtomIndex(pn4->pn_atom, &index))
                             goto bad;
-                        CG_NEXT(cg) = pc;
-                        if (NewSrcNote2(cx, cg, SRC_LABEL, ptrdiff_t(index)) < 0)
+                        CG_NEXT(bce) = pc;
+                        if (NewSrcNote2(cx, bce, SRC_LABEL, ptrdiff_t(index)) < 0)
                             goto bad;
                     }
                     pc += INDEX_LEN + JUMP_OFFSET_LEN;
                 }
             }
-            CG_NEXT(cg) = savepc;
+            CG_NEXT(bce) = savepc;
         }
     }
 
     /* Emit code for each case's statements, copying pn_offset up to pn3. */
     for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
         if (switchOp == JSOP_CONDSWITCH && !pn3->isKind(TOK_DEFAULT))
-            CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, cg, pn3->pn_offset, goto bad);
+            CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, bce, pn3->pn_offset, goto bad);
         pn4 = pn3->pn_right;
-        ok = EmitTree(cx, cg, pn4);
+        ok = EmitTree(cx, bce, pn4);
         if (!ok)
             goto out;
         pn3->pn_offset = pn4->pn_offset;
         if (pn3->isKind(TOK_DEFAULT))
             off = pn3->pn_offset - top;
     }
 
     if (!hasDefault) {
         /* If no default case, offset for default is to end of switch. */
-        off = CG_OFFSET(cg) - top;
+        off = CG_OFFSET(bce) - top;
     }
 
     /* We better have set "off" by now. */
     JS_ASSERT(off != -1);
 
     /* Set the default offset (to end of switch if no default). */
     if (switchOp == JSOP_CONDSWITCH) {
         pc = NULL;
         JS_ASSERT(defaultOffset != -1);
-        ok = SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset), off - (defaultOffset - top));
+        ok = SetJumpOffset(cx, bce, CG_CODE(bce, defaultOffset), off - (defaultOffset - top));
         if (!ok)
             goto out;
     } else {
-        pc = CG_CODE(cg, top);
-        ok = SetJumpOffset(cx, cg, pc, off);
+        pc = CG_CODE(bce, top);
+        ok = SetJumpOffset(cx, bce, pc, off);
         if (!ok)
             goto out;
         pc += JUMP_OFFSET_LEN;
     }
 
     /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
-    off = CG_OFFSET(cg) - top;
-    ok = SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
+    off = CG_OFFSET(bce) - top;
+    ok = SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 0, off);
     if (!ok)
         goto out;
 
     if (switchOp == JSOP_TABLESWITCH) {
         /* Skip over the already-initialized switch bounds. */
         pc += 2 * JUMP_OFFSET_LEN;
 
         /* Fill in the jump table, if there is one. */
         for (i = 0; i < (jsint)tableLength; i++) {
             pn3 = table[i];
             off = pn3 ? pn3->pn_offset - top : 0;
-            ok = SetJumpOffset(cx, cg, pc, off);
+            ok = SetJumpOffset(cx, bce, pc, off);
             if (!ok)
                 goto out;
             pc += JUMP_OFFSET_LEN;
         }
     } else if (switchOp == JSOP_LOOKUPSWITCH) {
         /* Skip over the already-initialized number of cases. */
         pc += INDEX_LEN;
 
         for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
             if (pn3->isKind(TOK_DEFAULT))
                 continue;
-            if (!cg->constList.append(*pn3->pn_pval))
+            if (!bce->constList.append(*pn3->pn_pval))
                 goto bad;
-            SET_INDEX(pc, cg->constList.length() - 1);
+            SET_INDEX(pc, bce->constList.length() - 1);
             pc += INDEX_LEN;
 
             off = pn3->pn_offset - top;
-            ok = SetJumpOffset(cx, cg, pc, off);
+            ok = SetJumpOffset(cx, bce, pc, off);
             if (!ok)
                 goto out;
             pc += JUMP_OFFSET_LEN;
         }
     }
 
 out:
     if (table)
         cx->free_(table);
     if (ok) {
-        ok = PopStatementCG(cx, cg);
+        ok = PopStatementCG(cx, bce);
 
 #if JS_HAS_BLOCK_SCOPE
         if (ok && pn->pn_right->isKind(TOK_LEXICALSCOPE))
-            ok = EmitLeaveBlock(cx, cg, JSOP_LEAVEBLOCK, box);
+            ok = EmitLeaveBlock(cx, bce, JSOP_LEAVEBLOCK, box);
 #endif
     }
     return ok;
 
 bad:
     ok = JS_FALSE;
     goto out;
 }
 
 JSBool
-frontend::EmitFunctionScript(JSContext *cx, CodeGenerator *cg, ParseNode *body)
+frontend::EmitFunctionScript(JSContext *cx, BytecodeEmitter *bce, ParseNode *body)
 {
     /*
      * The decompiler has assumptions about what may occur immediately after
      * script->main (e.g., in the case of destructuring params). Thus, put the
      * following ops into the range [script->code, script->main). Note:
      * execution starts from script->code, so this has no semantic effect.
      */
 
-    if (cg->flags & TCF_FUN_IS_GENERATOR) {
+    if (bce->flags & TCF_FUN_IS_GENERATOR) {
         /* JSOP_GENERATOR must be the first instruction. */
-        CG_SWITCH_TO_PROLOG(cg);
-        JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
-        if (Emit1(cx, cg, JSOP_GENERATOR) < 0)
+        CG_SWITCH_TO_PROLOG(bce);
+        JS_ASSERT(CG_NEXT(bce) == CG_BASE(bce));
+        if (Emit1(cx, bce, JSOP_GENERATOR) < 0)
             return false;
-        CG_SWITCH_TO_MAIN(cg);
+        CG_SWITCH_TO_MAIN(bce);
     }
 
     /*
      * Strict mode functions' arguments objects copy initial parameter values.
      * We create arguments objects lazily -- but that doesn't work for strict
      * mode functions where a parameter might be modified and arguments might
      * be accessed. For such functions we synthesize an access to arguments to
      * initialize it with the original parameter values.
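      *
      * For example (illustrative), in
      *     function f(a) { "use strict"; a = 2; return arguments[0]; }
      * arguments[0] must still see the value originally passed for a, so the
      * prologue below eagerly touches arguments to snapshot the parameters.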
      */
-    if (cg->needsEagerArguments()) {
-        CG_SWITCH_TO_PROLOG(cg);
-        if (Emit1(cx, cg, JSOP_ARGUMENTS) < 0 || Emit1(cx, cg, JSOP_POP) < 0)
+    if (bce->needsEagerArguments()) {
+        CG_SWITCH_TO_PROLOG(bce);
+        if (Emit1(cx, bce, JSOP_ARGUMENTS) < 0 || Emit1(cx, bce, JSOP_POP) < 0)
             return false;
-        CG_SWITCH_TO_MAIN(cg);
-    }
-
-    if (cg->flags & TCF_FUN_UNBRAND_THIS) {
-        CG_SWITCH_TO_PROLOG(cg);
-        if (Emit1(cx, cg, JSOP_UNBRANDTHIS) < 0)
+        CG_SWITCH_TO_MAIN(bce);
+    }
+
+    if (bce->flags & TCF_FUN_UNBRAND_THIS) {
+        CG_SWITCH_TO_PROLOG(bce);
+        if (Emit1(cx, bce, JSOP_UNBRANDTHIS) < 0)
             return false;
-        CG_SWITCH_TO_MAIN(cg);
-    }
-
-    return EmitTree(cx, cg, body) &&
-           Emit1(cx, cg, JSOP_STOP) >= 0 &&
-           JSScript::NewScriptFromCG(cx, cg);
+        CG_SWITCH_TO_MAIN(bce);
+    }
+
+    return EmitTree(cx, bce, body) &&
+           Emit1(cx, bce, JSOP_STOP) >= 0 &&
+           JSScript::NewScriptFromCG(cx, bce);
 }
 
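 /*
  * If pn names a binding that must be declared at runtime (an atom-addressed
  * binding in global or heavyweight-function code that is not a PND_GVAR),
  * emit prologOp for it in the prolog section; also note closed-over local
  * slots. On success the binding's slot or atom index is stored in *result
  * when result is non-null.
  */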
 static bool
-MaybeEmitVarDecl(JSContext *cx, CodeGenerator *cg, JSOp prologOp,
-                 ParseNode *pn, jsatomid *result)
+MaybeEmitVarDecl(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn,
+                 jsatomid *result)
 {
     jsatomid atomIndex;
 
     if (!pn->pn_cookie.isFree()) {
         atomIndex = pn->pn_cookie.slot();
     } else {
-        if (!cg->makeAtomIndex(pn->pn_atom, &atomIndex))
+        if (!bce->makeAtomIndex(pn->pn_atom, &atomIndex))
             return false;
     }
 
     if (JOF_OPTYPE(pn->getOp()) == JOF_ATOM &&
-        (!cg->inFunction() || (cg->flags & TCF_FUN_HEAVYWEIGHT)) &&
+        (!bce->inFunction() || (bce->flags & TCF_FUN_HEAVYWEIGHT)) &&
         !(pn->pn_dflags & PND_GVAR))
     {
-        CG_SWITCH_TO_PROLOG(cg);
-        if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.begin.lineno))
+        CG_SWITCH_TO_PROLOG(bce);
+        if (!UpdateLineNumberNotes(cx, bce, pn->pn_pos.begin.lineno))
             return false;
         EMIT_INDEX_OP(prologOp, atomIndex);
-        CG_SWITCH_TO_MAIN(cg);
-    }
-
-    if (cg->inFunction() &&
+        CG_SWITCH_TO_MAIN(bce);
+    }
+
+    if (bce->inFunction() &&
         JOF_OPTYPE(pn->getOp()) == JOF_LOCAL &&
-        pn->pn_cookie.slot() < cg->bindings.countVars() &&
-        cg->shouldNoteClosedName(pn))
+        pn->pn_cookie.slot() < bce->bindings.countVars() &&
+        bce->shouldNoteClosedName(pn))
     {
-        if (!cg->closedVars.append(pn->pn_cookie.slot()))
+        if (!bce->closedVars.append(pn->pn_cookie.slot()))
             return false;
     }
 
     if (result)
         *result = atomIndex;
     return true;
 }
 
 #if JS_HAS_DESTRUCTURING
 
 typedef JSBool
-(*DestructuringDeclEmitter)(JSContext *cx, CodeGenerator *cg, JSOp prologOp, ParseNode *pn);
+(*DestructuringDeclEmitter)(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn);
 
 static JSBool
-EmitDestructuringDecl(JSContext *cx, CodeGenerator *cg, JSOp prologOp, ParseNode *pn)
+EmitDestructuringDecl(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn)
 {
     JS_ASSERT(pn->isKind(TOK_NAME));
-    if (!BindNameToSlot(cx, cg, pn))
+    if (!BindNameToSlot(cx, bce, pn))
         return JS_FALSE;
 
     JS_ASSERT(!pn->isOp(JSOP_ARGUMENTS) && !pn->isOp(JSOP_CALLEE));
-    return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
+    return MaybeEmitVarDecl(cx, bce, prologOp, pn, NULL);
 }
 
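 /*
  * Walk an array or object destructuring pattern, emitting a declaration via
  * EmitDestructuringDecl for each bound name and recursing into nested
  * patterns.
  */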
 static JSBool
-EmitDestructuringDecls(JSContext *cx, CodeGenerator *cg, JSOp prologOp, ParseNode *pn)
+EmitDestructuringDecls(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn)
 {
     ParseNode *pn2, *pn3;
     DestructuringDeclEmitter emitter;
 
     if (pn->isKind(TOK_RB)) {
         for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
             if (pn2->isKind(TOK_COMMA))
                 continue;
             emitter = (pn2->isKind(TOK_NAME))
                       ? EmitDestructuringDecl
                       : EmitDestructuringDecls;
-            if (!emitter(cx, cg, prologOp, pn2))
+            if (!emitter(cx, bce, prologOp, pn2))
                 return JS_FALSE;
         }
     } else {
         JS_ASSERT(pn->isKind(TOK_RC));
         for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
             pn3 = pn2->pn_right;
             emitter = pn3->isKind(TOK_NAME) ? EmitDestructuringDecl : EmitDestructuringDecls;
-            if (!emitter(cx, cg, prologOp, pn3))
+            if (!emitter(cx, bce, prologOp, pn3))
                 return JS_FALSE;
         }
     }
     return JS_TRUE;
 }
 
 static JSBool
-EmitDestructuringOpsHelper(JSContext *cx, CodeGenerator *cg, ParseNode *pn);
+EmitDestructuringOpsHelper(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn);
 
 static JSBool
-EmitDestructuringLHS(JSContext *cx, CodeGenerator *cg, ParseNode *pn)
+EmitDestructuringLHS(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
 {
     /*
      * Now emit the lvalue opcode sequence.  If the lvalue is a nested
      * destructuring initialiser-form, call ourselves to handle it, then
      * pop the matched value.  Otherwise emit an lvalue bytecode sequence
      * ending with a JSOP_ENUMELEM or equivalent op.
      */
     if (pn->isKind(TOK_RB) || pn->isKind(TOK_RC)) {
-        if (!EmitDestructuringOpsHelper(cx, cg, pn))
+        if (!EmitDestructuringOpsHelper(cx, bce, pn))
             return JS_FALSE;
-        if (Emit1(cx, cg, JSOP_POP) < 0)
+        if (Emit1(cx, bce, JSOP_POP) < 0)
             return JS_FALSE;
     } else {
         if (pn->isKind(TOK_NAME)) {
-            if (!BindNameToSlot(cx, cg, pn))
+            if (!BindNameToSlot(cx, bce, pn))
                 return JS_FALSE;
             if (pn->isConst() && !pn->isInitialized())
-                return Emit1(cx, cg, JSOP_POP) >= 0;
+                return Emit1(cx, bce, JSOP_POP) >= 0;
         }
 
         switch (pn->getOp()) {
           case JSOP_SETNAME:
           case JSOP_SETGNAME:
             /*
              * NB: pn is a PN_NAME node, not a PN_BINARY.  Nevertheless,
              * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
              * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
              */
-            if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, cg))
+            if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, bce))
                 return JS_FALSE;
             break;
 
           case JSOP_SETCONST:
-            if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, cg))
+            if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, bce))
                 return JS_FALSE;
             break;
 
           case JSOP_SETLOCAL:
           {
             jsuint slot = pn->pn_cookie.asInteger();
             EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, slot);
             break;
           }
 
           case JSOP_SETARG:
           {
             jsuint slot = pn->pn_cookie.asInteger();
             EMIT_UINT16_IMM_OP(pn->getOp(), slot);
-            if (Emit1(cx, cg, JSOP_POP) < 0)
+            if (Emit1(cx, bce, JSOP_POP) < 0)
                 return JS_FALSE;
             break;
           }
 
           default:
           {
             ptrdiff_t top;
 
-            top = CG_OFFSET(cg);
-            if (!EmitTree(cx, cg, pn))
+            top = CG_OFFSET(bce);
+            if (!EmitTree(cx, bce, pn))
                 return JS_FALSE;
-            if (NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
+            if (NewSrcNote2(cx, bce, SRC_PCBASE, CG_OFFSET(bce) - top) < 0)
                 return JS_FALSE;
-            if (!EmitElemOpBase(cx, cg, JSOP_ENUMELEM))
+            if (!EmitElemOpBase(cx, bce, JSOP_ENUMELEM))
                 return JS_FALSE;
             break;
           }
 
           case JSOP_ENUMELEM:
             JS_ASSERT(0);
         }
     }
@@ -4118,103 +4117,103 @@ EmitDestructuringLHS(JSContext *cx, Code
 /*
  * Recursive helper for EmitDestructuringOps.
  *
  * Given a value to destructure on the stack, walk over an object or array
  * initialiser at pn, emitting bytecodes to match property values and store
  * them in the lvalues identified by the matched property names.
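  *
  * For example, given var [a, b] = rhs (names illustrative), the value of
  * rhs stays on the stack while each element is fetched with JSOP_GETELEM
  * and handed to EmitDestructuringLHS to be stored into its lvalue.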
  */
 static JSBool
-EmitDestructuringOpsHelper(JSContext *cx, CodeGenerator *cg, ParseNode *pn)
+EmitDestructuringOpsHelper(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
 {
     jsuint index;
     ParseNode *pn2, *pn3;
     JSBool doElemOp;
 
 #ifdef DEBUG
-    intN stackDepth = cg->stackDepth;
+    intN stackDepth = bce->stackDepth;
     JS_ASSERT(stackDepth != 0);
     JS_ASSERT(pn->isArity(PN_LIST));
     JS_ASSERT(pn->isKind(TOK_RB) || pn->isKind(TOK_RC));
 #endif
 
     if (pn->pn_count == 0) {
         /* Emit a DUP;POP sequence for the decompiler. */
-        return Emit1(cx, cg, JSOP_DUP) >= 0 &&
-               Emit1(cx, cg, JSOP_POP) >= 0;
+        return Emit1(cx, bce, JSOP_DUP) >= 0 &&
+               Emit1(cx, bce, JSOP_POP) >= 0;
     }
 
     index = 0;
     for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
         /*
          * Duplicate the value being destructured to use as a reference base.
          * If dup is not the first one, annotate it for the decompiler.
          */
-        if (pn2 != pn->pn_head && NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
+        if (pn2 != pn->pn_head && NewSrcNote(cx, bce, SRC_CONTINUE) < 0)
             return JS_FALSE;
-        if (Emit1(cx, cg, JSOP_DUP) < 0)
+        if (Emit1(cx, bce, JSOP_DUP) < 0)
             return JS_FALSE;
 
         /*
          * Now push the property name currently being matched, which is either
          * the array initialiser's current index, or the current property name
          * "label" on the left of a colon in the object initialiser.  Set pn3
          * to the lvalue node, which is in the value-initializing position.
          */
         doElemOp = JS_TRUE;
         if (pn->isKind(TOK_RB)) {
-            if (!EmitNumberOp(cx, index, cg))
+            if (!EmitNumberOp(cx, index, bce))
                 return JS_FALSE;
             pn3 = pn2;
         } else {
             JS_ASSERT(pn->isKind(TOK_RC));
             JS_ASSERT(pn2->isKind(TOK_COLON));
             pn3 = pn2->pn_left;
             if (pn3->isKind(TOK_NUMBER)) {
                 /*
                  * If we are emitting an object destructuring initialiser,
                  * annotate the index op with SRC_INITPROP so we know we are
                  * not decompiling an array initialiser.
                  */
-                if (NewSrcNote(cx, cg, SRC_INITPROP) < 0)
+                if (NewSrcNote(cx, bce, SRC_INITPROP) < 0)
                     return JS_FALSE;
-                if (!EmitNumberOp(cx, pn3->pn_dval, cg))
+                if (!EmitNumberOp(cx, pn3->pn_dval, bce))
                     return JS_FALSE;
             } else {
                 JS_ASSERT(pn3->isKind(TOK_STRING) || pn3->isKind(TOK_NAME));
-                if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, cg))
+                if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, bce))
                     return JS_FALSE;
                 doElemOp = JS_FALSE;
             }
             pn3 = pn2->pn_right;
         }
 
         if (doElemOp) {
             /*
              * Ok, get the value of the matching property name.  This leaves
              * that value on top of the value being destructured, so the stack
              * is one deeper than when we started.
              */
-            if (!EmitElemOpBase(cx, cg, JSOP_GETELEM))
+            if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))
                 return JS_FALSE;
-            JS_ASSERT(cg->stackDepth == stackDepth + 1);
+            JS_ASSERT(bce->stackDepth == stackDepth + 1);
         }
 
         /* Nullary comma node makes a hole in the array destructurer. */
         if (pn3->isKind(TOK_COMMA) && pn3->isArity(PN_NULLARY)) {
             JS_ASSERT(pn->isKind(TOK_RB));
             JS_ASSERT(pn2 == pn3);
-            if (Emit1(cx, cg, JSOP_POP) < 0)
+            if (Emit1(cx, bce, JSOP_POP) < 0)
                 return JS_FALSE;
         } else {
-            if (!EmitDestructuringLHS(cx, cg, pn3))
+            if (!EmitDestructuringLHS(cx, bce, pn3))
                 return JS_FALSE;
         }
 
-        JS_ASSERT(cg->stackDepth == stackDepth);
+        JS_ASSERT(bce->stackDepth == stackDepth);
         ++index;
     }
 
     return JS_TRUE;
 }
 
 static ptrdiff_t
 OpToDeclType(JSOp op)
@@ -4227,110 +4226,111 @@ OpToDeclType(JSOp op)
       case JSOP_DEFVAR:
         return SRC_DECL_VAR;
       default:
         return SRC_DECL_NONE;
     }
 }
 
 static JSBool
-EmitDestructuringOps(JSContext *cx, CodeGenerator *cg, JSOp prologOp, ParseNode *pn)
+EmitDestructuringOps(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn)
 {
     /*
      * If we're called from a variable declaration, help the decompiler by
      * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
      * If the destructuring initialiser is empty, our helper will emit a
      * JSOP_DUP followed by a JSOP_POP for the decompiler.
      */
-    if (NewSrcNote2(cx, cg, SRC_DESTRUCT, OpToDeclType(prologOp)) < 0)
+    if (NewSrcNote2(cx, bce, SRC_DESTRUCT, OpToDeclType(prologOp)) < 0)
         return JS_FALSE;
 
     /*
      * Call our recursive helper to emit the destructuring assignments and
      * related stack manipulations.
      */
-    return EmitDestructuringOpsHelper(cx, cg, pn);
+    return EmitDestructuringOpsHelper(cx, bce, pn);
 }
 
 static JSBool
-EmitGroupAssignment(JSContext *cx, CodeGenerator *cg, JSOp prologOp,
+EmitGroupAssignment(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp,
                     ParseNode *lhs, ParseNode *rhs)
 {
     jsuint depth, limit, i, nslots;
     ParseNode *pn;
 
-    depth = limit = (uintN) cg->stackDepth;
+    depth = limit = (uintN) bce->stackDepth;
     for (pn = rhs->pn_head; pn; pn = pn->pn_next) {
         if (limit == JS_BIT(16)) {
-            ReportCompileErrorNumber(cx, CG_TS(cg), rhs, JSREPORT_ERROR, JSMSG_ARRAY_INIT_TOO_BIG);
+            ReportCompileErrorNumber(cx, CG_TS(bce), rhs, JSREPORT_ERROR, JSMSG_ARRAY_INIT_TOO_BIG);
             return JS_FALSE;
         }
 
         /* MaybeEmitGroupAssignment won't call us if rhs is holey. */
         JS_ASSERT(!(pn->isKind(TOK_COMMA) && pn->isArity(PN_NULLARY)));
-        if (!EmitTree(cx, cg, pn))
+        if (!EmitTree(cx, bce, pn))
             return JS_FALSE;
         ++limit;
     }
 
-    if (NewSrcNote2(cx, cg, SRC_GROUPASSIGN, OpToDeclType(prologOp)) < 0)
+    if (NewSrcNote2(cx, bce, SRC_GROUPASSIGN, OpToDeclType(prologOp)) < 0)
         return JS_FALSE;
 
     i = depth;
     for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) {
         /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
         JS_ASSERT(i < limit);
-        jsint slot = AdjustBlockSlot(cx, cg, i);
+        jsint slot = AdjustBlockSlot(cx, bce, i);
         if (slot < 0)
             return JS_FALSE;
         EMIT_UINT16_IMM_OP(JSOP_GETLOCAL, slot);
 
         if (pn->isKind(TOK_COMMA) && pn->isArity(PN_NULLARY)) {
-            if (Emit1(cx, cg, JSOP_POP) < 0)
+            if (Emit1(cx, bce, JSOP_POP) < 0)
                 return JS_FALSE;
         } else {
-            if (!EmitDestructuringLHS(cx, cg, pn))
+            if (!EmitDestructuringLHS(cx, bce, pn))
                 return JS_FALSE;
         }
     }
 
     nslots = limit - depth;
     EMIT_UINT16_IMM_OP(JSOP_POPN, nslots);
-    cg->stackDepth = (uintN) depth;
+    bce->stackDepth = (uintN) depth;
     return JS_TRUE;
 }
 
 /*
  * Helper called with pop out param initialized to a JSOP_POP* opcode.  If we
  * can emit a group assignment sequence, which results in 0 stack depth delta,
  * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
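  *
  * For example, [a, b] = [x, y] (names illustrative) qualifies: both sides
  * are array literals, the right side has no holes, and the left side's
  * element count does not exceed the right side's, so the whole assignment
  * can be emitted with no net change in stack depth.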
  */
 static JSBool
-MaybeEmitGroupAssignment(JSContext *cx, CodeGenerator *cg, JSOp prologOp, ParseNode *pn, JSOp *pop)
+MaybeEmitGroupAssignment(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn,
+                         JSOp *pop)
 {
     ParseNode *lhs, *rhs;
 
     JS_ASSERT(pn->isKind(TOK_ASSIGN));
     JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
     lhs = pn->pn_left;
     rhs = pn->pn_right;
     if (lhs->isKind(TOK_RB) && rhs->isKind(TOK_RB) &&
         !(rhs->pn_xflags & PNX_HOLEY) &&
         lhs->pn_count <= rhs->pn_count) {
-        if (!EmitGroupAssignment(cx, cg, prologOp, lhs, rhs))
+        if (!EmitGroupAssignment(cx, bce, prologOp, lhs, rhs))
             return JS_FALSE;
         *pop = JSOP_NOP;
     }
     return JS_TRUE;
 }
 
 #endif /* JS_HAS_DESTRUCTURING */
 
 static JSBool
-EmitVariables(JSContext *cx, CodeGenerator *cg, ParseNode *pn, JSBool inLetHead,
+EmitVariables(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSBool inLetHead,
               ptrdiff_t *headNoteIndex)
 {
     bool forInVar, first;
     ptrdiff_t off, noteIndex, tmp;
     ParseNode *pn2, *pn3, *next;
     JSOp op;
     jsatomid atomIndex;
     uintN oldflags;
@@ -4366,17 +4366,17 @@ EmitVariables(JSContext *cx, CodeGenerat
                  * parser (see Parser::variables) has ensured that our caller
                  * will be the TOK_FOR/TOK_IN case in EmitTree, and that case
                  * will emit the destructuring code only after emitting an
                  * enumerating opcode and a branch that tests whether the
                  * enumeration ended.
                  */
                 JS_ASSERT(forInVar);
                 JS_ASSERT(pn->pn_count == 1);
-                if (!EmitDestructuringDecls(cx, cg, pn->getOp(), pn2))
+                if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn2))
                     return JS_FALSE;
                 break;
             }
 #endif
 
             /*
              * A destructuring initialiser assignment preceded by var will
              * never occur to the left of 'in' in a for-in loop.  As with 'for
@@ -4407,40 +4407,40 @@ EmitVariables(JSContext *cx, CodeGenerat
                 /*
                  * If this is the only destructuring assignment in the list,
                  * try to optimize to a group assignment.  If we're in a let
                  * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
                  * in pn->pn_op, to suppress a second (and misplaced) 'let'.
                  */
                 JS_ASSERT(noteIndex < 0 && !pn2->pn_next);
                 op = JSOP_POP;
-                if (!MaybeEmitGroupAssignment(cx, cg,
+                if (!MaybeEmitGroupAssignment(cx, bce,
                                               inLetHead ? JSOP_POP : pn->getOp(),
                                               pn2, &op)) {
                     return JS_FALSE;
                 }
                 if (op == JSOP_NOP) {
                     pn->pn_xflags = (pn->pn_xflags & ~PNX_POPVAR) | PNX_GROUPINIT;
                     break;
                 }
             }
 
             pn3 = pn2->pn_left;
-            if (!EmitDestructuringDecls(cx, cg, pn->getOp(), pn3))
+            if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn3))
                 return JS_FALSE;
 
-            if (!EmitTree(cx, cg, pn2->pn_right))
+            if (!EmitTree(cx, bce, pn2->pn_right))
                 return JS_FALSE;
 
             /*
              * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
              * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
              * we will emit at the bottom of this function.
              */
-            if (!EmitDestructuringOps(cx, cg,
+            if (!EmitDestructuringOps(cx, bce,
                                       inLetHead ? JSOP_POP : pn->getOp(),
                                       pn3)) {
                 return JS_FALSE;
             }
             goto emit_note_pop;
 #endif
         }
 
@@ -4448,53 +4448,53 @@ EmitVariables(JSContext *cx, CodeGenerat
          * Load initializer early to share code above that jumps to do_name.
          * NB: if this var redeclares an existing binding, then pn2 is linked
          * on its definition's use-chain and pn_expr has been overlayed with
          * pn_lexdef.
          */
         pn3 = pn2->maybeExpr();
 
      do_name:
-        if (!BindNameToSlot(cx, cg, pn2))
+        if (!BindNameToSlot(cx, bce, pn2))
             return JS_FALSE;
 
         op = pn2->getOp();
         if (op == JSOP_ARGUMENTS) {
             /* JSOP_ARGUMENTS => no initializer */
             JS_ASSERT(!pn3 && !let);
             pn3 = NULL;
 #ifdef __GNUC__
             atomIndex = 0;            /* quell GCC overwarning */
 #endif
         } else {
             JS_ASSERT(op != JSOP_CALLEE);
             JS_ASSERT(!pn2->pn_cookie.isFree() || !let);
-            if (!MaybeEmitVarDecl(cx, cg, pn->getOp(), pn2, &atomIndex))
+            if (!MaybeEmitVarDecl(cx, bce, pn->getOp(), pn2, &atomIndex))
                 return JS_FALSE;
 
             if (pn3) {
                 JS_ASSERT(!forInVar);
                 if (op == JSOP_SETNAME) {
                     JS_ASSERT(!let);
                     EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
                 } else if (op == JSOP_SETGNAME) {
                     JS_ASSERT(!let);
                     EMIT_INDEX_OP(JSOP_BINDGNAME, atomIndex);
                 }
                 if (pn->isOp(JSOP_DEFCONST) &&
-                    !DefineCompileTimeConstant(cx, cg, pn2->pn_atom, pn3))
+                    !DefineCompileTimeConstant(cx, bce, pn2->pn_atom, pn3))
                 {
                     return JS_FALSE;
                 }
 
-                oldflags = cg->flags;
-                cg->flags &= ~TCF_IN_FOR_INIT;
-                if (!EmitTree(cx, cg, pn3))
+                oldflags = bce->flags;
+                bce->flags &= ~TCF_IN_FOR_INIT;
+                if (!EmitTree(cx, bce, pn3))
                     return JS_FALSE;
-                cg->flags |= oldflags & TCF_IN_FOR_INIT;
+                bce->flags |= oldflags & TCF_IN_FOR_INIT;
             }
         }
 
         /*
          * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
          * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
          * a TOK_SEQ node to make the two statements appear as one. Therefore
          * if this declaration is part of a for-in loop head, we do not need to
@@ -4505,312 +4505,311 @@ EmitVariables(JSContext *cx, CodeGenerat
         if (forInVar) {
             JS_ASSERT(pn->pn_count == 1);
             JS_ASSERT(!pn3);
             break;
         }
 
         if (first &&
             !inLetHead &&
-            NewSrcNote2(cx, cg, SRC_DECL,
+            NewSrcNote2(cx, bce, SRC_DECL,
                         (pn->isOp(JSOP_DEFCONST))
                         ? SRC_DECL_CONST
                         : (pn->isOp(JSOP_DEFVAR))
                         ? SRC_DECL_VAR
                         : SRC_DECL_LET) < 0)
         {
             return JS_FALSE;
         }
         if (op == JSOP_ARGUMENTS) {
-            if (Emit1(cx, cg, op) < 0)
+            if (Emit1(cx, bce, op) < 0)
                 return JS_FALSE;
         } else if (!pn2->pn_cookie.isFree()) {
             EMIT_UINT16_IMM_OP(op, atomIndex);
         } else {
             EMIT_INDEX_OP(op, atomIndex);
         }
 
 #if JS_HAS_DESTRUCTURING
     emit_note_pop:
 #endif
-        tmp = CG_OFFSET(cg);
+        tmp = CG_OFFSET(bce);
         if (noteIndex >= 0) {
-            if (!SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
+            if (!SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 0, tmp-off))
                 return JS_FALSE;
         }
         if (!next)
             break;
         off = tmp;
-        noteIndex = NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
-        if (noteIndex < 0 || Emit1(cx, cg, JSOP_POP) < 0)
+        noteIndex = NewSrcNote2(cx, bce, SRC_PCDELTA, 0);
+        if (noteIndex < 0 || Emit1(cx, bce, JSOP_POP) < 0)
             return JS_FALSE;
     }
 
     /* If this is a let head, emit and return a srcnote on the pop. */
     if (inLetHead) {
-        *headNoteIndex = NewSrcNote(cx, cg, SRC_DECL);
+        *headNoteIndex = NewSrcNote(cx, bce, SRC_DECL);
         if (*headNoteIndex < 0)
             return JS_FALSE;
         if (!(pn->pn_xflags & PNX_POPVAR))
-            return Emit1(cx, cg, JSOP_NOP) >= 0;
-    }
-
-    return !(pn->pn_xflags & PNX_POPVAR) || Emit1(cx, cg, JSOP_POP) >= 0;
+            return Emit1(cx, bce, JSOP_NOP) >= 0;
+    }
+
+    return !(pn->pn_xflags & PNX_POPVAR) || Emit1(cx, bce, JSOP_POP) >= 0;
 }
 
 static bool
-EmitAssignment(JSContext *cx, CodeGenerator *cg, ParseNode *lhs, JSOp op, ParseNode *rhs)
-{
-    ptrdiff_t top = CG_OFFSET(cg);
+EmitAssignment(JSContext *cx, BytecodeEmitter *bce, ParseNode *lhs, JSOp op, ParseNode *rhs)
+{
+    ptrdiff_t top = CG_OFFSET(bce);
 
     /*
      * Check left operand type and generate specialized code for it.
      * Specialize to avoid ECMA "reference type" values on the operand
      * stack, which impose pervasive runtime "GetValue" costs.
      */
     jsatomid atomIndex = (jsatomid) -1;              /* quell GCC overwarning */
     jsbytecode offset = 1;
 
     switch (lhs->getKind()) {
       case TOK_NAME:
-        if (!BindNameToSlot(cx, cg, lhs))
+        if (!BindNameToSlot(cx, bce, lhs))
             return false;
         if (!lhs->pn_cookie.isFree()) {
             atomIndex = lhs->pn_cookie.asInteger();
         } else {
-            if (!cg->makeAtomIndex(lhs->pn_atom, &atomIndex))
+            if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex))
                 return false;
             if (!lhs->isConst()) {
                 JSOp op = lhs->isOp(JSOP_SETGNAME) ? JSOP_BINDGNAME : JSOP_BINDNAME;
                 EMIT_INDEX_OP(op, atomIndex);
                 offset++;
             }
         }
         break;
       case TOK_DOT:
-        if (!EmitTree(cx, cg, lhs->expr()))
+        if (!EmitTree(cx, bce, lhs->expr()))
             return false;
         offset++;
-        if (!cg->makeAtomIndex(lhs->pn_atom, &atomIndex))
+        if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex))
             return false;
         break;
       case TOK_LB:
         JS_ASSERT(lhs->isArity(PN_BINARY));
-        if (!EmitTree(cx, cg, lhs->pn_left))
+        if (!EmitTree(cx, bce, lhs->pn_left))
             return false;
-        if (!EmitTree(cx, cg, lhs->pn_right))
+        if (!EmitTree(cx, bce, lhs->pn_right))
             return false;
         offset += 2;
         break;
 #if JS_HAS_DESTRUCTURING
       case TOK_RB:
       case TOK_RC:
         break;
 #endif
       case TOK_LP:
-        if (!EmitTree(cx, cg, lhs))
+        if (!EmitTree(cx, bce, lhs))
             return false;
         offset++;
         break;
 #if JS_HAS_XML_SUPPORT
       case TOK_UNARYOP:
-        JS_ASSERT(!cg->inStrictMode());
+        JS_ASSERT(!bce->inStrictMode());
         JS_ASSERT(lhs->isOp(JSOP_SETXMLNAME));
-
-        if (!EmitTree(cx, cg, lhs->pn_kid))
+        if (!EmitTree(cx, bce, lhs->pn_kid))
             return false;
-        if (Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
+        if (Emit1(cx, bce, JSOP_BINDXMLNAME) < 0)
             return false;
         offset++;
         break;
 #endif
       default:
         JS_ASSERT(0);
     }
 
     if (op != JSOP_NOP) {
         JS_ASSERT(rhs);
         switch (lhs->getKind()) {
           case TOK_NAME:
             if (lhs->isConst()) {
                 if (lhs->isOp(JSOP_CALLEE)) {
-                    if (Emit1(cx, cg, JSOP_CALLEE) < 0)
+                    if (Emit1(cx, bce, JSOP_CALLEE) < 0)
                         return false;
                 } else {
                     EMIT_INDEX_OP(lhs->getOp(), atomIndex);
                 }
             } else if (lhs->isOp(JSOP_SETNAME)) {
-                if (Emit1(cx, cg, JSOP_DUP) < 0)
+                if (Emit1(cx, bce, JSOP_DUP) < 0)
                     return false;
                 EMIT_INDEX_OP(JSOP_GETXPROP, atomIndex);
             } else if (lhs->isOp(JSOP_SETGNAME)) {
-                if (!BindGlobal(cx, cg, lhs, lhs->pn_atom))
+                if (!BindGlobal(cx, bce, lhs, lhs->pn_atom))
                     return false;
-                EmitAtomOp(cx, lhs, JSOP_GETGNAME, cg);
+                EmitAtomOp(cx, lhs, JSOP_GETGNAME, bce);
             } else {
                 EMIT_UINT16_IMM_OP(lhs->isOp(JSOP_SETARG) ? JSOP_GETARG : JSOP_GETLOCAL, atomIndex);
             }
             break;
           case TOK_DOT:
-            if (Emit1(cx, cg, JSOP_DUP) < 0)
+            if (Emit1(cx, bce, JSOP_DUP) < 0)
                 return false;
             if (lhs->pn_atom == cx->runtime->atomState.protoAtom) {
-                if (!EmitIndexOp(cx, JSOP_QNAMEPART, atomIndex, cg))
+                if (!EmitIndexOp(cx, JSOP_QNAMEPART, atomIndex, bce))
                     return false;
-                if (!EmitElemOpBase(cx, cg, JSOP_GETELEM))
+                if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))
                     return false;
             } else {
                 bool isLength = (lhs->pn_atom == cx->runtime->atomState.lengthAtom);
                 EMIT_INDEX_OP(isLength ? JSOP_LENGTH : JSOP_GETPROP, atomIndex);
             }
             break;
           case TOK_LB:
           case TOK_LP:
 #if JS_HAS_XML_SUPPORT
           case TOK_UNARYOP:
 #endif
-            if (Emit1(cx, cg, JSOP_DUP2) < 0)
+            if (Emit1(cx, bce, JSOP_DUP2) < 0)
                 return false;
-            if (!EmitElemOpBase(cx, cg, JSOP_GETELEM))
+            if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))
                 return false;
             break;
           default:;
         }
     }
 
     /* Now emit the right operand (it may affect the namespace). */
     if (rhs) {
-        if (!EmitTree(cx, cg, rhs))
+        if (!EmitTree(cx, bce, rhs))
             return false;
     } else {
         /* The value to assign is the next enumeration value in a for-in loop. */
-        if (Emit2(cx, cg, JSOP_ITERNEXT, offset) < 0)
+        if (Emit2(cx, bce, JSOP_ITERNEXT, offset) < 0)
             return false;
     }
 
     /* If += etc., emit the binary operator with a decompiler note. */
     if (op != JSOP_NOP) {
         /*
          * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
          * declared in the current compilation unit, as in this case (just
          * a bit further below) we will avoid emitting the assignment op.
          */
         if (!lhs->isKind(TOK_NAME) || !lhs->isConst()) {
-            if (NewSrcNote(cx, cg, SRC_ASSIGNOP) < 0)
+            if (NewSrcNote(cx, bce, SRC_ASSIGNOP) < 0)
                 return false;
         }
-        if (Emit1(cx, cg, op) < 0)
+        if (Emit1(cx, bce, op) < 0)
             return false;
     }
 
     /* Left parts such as a.b.c and a[b].c need a decompiler note. */
     if (!lhs->isKind(TOK_NAME) &&
 #if JS_HAS_DESTRUCTURING
         !lhs->isKind(TOK_RB) &&
         !lhs->isKind(TOK_RC) &&
 #endif
-        NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
+        NewSrcNote2(cx, bce, SRC_PCBASE, CG_OFFSET(bce) - top) < 0)
     {
         return false;
     }
 
     /* Finally, emit the specialized assignment bytecode. */
     switch (lhs->getKind()) {
       case TOK_NAME:
         if (lhs->isConst()) {
             if (!rhs) {
-                ReportCompileErrorNumber(cx, CG_TS(cg), lhs, JSREPORT_ERROR,
+                ReportCompileErrorNumber(cx, CG_TS(bce), lhs, JSREPORT_ERROR,
                                          JSMSG_BAD_FOR_LEFTSIDE);
                 return false;
             }
             break;
         }
         /* FALL THROUGH */
       case TOK_DOT:
         EMIT_INDEX_OP(lhs->getOp(), atomIndex);
         break;
       case TOK_LB:
       case TOK_LP:
-        if (Emit1(cx, cg, JSOP_SETELEM) < 0)
+        if (Emit1(cx, bce, JSOP_SETELEM) < 0)
             return false;
         break;
 #if JS_HAS_DESTRUCTURING
       case TOK_RB:
       case TOK_RC:
-        if (!EmitDestructuringOps(cx, cg, JSOP_SETNAME, lhs))
+        if (!EmitDestructuringOps(cx, bce, JSOP_SETNAME, lhs))
             return false;
         break;
 #endif
 #if JS_HAS_XML_SUPPORT
       case TOK_UNARYOP:
-        JS_ASSERT(!cg->inStrictMode());
-        if (Emit1(cx, cg, JSOP_SETXMLNAME) < 0)
+        JS_ASSERT(!bce->inStrictMode());
+        if (Emit1(cx, bce, JSOP_SETXMLNAME) < 0)
             return false;
         break;
 #endif
       default:
         JS_ASSERT(0);
     }
     return true;
 }
 
 #if defined DEBUG_brendan || defined DEBUG_mrbkap
 static JSBool
-GettableNoteForNextOp(CodeGenerator *cg)
+GettableNoteForNextOp(BytecodeEmitter *bce)
 {
     ptrdiff_t offset, target;
     jssrcnote *sn, *end;
 
     offset = 0;
-    target = CG_OFFSET(cg);
-    for (sn = CG_NOTES(cg), end = sn + CG_NOTE_COUNT(cg); sn < end;
+    target = CG_OFFSET(bce);
+    for (sn = CG_NOTES(bce), end = sn + CG_NOTE_COUNT(bce); sn < end;
          sn = SN_NEXT(sn)) {
         if (offset == target && SN_IS_GETTABLE(sn))
             return JS_TRUE;
         offset += SN_DELTA(sn);
     }
     return JS_FALSE;
 }
 #endif
 
 /* Top-level named functions need a nop for decompilation. */
 static JSBool
-EmitFunctionDefNop(JSContext *cx, CodeGenerator *cg, uintN index)
-{
-    return NewSrcNote2(cx, cg, SRC_FUNCDEF, (ptrdiff_t)index) >= 0 &&
-           Emit1(cx, cg, JSOP_NOP) >= 0;
+EmitFunctionDefNop(JSContext *cx, BytecodeEmitter *bce, uintN index)
+{
+    return NewSrcNote2(cx, bce, SRC_FUNCDEF, (ptrdiff_t)index) >= 0 &&
+           Emit1(cx, bce, JSOP_NOP) >= 0;
 }
 
 static bool
-EmitNewInit(JSContext *cx, CodeGenerator *cg, JSProtoKey key, ParseNode *pn, int sharpnum)
-{
-    if (Emit3(cx, cg, JSOP_NEWINIT, (jsbytecode) key, 0) < 0)
+EmitNewInit(JSContext *cx, BytecodeEmitter *bce, JSProtoKey key, ParseNode *pn, int sharpnum)
+{
+    if (Emit3(cx, bce, JSOP_NEWINIT, (jsbytecode) key, 0) < 0)
         return false;
 #if JS_HAS_SHARP_VARS
-    if (cg->hasSharps()) {
+    if (bce->hasSharps()) {
         if (pn->pn_count != 0)
-            EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, cg->sharpSlotBase);
+            EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, bce->sharpSlotBase);
         if (sharpnum >= 0)
-            EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, cg->sharpSlotBase, sharpnum);
+            EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, bce->sharpSlotBase, sharpnum);
     } else {
         JS_ASSERT(sharpnum < 0);
     }
 #endif
     return true;
 }
 
 static bool
-EmitEndInit(JSContext *cx, CodeGenerator *cg, uint32 count)
+EmitEndInit(JSContext *cx, BytecodeEmitter *bce, uint32 count)
 {
 #if JS_HAS_SHARP_VARS
     /* Emit an op for sharp array cleanup and decompilation. */
-    if (cg->hasSharps() && count != 0)
-        EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, cg->sharpSlotBase);
+    if (bce->hasSharps() && count != 0)
+        EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, bce->sharpSlotBase);
 #endif
-    return Emit1(cx, cg, JSOP_ENDINIT) >= 0;
+    return Emit1(cx, bce, JSOP_ENDINIT) >= 0;
 }
 
 bool
 ParseNode::getConstantValue(JSContext *cx, bool strictChecks, Value *vp)
 {
     switch (getKind()) {
       case TOK_NUMBER:
         vp->setNumber(pn_dval);
@@ -4895,175 +4894,175 @@ ParseNode::getConstantValue(JSContext *c
       }
       default:
         JS_NOT_REACHED("Unexpected node");
     }
     return false;
 }
 
 static bool
-EmitSingletonInitialiser(JSContext *cx, CodeGenerator *cg, ParseNode *pn)
+EmitSingletonInitialiser(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
 {
     Value value;
-    if (!pn->getConstantValue(cx, cg->needStrictChecks(), &value))
+    if (!pn->getConstantValue(cx, bce->needStrictChecks(), &value))
         return false;
 
     JS_ASSERT(value.isObject());
-    ObjectBox *objbox = cg->parser->newObjectBox(&value.toObject());
+    ObjectBox *objbox = bce->parser->newObjectBox(&value.toObject());
     if (!objbox)
         return false;
 
-    return EmitObjectOp(cx, objbox, JSOP_OBJECT, cg);
+    return EmitObjectOp(cx, objbox, JSOP_OBJECT, bce);
 }
 
 /* See the SRC_FOR source note offsetBias comments later in this file. */
 JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
 JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);
 
 class EmitLevelManager
 {
-    CodeGenerator *cg;
+    BytecodeEmitter *bce;
   public:
-    EmitLevelManager(CodeGenerator *cg) : cg(cg) { cg->emitLevel++; }
-    ~EmitLevelManager() { cg->emitLevel--; }
+    EmitLevelManager(BytecodeEmitter *bce) : bce(bce) { bce->emitLevel++; }
+    ~EmitLevelManager() { bce->emitLevel--; }
 };
 
 static bool
-EmitCatch(JSContext *cx, CodeGenerator *cg, ParseNode *pn)
+EmitCatch(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
 {
     ptrdiff_t catchStart, guardJump;
 
     /*
      * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
      * and save the block object atom.
      */
-    StmtInfo *stmt = cg->topStmt;
+    StmtInfo *stmt = bce->topStmt;
     JS_ASSERT(stmt->type == STMT_BLOCK && (stmt->flags & SIF_SCOPE));
     stmt->type = STMT_CATCH;
     catchStart = stmt->update;
 
     /* Go up one statement info record to the TRY or FINALLY record. */
     stmt = stmt->down;
     JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);
 
     /* Pick up the pending exception and bind it to the catch variable. */
-    if (Emit1(cx, cg, JSOP_EXCEPTION) < 0)
+    if (Emit1(cx, bce, JSOP_EXCEPTION) < 0)
         return false;
 
     /*
      * Dup the exception object if there is a guard for rethrowing to use
      * it later when rethrowing or in other catches.
      */
-    if (pn->pn_kid2 && Emit1(cx, cg, JSOP_DUP) < 0)
+    if (pn->pn_kid2 && Emit1(cx, bce, JSOP_DUP) < 0)
         return false;
 
     ParseNode *pn2 = pn->pn_kid1;
     switch (pn2->getKind()) {
 #if JS_HAS_DESTRUCTURING
       case TOK_RB:
       case TOK_RC:
-        if (!EmitDestructuringOps(cx, cg, JSOP_NOP, pn2))
+        if (!EmitDestructuringOps(cx, bce, JSOP_NOP, pn2))
             return false;
-        if (Emit1(cx, cg, JSOP_POP) < 0)
+        if (Emit1(cx, bce, JSOP_POP) < 0)
             return false;
         break;
 #endif
 
       case TOK_NAME:
         /* Inline and specialize BindNameToSlot for pn2. */
         JS_ASSERT(!pn2->pn_cookie.isFree());
         EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, pn2->pn_cookie.asInteger());
         break;
 
       default:
         JS_ASSERT(0);
     }
 
     /* Emit the guard expression, if there is one. */
     if (pn->pn_kid2) {
-        if (!EmitTree(cx, cg, pn->pn_kid2))
+        if (!EmitTree(cx, bce, pn->pn_kid2))
             return false;
-        if (!SetSrcNoteOffset(cx, cg, CATCHNOTE(*stmt), 0, CG_OFFSET(cg) - catchStart))
+        if (!SetSrcNoteOffset(cx, bce, CATCHNOTE(*stmt), 0, CG_OFFSET(bce) - catchStart))
             return false;
         /* ifeq <next block> */
-        guardJump = EmitJump(cx, cg, JSOP_IFEQ, 0);
+        guardJump = EmitJump(cx, bce, JSOP_IFEQ, 0);
         if (guardJump < 0)
             return false;
         GUARDJUMP(*stmt) = guardJump;
 
         /* Pop duplicated exception object as we no longer need it. */
-        if (Emit1(cx, cg, JSOP_POP) < 0)
+        if (Emit1(cx, bce, JSOP_POP) < 0)
             return false;
     }
 
     /* Emit the catch body. */
-    if (!EmitTree(cx, cg, pn->pn_kid3))
+    if (!EmitTree(cx, bce, pn->pn_kid3))
         return false;
 
     /*
      * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
      * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
      */
-    ptrdiff_t off = cg->stackDepth;
-    if (NewSrcNote2(cx, cg, SRC_CATCH, off) < 0)
+    ptrdiff_t off = bce->stackDepth;
+    if (NewSrcNote2(cx, bce, SRC_CATCH, off) < 0)
         return false;
     return true;
 }
 
 static bool
-EmitTry(JSContext *cx, CodeGenerator *cg, ParseNode *pn)
+EmitTry(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
 {
     StmtInfo stmtInfo;
     ptrdiff_t catchJump = -1;
 
     /*
      * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
      * for later fixup.
      *
      * When a finally block is active (STMT_FINALLY in our tree context),
      * non-local jumps (including jumps-over-catches) result in a GOSUB
      * being written into the bytecode stream and fixed-up later (c.f.
      * EmitBackPatchOp and BackPatch).
      */
-    PushStatement(cg, &stmtInfo, pn->pn_kid3 ? STMT_FINALLY : STMT_TRY, CG_OFFSET(cg));
+    PushStatement(bce, &stmtInfo, pn->pn_kid3 ? STMT_FINALLY : STMT_TRY, CG_OFFSET(bce));
 
     /*
      * Since an exception can be thrown at any place inside the try block,
      * we need to restore the stack and the scope chain before we transfer
      * the control to the exception handler.
      *
      * For that we store in a try note associated with the catch or
      * finally block the stack depth upon the try entry. The interpreter
      * uses this depth to properly unwind the stack and the scope chain.
      */
-    intN depth = cg->stackDepth;
+    intN depth = bce->stackDepth;
 
     /* Mark try location for decompilation, then emit try block. */
-    if (Emit1(cx, cg, JSOP_TRY) < 0)
+    if (Emit1(cx, bce, JSOP_TRY) < 0)
         return false;
-    ptrdiff_t tryStart = CG_OFFSET(cg);
-    if (!EmitTree(cx, cg, pn->pn_kid1))
+    ptrdiff_t tryStart = CG_OFFSET(bce);
+    if (!EmitTree(cx, bce, pn->pn_kid1))
         return false;
-    JS_ASSERT(depth == cg->stackDepth);
+    JS_ASSERT(depth == bce->stackDepth);
 
     /* GOSUB to finally, if present. */
     if (pn->pn_kid3) {
-        if (NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
+        if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
             return false;
-        if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(stmtInfo)) < 0)
+        if (EmitBackPatchOp(cx, bce, JSOP_BACKPATCH, &GOSUBS(stmtInfo)) < 0)
             return false;
     }
 
     /* Emit (hidden) jump over catch and/or finally. */
-    if (NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
+    if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
         return false;
-    if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump) < 0)
+    if (EmitBackPatchOp(cx, bce, JSOP_BACKPATCH, &catchJump) < 0)
         return false;
 
-    ptrdiff_t tryEnd = CG_OFFSET(cg);
+    ptrdiff_t tryEnd = CG_OFFSET(bce);
 
     ObjectBox *prevBox = NULL;
     /* If this try has a catch block, emit it. */
     ParseNode *lastCatch = NULL;
     if (ParseNode *pn2 = pn->pn_kid2) {
         uintN count = 0;    /* previous catch block's population */
 
         /*
@@ -5087,86 +5086,86 @@ EmitTry(JSContext *cx, CodeGenerator *cg
          * <offset to next catch block> points to rethrow code.  This
          * code will [gosub] to the finally code if appropriate, and is
          * also used for the catch-all trynote for capturing exceptions
          * thrown from catch{} blocks.
          */
         for (ParseNode *pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
             ptrdiff_t guardJump, catchNote;
 
-            JS_ASSERT(cg->stackDepth == depth);
+            JS_ASSERT(bce->stackDepth == depth);
             guardJump = GUARDJUMP(stmtInfo);
             if (guardJump != -1) {
-                if (EmitKnownBlockChain(cx, cg, prevBox) < 0)
+                if (EmitKnownBlockChain(cx, bce, prevBox) < 0)
                     return false;
 
                 /* Fix up and clean up previous catch block. */
-                CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, guardJump);
+                CHECK_AND_SET_JUMP_OFFSET_AT(cx, bce, guardJump);
 
                 /*
                  * Account for JSOP_ENTERBLOCK (whose block object count
                  * is saved below) and pushed exception object that we
                  * still have after the jumping from the previous guard.
                  */
-                cg->stackDepth = depth + count + 1;
+                bce->stackDepth = depth + count + 1;
 
                 /*
                  * Move exception back to cx->exception to prepare for
                  * the next catch. We hide [throwing] from the decompiler
                  * since it compensates for the hidden JSOP_DUP at the
                  * start of the previous guarded catch.
                  */
-                if (NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
-                    Emit1(cx, cg, JSOP_THROWING) < 0) {
+                if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0 ||
+                    Emit1(cx, bce, JSOP_THROWING) < 0) {
                     return false;
                 }
-                if (NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
+                if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
                     return false;
-                if (!EmitLeaveBlock(cx, cg, JSOP_LEAVEBLOCK, prevBox))
+                if (!EmitLeaveBlock(cx, bce, JSOP_LEAVEBLOCK, prevBox))
                     return false;
-                JS_ASSERT(cg->stackDepth == depth);
+                JS_ASSERT(bce->stackDepth == depth);
             }
 
             /*
              * Annotate the JSOP_ENTERBLOCK that's about to be generated
              * by the call to EmitTree immediately below.  Save this
              * source note's index in stmtInfo for use by the TOK_CATCH:
              * case, where the length of the catch guard is set as the
              * note's offset.
              */
-            catchNote = NewSrcNote2(cx, cg, SRC_CATCH, 0);
+            catchNote = NewSrcNote2(cx, bce, SRC_CATCH, 0);
             if (catchNote < 0)
                 return false;
             CATCHNOTE(stmtInfo) = catchNote;
 
             /*
              * Emit the lexical scope and catch body.  Save the catch's
              * block object population via count, for use when targeting
              * guardJump at the next catch (the guard mismatch case).
              */
             JS_ASSERT(pn3->isKind(TOK_LEXICALSCOPE));
             count = OBJ_BLOCK_COUNT(cx, pn3->pn_objbox->object);
             prevBox = pn3->pn_objbox;
-            if (!EmitTree(cx, cg, pn3))
+            if (!EmitTree(cx, bce, pn3))
                 return false;
 
             /* gosub <finally>, if required */
             if (pn->pn_kid3) {
-                if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(stmtInfo)) < 0)
+                if (EmitBackPatchOp(cx, bce, JSOP_BACKPATCH, &GOSUBS(stmtInfo)) < 0)
                     return false;
-                JS_ASSERT(cg->stackDepth == depth);
+                JS_ASSERT(bce->stackDepth == depth);
             }
 
             /*
              * Jump over the remaining catch blocks.  This will get fixed
              * up to jump to after catch/finally.
              */
-            if (NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
+            if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
                 return false;
-            if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump) < 0)
+            if (EmitBackPatchOp(cx, bce, JSOP_BACKPATCH, &catchJump) < 0)
                 return false;
 
             /*
              * Save a pointer to the last catch node to handle try-finally
              * and try-catch(guard)-finally special cases.
              */
             lastCatch = pn3->expr();
         }
@@ -5174,182 +5173,182 @@ EmitTry(JSContext *cx, CodeGenerator *cg
 
     /*
      * Last catch guard jumps to the rethrow code sequence if none of the
      * guards match. Target guardJump at the beginning of the rethrow
      * sequence, just in case a guard expression throws and leaves the
      * stack unbalanced.
      */
     if (lastCatch && lastCatch->pn_kid2) {
-        if (EmitKnownBlockChain(cx, cg, prevBox) < 0)
+        if (EmitKnownBlockChain(cx, bce, prevBox) < 0)
             return false;
 
-        CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, GUARDJUMP(stmtInfo));
+        CHECK_AND_SET_JUMP_OFFSET_AT(cx, bce, GUARDJUMP(stmtInfo));
 
         /* Sync the stack to take into account pushed exception. */
-        JS_ASSERT(cg->stackDepth == depth);
-        cg->stackDepth = depth + 1;
+        JS_ASSERT(bce->stackDepth == depth);
+        bce->stackDepth = depth + 1;
 
         /*
          * Rethrow the exception, delegating executing of finally if any
          * to the exception handler.
          */
-        if (NewSrcNote(cx, cg, SRC_HIDDEN) < 0 || Emit1(cx, cg, JSOP_THROW) < 0)
+        if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0 || Emit1(cx, bce, JSOP_THROW) < 0)
             return false;
 
-        if (EmitBlockChain(cx, cg) < 0)
+        if (EmitBlockChain(cx, bce) < 0)
             return false;
     }
 
-    JS_ASSERT(cg->stackDepth == depth);
+    JS_ASSERT(bce->stackDepth == depth);
 
     /* Emit finally handler if any. */
     ptrdiff_t finallyStart = 0;   /* to quell GCC uninitialized warnings */
     if (pn->pn_kid3) {
         /*
          * Fix up the gosubs that might have been emitted before non-local
          * jumps to the finally code.
          */
-        if (!BackPatch(cx, cg, GOSUBS(stmtInfo), CG_NEXT(cg), JSOP_GOSUB))
+        if (!BackPatch(cx, bce, GOSUBS(stmtInfo), CG_NEXT(bce), JSOP_GOSUB))
             return false;
 
-        finallyStart = CG_OFFSET(cg);
+        finallyStart = CG_OFFSET(bce);
 
         /* Indicate that we're emitting a subroutine body. */
         stmtInfo.type = STMT_SUBROUTINE;
-        if (!UpdateLineNumberNotes(cx, cg, pn->pn_kid3->pn_pos.begin.lineno))
+        if (!UpdateLineNumberNotes(cx, bce, pn->pn_kid3->pn_pos.begin.lineno))
             return false;
-        if (Emit1(cx, cg, JSOP_FINALLY) < 0 ||
-            !EmitTree(cx, cg, pn->pn_kid3) ||
-            Emit1(cx, cg, JSOP_RETSUB) < 0)
+        if (Emit1(cx, bce, JSOP_FINALLY) < 0 ||
+            !EmitTree(cx, bce, pn->pn_kid3) ||
+            Emit1(cx, bce, JSOP_RETSUB) < 0)
         {
             return false;
         }
-        JS_ASSERT(cg->stackDepth == depth);
-    }
-    if (!PopStatementCG(cx, cg))
+        JS_ASSERT(bce->stackDepth == depth);
+    }
+    if (!PopStatementCG(cx, bce))
         return false;
 
-    if (NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 || Emit1(cx, cg, JSOP_NOP) < 0)
+    if (NewSrcNote(cx, bce, SRC_ENDBRACE) < 0 || Emit1(cx, bce, JSOP_NOP) < 0)
         return false;
 
     /* Fix up the end-of-try/catch jumps to come here. */
-    if (!BackPatch(cx, cg, catchJump, CG_NEXT(cg), JSOP_GOTO))
+    if (!BackPatch(cx, bce, catchJump, CG_NEXT(bce), JSOP_GOTO))
         return false;
 
     /*
      * Add the try note last, to let post-order give us the right ordering
      * (first to last for a given nesting level, inner to outer by level).
      */
-    if (pn->pn_kid2 && !NewTryNote(cx, cg, JSTRY_CATCH, depth, tryStart, tryEnd))
+    if (pn->pn_kid2 && !NewTryNote(cx, bce, JSTRY_CATCH, depth, tryStart, tryEnd))
         return false;
 
     /*
      * If we've got a finally, mark try+catch region with additional
      * trynote to catch exceptions (re)thrown from a catch block or
      * for the try{}finally{} case.
      */
-    if (pn->pn_kid3 && !NewTryNote(cx, cg, JSTRY_FINALLY, depth, tryStart, finallyStart))
+    if (pn->pn_kid3 && !NewTryNote(cx, bce, JSTRY_FINALLY, depth, tryStart, finallyStart))
         return false;
 
     return true;
 }
 
 static bool
-EmitIf(JSContext *cx, CodeGenerator *cg, ParseNode *pn)
+EmitIf(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
 {
     StmtInfo stmtInfo;
 
     /* Initialize so we can detect else-if chains and avoid recursion. */
     stmtInfo.type = STMT_IF;
     ptrdiff_t beq = -1;
     ptrdiff_t jmp = -1;
     ptrdiff_t noteIndex = -1;
 
   if_again:
     /* Emit code for the condition before pushing stmtInfo. */
-    if (!EmitTree(cx, cg, pn->pn_kid1))
+    if (!EmitTree(cx, bce, pn->pn_kid1))
         return JS_FALSE;
-    ptrdiff_t top = CG_OFFSET(cg);
+    ptrdiff_t top = CG_OFFSET(bce);
     if (stmtInfo.type == STMT_IF) {
-        PushStatement(cg, &stmtInfo, STMT_IF, top);
+        PushStatement(bce, &stmtInfo, STMT_IF, top);
     } else {
         /*
          * We came here from the goto further below that detects else-if
          * chains, so we must mutate stmtInfo back into a STMT_IF record.
          * Also (see below for why) we need a note offset for SRC_IF_ELSE
          * to help the decompiler.  Actually, we need two offsets, one for
          * decompiling any else clause and the second for decompiling an
          * else-if chain without bracing, overindenting, or incorrectly
          * scoping let declarations.
          */
         JS_ASSERT(stmtInfo.type == STMT_ELSE);
         stmtInfo.type = STMT_IF;
         stmtInfo.update = top;
-        if (!SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
+        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
             return JS_FALSE;
-        if (!SetSrcNoteOffset(cx, cg, noteIndex, 1, top - beq))
+        if (!SetSrcNoteOffset(cx, bce, noteIndex, 1, top - beq))
             return JS_FALSE;
     }
 
     /* Emit an annotated branch-if-false around the then part. */
     ParseNode *pn3 = pn->pn_kid3;
-    noteIndex = NewSrcNote(cx, cg, pn3 ? SRC_IF_ELSE : SRC_IF);
+    noteIndex = NewSrcNote(cx, bce, pn3 ? SRC_IF_ELSE : SRC_IF);
     if (noteIndex < 0)
         return JS_FALSE;
-    beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
+    beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
     if (beq < 0)
         return JS_FALSE;
 
     /* Emit code for the then and optional else parts. */
-    if (!EmitTree(cx, cg, pn->pn_kid2))
+    if (!EmitTree(cx, bce, pn->pn_kid2))
         return JS_FALSE;
     if (pn3) {
         /* Modify stmtInfo so we know we're in the else part. */
         stmtInfo.type = STMT_ELSE;
 
         /*
          * Emit a JSOP_BACKPATCH op to jump from the end of our then part
          * around the else part.  The PopStatementCG call at the bottom of this
          * function will fix up the backpatch chain linked from
          * stmtInfo.breaks.
          */
-        jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks);
+        jmp = EmitGoto(cx, bce, &stmtInfo, &stmtInfo.breaks);
         if (jmp < 0)
             return JS_FALSE;
 
         /* Ensure the branch-if-false comes here, then emit the else. */
-        CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
+        CHECK_AND_SET_JUMP_OFFSET_AT(cx, bce, beq);
         if (pn3->isKind(TOK_IF)) {
             pn = pn3;
             goto if_again;
         }
 
-        if (!EmitTree(cx, cg, pn3))
+        if (!EmitTree(cx, bce, pn3))
             return JS_FALSE;
 
         /*
          * Annotate SRC_IF_ELSE with the offset from branch to jump, for
          * the decompiler's benefit.  We can't just "back up" from the pc
          * of the else clause, because we don't know whether an extended
          * jump was required to leap from the end of the then clause over
          * the else clause.
          */
-        if (!SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
+        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
             return JS_FALSE;
     } else {
         /* No else part, fixup the branch-if-false to come here. */
-        CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
-    }
-    return PopStatementCG(cx, cg);
+        CHECK_AND_SET_JUMP_OFFSET_AT(cx, bce, beq);
+    }
+    return PopStatementCG(cx, bce);
 }
 
 #if JS_HAS_BLOCK_SCOPE
 static bool
-EmitLet(JSContext *cx, CodeGenerator *cg, ParseNode *&pn)
+EmitLet(JSContext *cx, BytecodeEmitter *bce, ParseNode *&pn)
 {
     /*
      * pn represents one of these syntactic constructs:
      *   let-expression:                        (let (x = y) EXPR)
      *   let-statement:                         let (x = y) { ... }
      *   let-declaration in statement context:  let x = y;
      *   let-declaration in for-loop head:      for (let ...) ...
      *
@@ -5371,678 +5370,678 @@ EmitLet(JSContext *cx, CodeGenerator *cg
      * Use TempPopScope to evaluate the expressions in the enclosing scope.
      * This also causes the initializing assignments to be emitted in the
      * enclosing scope, but the assignment opcodes emitted here
      * (essentially just setlocal, though destructuring assignment uses
      * other additional opcodes) do not care about the block chain.
      */
     JS_ASSERT(pn->isArity(PN_LIST));
     TempPopScope tps;
-    bool popScope = pn2 || (cg->flags & TCF_IN_FOR_INIT);
-    if (popScope && !tps.popBlock(cx, cg))
+    bool popScope = pn2 || (bce->flags & TCF_IN_FOR_INIT);
+    if (popScope && !tps.popBlock(cx, bce))
         return false;
     ptrdiff_t noteIndex;
-    if (!EmitVariables(cx, cg, pn, pn2 != NULL, &noteIndex))
+    if (!EmitVariables(cx, bce, pn, pn2 != NULL, &noteIndex))
         return false;
-    ptrdiff_t tmp = CG_OFFSET(cg);
-    if (popScope && !tps.repushBlock(cx, cg))
+    ptrdiff_t tmp = CG_OFFSET(bce);
+    if (popScope && !tps.repushBlock(cx, bce))
         return false;
 
     /* Thus non-null pn2 is the body of the let block or expression. */
-    if (pn2 && !EmitTree(cx, cg, pn2))
+    if (pn2 && !EmitTree(cx, bce, pn2))
         return false;
 
-    if (noteIndex >= 0 && !SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, CG_OFFSET(cg) - tmp))
+    if (noteIndex >= 0 && !SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 0, CG_OFFSET(bce) - tmp))
         return false;
 
     return true;
 }
 #endif
 
 #if JS_HAS_XML_SUPPORT
 static bool
-EmitXMLTag(JSContext *cx, CodeGenerator *cg, ParseNode *pn)
-{
-    JS_ASSERT(!cg->inStrictMode());
-
-    if (Emit1(cx, cg, JSOP_STARTXML) < 0)
+EmitXMLTag(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
+{
+    JS_ASSERT(!bce->inStrictMode());
+
+    if (Emit1(cx, bce, JSOP_STARTXML) < 0)
         return false;
 
     {
         jsatomid index;
         JSAtom *tagAtom = (pn->isKind(TOK_XMLETAGO))
                           ? cx->runtime->atomState.etagoAtom
                           : cx->runtime->atomState.stagoAtom;
-        if (!cg->makeAtomIndex(tagAtom, &index))
+        if (!bce->makeAtomIndex(tagAtom, &index))
             return false;
         EMIT_INDEX_OP(JSOP_STRING, index);
     }
 
     JS_ASSERT(pn->pn_count != 0);
     ParseNode *pn2 = pn->pn_head;
-    if (pn2->isKind(TOK_LC) && Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0)
+    if (pn2->isKind(TOK_LC) && Emit1(cx, bce, JSOP_STARTXMLEXPR) < 0)
         return false;
-    if (!EmitTree(cx, cg, pn2))
+    if (!EmitTree(cx, bce, pn2))
         return false;
-    if (Emit1(cx, cg, JSOP_ADD) < 0)
+    if (Emit1(cx, bce, JSOP_ADD) < 0)
         return false;
 
     uint32 i;
     for (pn2 = pn2->pn_next, i = 0; pn2; pn2 = pn2->pn_next, i++) {
-        if (pn2->isKind(TOK_LC) && Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0)
+        if (pn2->isKind(TOK_LC) && Emit1(cx, bce, JSOP_STARTXMLEXPR) < 0)
             return false;
-        if (!EmitTree(cx, cg, pn2))
+        if (!EmitTree(cx, bce, pn2))
             return false;
         if ((i & 1) && pn2->isKind(TOK_LC)) {
-            if (Emit1(cx, cg, JSOP_TOATTRVAL) < 0)
+            if (Emit1(cx, bce, JSOP_TOATTRVAL) < 0)
                 return false;
         }
-        if (Emit1(cx, cg, (i & 1) ? JSOP_ADDATTRVAL : JSOP_ADDATTRNAME) < 0)
+        if (Emit1(cx, bce, (i & 1) ? JSOP_ADDATTRVAL : JSOP_ADDATTRNAME) < 0)
             return false;
     }
 
     {
         jsatomid index;
         JSAtom *tmp = (pn->isKind(TOK_XMLPTAGC)) ? cx->runtime->atomState.ptagcAtom
                                                  : cx->runtime->atomState.tagcAtom;
-        if (!cg->makeAtomIndex(tmp, &index))
+        if (!bce->makeAtomIndex(tmp, &index))
             return false;
         EMIT_INDEX_OP(JSOP_STRING, index);
     }
-    if (Emit1(cx, cg, JSOP_ADD) < 0)
+    if (Emit1(cx, bce, JSOP_ADD) < 0)
         return false;
 
-    if ((pn->pn_xflags & PNX_XMLROOT) && Emit1(cx, cg, pn->getOp()) < 0)
+    if ((pn->pn_xflags & PNX_XMLROOT) && Emit1(cx, bce, pn->getOp()) < 0)
         return false;
 
     return true;
 }
 
 static bool
-EmitXMLProcessingInstruction(JSContext *cx, CodeGenerator *cg, ParseNode *pn)
-{
-    JS_ASSERT(!cg->inStrictMode());
+EmitXMLProcessingInstruction(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
+{
+    JS_ASSERT(!bce->inStrictMode());
 
     jsatomid index;
-    if (!cg->makeAtomIndex(pn->pn_pidata, &index))
+    if (!bce->makeAtomIndex(pn->pn_pidata, &index))
         return false;
-    if (!EmitIndexOp(cx, JSOP_QNAMEPART, index, cg))
+    if (!EmitIndexOp(cx, JSOP_QNAMEPART, index, bce))
         return false;
-    if (!EmitAtomOp(cx, pn, JSOP_XMLPI, cg))
+    if (!EmitAtomOp(cx, pn, JSOP_XMLPI, bce))
         return false;
     return true;
 }
 #endif
 
 static bool
-EmitLexicalScope(JSContext *cx, CodeGenerator *cg, ParseNode *pn, JSBool &ok)
+EmitLexicalScope(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSBool &ok)
 {
     StmtInfo stmtInfo;
     StmtInfo *stmt;
     ObjectBox *objbox = pn->pn_objbox;
-    PushBlockScope(cg, &stmtInfo, objbox, CG_OFFSET(cg));
+    PushBlockScope(bce, &stmtInfo, objbox, CG_OFFSET(bce));
 
     /*
      * If this lexical scope is not for a catch block, let block or let
      * expression, or any kind of for loop (where the scope starts in the
      * head after the first part if for (;;), else in the body if for-in);
      * and if our container is top-level but not a function body, or else
      * a block statement; then emit a SRC_BRACE note.  All other container
      * statements get braces by default from the decompiler.
      */
     ptrdiff_t noteIndex = -1;
     TokenKind type = pn->expr()->getKind();
     if (type != TOK_CATCH && type != TOK_LET && type != TOK_FOR &&
         (!(stmt = stmtInfo.down)
-         ? !cg->inFunction()
+         ? !bce->inFunction()
          : stmt->type == STMT_BLOCK)) {
 #if defined DEBUG_brendan || defined DEBUG_mrbkap
         /* There must be no source note already output for the next op. */
-        JS_ASSERT(CG_NOTE_COUNT(cg) == 0 ||
-                  CG_LAST_NOTE_OFFSET(cg) != CG_OFFSET(cg) ||
-                  !GettableNoteForNextOp(cg));
+        JS_ASSERT(CG_NOTE_COUNT(bce) == 0 ||
+                  CG_LAST_NOTE_OFFSET(bce) != CG_OFFSET(bce) ||
+                  !GettableNoteForNextOp(bce));
 #endif
-        noteIndex = NewSrcNote2(cx, cg, SRC_BRACE, 0);
+        noteIndex = NewSrcNote2(cx, bce, SRC_BRACE, 0);
         if (noteIndex < 0)
             return false;
     }
 
-    ptrdiff_t top = CG_OFFSET(cg);
-    if (!EmitEnterBlock(cx, pn, cg))
+    ptrdiff_t top = CG_OFFSET(bce);
+    if (!EmitEnterBlock(cx, pn, bce))
         return false;
 
-    if (!EmitTree(cx, cg, pn->pn_expr))
+    if (!EmitTree(cx, bce, pn->pn_expr))
         return false;
 
     JSOp op = pn->getOp();
     if (op == JSOP_LEAVEBLOCKEXPR) {
-        if (NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
+        if (NewSrcNote2(cx, bce, SRC_PCBASE, CG_OFFSET(bce) - top) < 0)
             return false;
     } else {
-        if (noteIndex >= 0 && !SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, CG_OFFSET(cg) - top))
+        if (noteIndex >= 0 && !SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 0, CG_OFFSET(bce) - top))
             return false;
     }
 
     /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
-    if (!EmitLeaveBlock(cx, cg, op, objbox))
+    if (!EmitLeaveBlock(cx, bce, op, objbox))
         return false;
 
-    ok = PopStatementCG(cx, cg);
+    ok = PopStatementCG(cx, bce);
     return true;
 }
 
 static bool
-EmitWith(JSContext *cx, CodeGenerator *cg, ParseNode *pn, JSBool &ok)
+EmitWith(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSBool &ok)
 {
     StmtInfo stmtInfo;
-    if (!EmitTree(cx, cg, pn->pn_left))
+    if (!EmitTree(cx, bce, pn->pn_left))
         return false;
-    PushStatement(cg, &stmtInfo, STMT_WITH, CG_OFFSET(cg));
-    if (Emit1(cx, cg, JSOP_ENTERWITH) < 0)
+    PushStatement(bce, &stmtInfo, STMT_WITH, CG_OFFSET(bce));
+    if (Emit1(cx, bce, JSOP_ENTERWITH) < 0)
         return false;
 
     /* Make blockChain determination quicker. */
-    if (EmitBlockChain(cx, cg) < 0)
+    if (EmitBlockChain(cx, bce) < 0)
         return false;
-    if (!EmitTree(cx, cg, pn->pn_right))
+    if (!EmitTree(cx, bce, pn->pn_right))
         return false;
-    if (Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
+    if (Emit1(cx, bce, JSOP_LEAVEWITH) < 0)
         return false;
-    ok = PopStatementCG(cx, cg);
+    ok = PopStatementCG(cx, bce);
     return true;
 }
 
 static bool
-EmitForIn(JSContext *cx, CodeGenerator *cg, ParseNode *pn, ptrdiff_t top)
+EmitForIn(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
 {
     StmtInfo stmtInfo;
-    PushStatement(cg, &stmtInfo, STMT_FOR_IN_LOOP, top);
+    PushStatement(bce, &stmtInfo, STMT_FOR_IN_LOOP, top);
 
     ParseNode *forHead = pn->pn_left;
     ParseNode *forBody = pn->pn_right;
 
     /*
      * If the left part is 'var x', emit code to define x if necessary
      * using a prolog opcode, but do not emit a pop. If the left part
      * was originally 'var x = i', the parser will have rewritten it;
      * see Parser::forStatement. 'for (let x = i in o)' is mercifully
      * banned.
      */
     bool forLet = false;
     if (ParseNode *decl = forHead->pn_kid1) {
         JS_ASSERT(TokenKindIsDecl(decl->getKind()));
         forLet = decl->isKind(TOK_LET);
-        cg->flags |= TCF_IN_FOR_INIT;
-        if (!EmitTree(cx, cg, decl))
+        bce->flags |= TCF_IN_FOR_INIT;
+        if (!EmitTree(cx, bce, decl))
             return false;
-        cg->flags &= ~TCF_IN_FOR_INIT;
+        bce->flags &= ~TCF_IN_FOR_INIT;
     }
 
     /* Compile the object expression to the right of 'in'. */
     {
         TempPopScope tps;
-        if (forLet && !tps.popBlock(cx, cg))
+        if (forLet && !tps.popBlock(cx, bce))
             return false;
-        if (!EmitTree(cx, cg, forHead->pn_kid3))
+        if (!EmitTree(cx, bce, forHead->pn_kid3))
             return false;
-        if (forLet && !tps.repushBlock(cx, cg))
+        if (forLet && !tps.repushBlock(cx, bce))
             return false;
     }
 
     /*
      * Emit a bytecode to convert top of stack value to the iterator
      * object depending on the loop variant (for-in, for-each-in, or
      * destructuring for-in).
      */
     JS_ASSERT(pn->isOp(JSOP_ITER));
-    if (Emit2(cx, cg, JSOP_ITER, (uint8) pn->pn_iflags) < 0)
+    if (Emit2(cx, bce, JSOP_ITER, (uint8) pn->pn_iflags) < 0)
         return false;
 
     /* Annotate so the decompiler can find the loop-closing jump. */
-    intN noteIndex = NewSrcNote(cx, cg, SRC_FOR_IN);
+    intN noteIndex = NewSrcNote(cx, bce, SRC_FOR_IN);
     if (noteIndex < 0)
         return false;
 
     /*
      * Jump down to the loop condition to minimize overhead assuming at
      * least one iteration, as the other loop forms do.
      */
-    ptrdiff_t jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
+    ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
     if (jmp < 0)
         return false;
 
-    intN noteIndex2 = NewSrcNote(cx, cg, SRC_TRACE);
+    intN noteIndex2 = NewSrcNote(cx, bce, SRC_TRACE);
     if (noteIndex2 < 0)
         return false;
 
-    top = CG_OFFSET(cg);
+    top = CG_OFFSET(bce);
     SET_STATEMENT_TOP(&stmtInfo, top);
-    if (EmitTraceOp(cx, cg, NULL) < 0)
+    if (EmitTraceOp(cx, bce, NULL) < 0)
         return false;
 
 #ifdef DEBUG
-    intN loopDepth = cg->stackDepth;
+    intN loopDepth = bce->stackDepth;
 #endif
 
     /*
      * Emit code to get the next enumeration value and assign it to the
      * left hand side. The JSOP_POP after this assignment is annotated
      * so that the decompiler can distinguish 'for (x in y)' from
      * 'for (var x in y)'.
      */
-    if (!EmitAssignment(cx, cg, forHead->pn_kid2, JSOP_NOP, NULL))
+    if (!EmitAssignment(cx, bce, forHead->pn_kid2, JSOP_NOP, NULL))
         return false;
-    ptrdiff_t tmp2 = CG_OFFSET(cg);
-    if (forHead->pn_kid1 && NewSrcNote2(cx, cg, SRC_DECL,
+    ptrdiff_t tmp2 = CG_OFFSET(bce);
+    if (forHead->pn_kid1 && NewSrcNote2(cx, bce, SRC_DECL,
                                         (forHead->pn_kid1->isOp(JSOP_DEFVAR))
                                         ? SRC_DECL_VAR
                                         : SRC_DECL_LET) < 0) {
         return false;
     }
-    if (Emit1(cx, cg, JSOP_POP) < 0)
+    if (Emit1(cx, bce, JSOP_POP) < 0)
         return false;
 
     /* The stack should be balanced around the assignment opcode sequence. */
-    JS_ASSERT(cg->stackDepth == loopDepth);
+    JS_ASSERT(bce->stackDepth == loopDepth);
 
     /* Emit code for the loop body. */
-    if (!EmitTree(cx, cg, forBody))
+    if (!EmitTree(cx, bce, forBody))
         return false;
 
     /* Set loop and enclosing "update" offsets, for continue. */
     StmtInfo *stmt = &stmtInfo;
     do {
-        stmt->update = CG_OFFSET(cg);
+        stmt->update = CG_OFFSET(bce);
     } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
 
     /*
      * Fixup the goto that starts the loop to jump down to JSOP_MOREITER.
      */
-    CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
-    if (Emit1(cx, cg, JSOP_MOREITER) < 0)
+    CHECK_AND_SET_JUMP_OFFSET_AT(cx, bce, jmp);
+    if (Emit1(cx, bce, JSOP_MOREITER) < 0)
         return false;
-    ptrdiff_t beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
+    ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - CG_OFFSET(bce));
     if (beq < 0)
         return false;
 
     /*
      * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex
      * note gets bigger.
      */
-    if (!SetSrcNoteOffset(cx, cg, (uintN)noteIndex2, 0, beq - top))
+    if (!SetSrcNoteOffset(cx, bce, (uintN)noteIndex2, 0, beq - top))
         return false;
     /* Set the first srcnote offset so we can find the start of the loop body. */
-    if (!SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp2 - jmp))
+    if (!SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 0, tmp2 - jmp))
         return false;
     /* Set the second srcnote offset so we can find the closing jump. */
-    if (!SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1, beq - jmp))
+    if (!SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 1, beq - jmp))
         return false;
 
     /* Now fixup all breaks and continues (before the JSOP_ENDITER). */
-    if (!PopStatementCG(cx, cg))
+    if (!PopStatementCG(cx, bce))
         return false;
 
-    if (!NewTryNote(cx, cg, JSTRY_ITER, cg->stackDepth, top, CG_OFFSET(cg)))
+    if (!NewTryNote(cx, bce, JSTRY_ITER, bce->stackDepth, top, CG_OFFSET(bce)))
         return false;
 
-    return Emit1(cx, cg, JSOP_ENDITER) >= 0;
+    return Emit1(cx, bce, JSOP_ENDITER) >= 0;
 }
 
 static bool
-EmitNormalFor(JSContext *cx, CodeGenerator *cg, ParseNode *pn, ptrdiff_t top)
+EmitNormalFor(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
 {
     StmtInfo stmtInfo;
-    PushStatement(cg, &stmtInfo, STMT_FOR_LOOP, top);
+    PushStatement(bce, &stmtInfo, STMT_FOR_LOOP, top);
 
     ParseNode *forHead = pn->pn_left;
     ParseNode *forBody = pn->pn_right;
 
     /* C-style for (init; cond; update) ... loop. */
     JSOp op = JSOP_POP;
     ParseNode *pn3 = forHead->pn_kid1;
     if (!pn3) {
         /* No initializer: emit an annotated nop for the decompiler. */
         op = JSOP_NOP;
     } else {
-        cg->flags |= TCF_IN_FOR_INIT;
+        bce->flags |= TCF_IN_FOR_INIT;
 #if JS_HAS_DESTRUCTURING
         if (pn3->isKind(TOK_ASSIGN) &&
-            !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
+            !MaybeEmitGroupAssignment(cx, bce, op, pn3, &op)) {
             return false;
         }
 #endif
         if (op == JSOP_POP) {
-            if (!EmitTree(cx, cg, pn3))
+            if (!EmitTree(cx, bce, pn3))
                 return false;
             if (TokenKindIsDecl(pn3->getKind())) {
                 /*
                  * Check whether a destructuring-initialized var decl
                  * was optimized to a group assignment.  If so, we do
                  * not need to emit a pop below, so switch to a nop,
                  * just for the decompiler.
                  */
                 JS_ASSERT(pn3->isArity(PN_LIST));
                 if (pn3->pn_xflags & PNX_GROUPINIT)
                     op = JSOP_NOP;
             }
         }
-        cg->flags &= ~TCF_IN_FOR_INIT;
+        bce->flags &= ~TCF_IN_FOR_INIT;
     }
 
     /*
      * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
      * Use tmp to hold the biased srcnote "top" offset, which differs
      * from the top local variable by the length of the JSOP_GOTO{,X}
      * emitted in between tmp and top if this loop has a condition.
      */
-    intN noteIndex = NewSrcNote(cx, cg, SRC_FOR);
-    if (noteIndex < 0 || Emit1(cx, cg, op) < 0)
+    intN noteIndex = NewSrcNote(cx, bce, SRC_FOR);
+    if (noteIndex < 0 || Emit1(cx, bce, op) < 0)
         return false;
-    ptrdiff_t tmp = CG_OFFSET(cg);
+    ptrdiff_t tmp = CG_OFFSET(bce);
 
     ptrdiff_t jmp = -1;
     if (forHead->pn_kid2) {
         /* Goto the loop condition, which branches back to iterate. */
-        jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
+        jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
         if (jmp < 0)
             return false;
     }
 
-    top = CG_OFFSET(cg);
+    top = CG_OFFSET(bce);
     SET_STATEMENT_TOP(&stmtInfo, top);
 
-    intN noteIndex2 = NewSrcNote(cx, cg, SRC_TRACE);
+    intN noteIndex2 = NewSrcNote(cx, bce, SRC_TRACE);
     if (noteIndex2 < 0)
         return false;
 
     /* Emit code for the loop body. */
-    if (EmitTraceOp(cx, cg, forBody) < 0)
+    if (EmitTraceOp(cx, bce, forBody) < 0)
         return false;
-    if (!EmitTree(cx, cg, forBody))
+    if (!EmitTree(cx, bce, forBody))
         return false;
 
     /* Set the second note offset so we can find the update part. */
     JS_ASSERT(noteIndex != -1);
-    ptrdiff_t tmp2 = CG_OFFSET(cg);
+    ptrdiff_t tmp2 = CG_OFFSET(bce);
 
     /* Set loop and enclosing "update" offsets, for continue. */
     StmtInfo *stmt = &stmtInfo;
     do {
-        stmt->update = CG_OFFSET(cg);
+        stmt->update = CG_OFFSET(bce);
     } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
 
     /* Check for update code to do before the condition (if any). */
     pn3 = forHead->pn_kid3;
     if (pn3) {
         op = JSOP_POP;
 #if JS_HAS_DESTRUCTURING
         if (pn3->isKind(TOK_ASSIGN) &&
-            !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
+            !MaybeEmitGroupAssignment(cx, bce, op, pn3, &op)) {
             return false;
         }
 #endif
-        if (op == JSOP_POP && !EmitTree(cx, cg, pn3))
+        if (op == JSOP_POP && !EmitTree(cx, bce, pn3))
             return false;
 
         /* Always emit the POP or NOP, to help the decompiler. */
-        if (Emit1(cx, cg, op) < 0)
+        if (Emit1(cx, bce, op) < 0)
             return false;
 
         /* Restore the absolute line number for source note readers. */
         ptrdiff_t lineno = pn->pn_pos.end.lineno;
-        if (CG_CURRENT_LINE(cg) != (uintN) lineno) {
-            if (NewSrcNote2(cx, cg, SRC_SETLINE, lineno) < 0)
+        if (CG_CURRENT_LINE(bce) != (uintN) lineno) {
+            if (NewSrcNote2(cx, bce, SRC_SETLINE, lineno) < 0)
                 return false;
-            CG_CURRENT_LINE(cg) = (uintN) lineno;
-        }
-    }
-
-    ptrdiff_t tmp3 = CG_OFFSET(cg);
+            CG_CURRENT_LINE(bce) = (uintN) lineno;
+        }
+    }
+
+    ptrdiff_t tmp3 = CG_OFFSET(bce);
 
     if (forHead->pn_kid2) {
         /* Fix up the goto from top to target the loop condition. */
         JS_ASSERT(jmp >= 0);
-        CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
-
-        if (!EmitTree(cx, cg, forHead->pn_kid2))
+        CHECK_AND_SET_JUMP_OFFSET_AT(cx, bce, jmp);
+
+        if (!EmitTree(cx, bce, forHead->pn_kid2))
             return false;
     }
 
     /*
      * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex
      * note gets bigger.
      */
-    if (!SetSrcNoteOffset(cx, cg, (uintN)noteIndex2, 0, CG_OFFSET(cg) - top))
+    if (!SetSrcNoteOffset(cx, bce, (uintN)noteIndex2, 0, CG_OFFSET(bce) - top))
         return false;
     /* Set the first note offset so we can find the loop condition. */
-    if (!SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp3 - tmp))
+    if (!SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 0, tmp3 - tmp))
         return false;
-    if (!SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1, tmp2 - tmp))
+    if (!SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 1, tmp2 - tmp))
         return false;
     /* The third note offset helps us find the loop-closing jump. */
-    if (!SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 2, CG_OFFSET(cg) - tmp))
+    if (!SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 2, CG_OFFSET(bce) - tmp))
         return false;
 
     /* If no loop condition, just emit a loop-closing jump. */
     op = forHead->pn_kid2 ? JSOP_IFNE : JSOP_GOTO;
-    if (EmitJump(cx, cg, op, top - CG_OFFSET(cg)) < 0)
+    if (EmitJump(cx, bce, op, top - CG_OFFSET(bce)) < 0)
         return false;
 
     /* Now fixup all breaks and continues. */
-    return PopStatementCG(cx, cg);
+    return PopStatementCG(cx, bce);
 }
 
 static inline bool
-EmitFor(JSContext *cx, CodeGenerator *cg, ParseNode *pn, ptrdiff_t top)
+EmitFor(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
 {
     return pn->pn_left->isKind(TOK_IN)
-           ? EmitForIn(cx, cg, pn, top)
-           : EmitNormalFor(cx, cg, pn, top);
+           ? EmitForIn(cx, bce, pn, top)
+           : EmitNormalFor(cx, bce, pn, top);
 }
 
 JSBool
-frontend::EmitTree(JSContext *cx, CodeGenerator *cg, ParseNode *pn)
+frontend::EmitTree(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
 {
     JSBool useful, wantval;
     StmtInfo stmtInfo;
     StmtInfo *stmt;
     ptrdiff_t top, off, tmp, beq, jmp;
     ParseNode *pn2, *pn3;
     JSAtom *atom;
     jsatomid atomIndex;
     uintN index;
     ptrdiff_t noteIndex, noteIndex2;
     SrcNoteType noteType;
     jsbytecode *pc;
     JSOp op;
     uint32 argc;
-    EmitLevelManager elm(cg);
+    EmitLevelManager elm(bce);
 #if JS_HAS_SHARP_VARS
     jsint sharpnum;
 #endif
 
     JS_CHECK_RECURSION(cx, return JS_FALSE);
 
     JSBool ok = true;
-    pn->pn_offset = top = CG_OFFSET(cg);
+    pn->pn_offset = top = CG_OFFSET(bce);
 
     /* Emit notes to tell the current bytecode's source line number. */
-    UPDATE_LINE_NUMBER_NOTES(cx, cg, pn->pn_pos.begin.lineno);
+    UPDATE_LINE_NUMBER_NOTES(cx, bce, pn->pn_pos.begin.lineno);
 
     switch (pn->getKind()) {
       case TOK_FUNCTION:
       {
         JSFunction *fun;
         uintN slot;
 
 #if JS_HAS_XML_SUPPORT
         if (pn->isArity(PN_NULLARY)) {
-            if (Emit1(cx, cg, JSOP_GETFUNNS) < 0)
+            if (Emit1(cx, bce, JSOP_GETFUNNS) < 0)
                 return JS_FALSE;
             break;
         }
 #endif
 
         fun = pn->pn_funbox->function();
         JS_ASSERT(fun->isInterpreted());
         if (fun->script()) {
             /*
              * This second pass is needed to emit JSOP_NOP with a source note
              * for the already-emitted function definition prolog opcode. See
              * comments in the TOK_LC case.
              */
             JS_ASSERT(pn->isOp(JSOP_NOP));
-            JS_ASSERT(cg->inFunction());
-            if (!EmitFunctionDefNop(cx, cg, pn->pn_index))
+            JS_ASSERT(bce->inFunction());
+            if (!EmitFunctionDefNop(cx, bce, pn->pn_index))
                 return JS_FALSE;
             break;
         }
 
         JS_ASSERT_IF(pn->pn_funbox->tcflags & TCF_FUN_HEAVYWEIGHT,
                      fun->kind() == JSFUN_INTERPRETED);
 
         /* Generate code for the function's body. */
-        CodeGenerator *cg2 = cx->new_<CodeGenerator>(cg->parser, pn->pn_pos.begin.lineno);
-        if (!cg2) {
+        BytecodeEmitter *bce2 = cx->new_<BytecodeEmitter>(bce->parser, pn->pn_pos.begin.lineno);
+        if (!bce2) {
             js_ReportOutOfMemory(cx);
             return JS_FALSE;
         }
-        if (!cg2->init(cx))
+        if (!bce2->init(cx))
             return JS_FALSE;
 
-        cg2->flags = pn->pn_funbox->tcflags | TCF_COMPILING | TCF_IN_FUNCTION |
-                     (cg->flags & TCF_FUN_MIGHT_ALIAS_LOCALS);
-        cg2->bindings.transfer(cx, &pn->pn_funbox->bindings);
+        bce2->flags = pn->pn_funbox->tcflags | TCF_COMPILING | TCF_IN_FUNCTION |
+                      (bce->flags & TCF_FUN_MIGHT_ALIAS_LOCALS);
+        bce2->bindings.transfer(cx, &pn->pn_funbox->bindings);
 #if JS_HAS_SHARP_VARS
-        if (cg2->flags & TCF_HAS_SHARPS) {
-            cg2->sharpSlotBase = cg2->bindings.sharpSlotBase(cx);
-            if (cg2->sharpSlotBase < 0)
+        if (bce2->flags & TCF_HAS_SHARPS) {
+            bce2->sharpSlotBase = bce2->bindings.sharpSlotBase(cx);
+            if (bce2->sharpSlotBase < 0)
                 return JS_FALSE;
         }
 #endif
-        cg2->setFunction(fun);
-        cg2->funbox = pn->pn_funbox;
-        cg2->parent = cg;
+        bce2->setFunction(fun);
+        bce2->funbox = pn->pn_funbox;
+        bce2->parent = bce;
 
         /*
          * js::frontend::SetStaticLevel limited static nesting depth to fit in
          * 16 bits and to reserve the all-ones value, thereby reserving the
-         * magic FREE_UPVAR_COOKIE value. Note the cg2->staticLevel assignment
+         * magic FREE_UPVAR_COOKIE value. Note the bce2->staticLevel assignment
          * below.
          */
-        JS_ASSERT(cg->staticLevel < JS_BITMASK(16) - 1);
-        cg2->staticLevel = cg->staticLevel + 1;
+        JS_ASSERT(bce->staticLevel < JS_BITMASK(16) - 1);
+        bce2->staticLevel = bce->staticLevel + 1;
 
         /* We measured the max scope depth when we parsed the function. */
-        if (!EmitFunctionScript(cx, cg2, pn->pn_body))
+        if (!EmitFunctionScript(cx, bce2, pn->pn_body))
             pn = NULL;
 
-        cx->delete_(cg2);
-        cg2 = NULL;
+        cx->delete_(bce2);
+        bce2 = NULL;
         if (!pn)
             return JS_FALSE;
 
         /* Make the function object a literal in the outer script's pool. */
-        index = cg->objectList.index(pn->pn_funbox);
+        index = bce->objectList.index(pn->pn_funbox);
 
         /* Emit a bytecode pointing to the closure object in its immediate. */
         op = pn->getOp();
         if (op != JSOP_NOP) {
             if ((pn->pn_funbox->tcflags & TCF_GENEXP_LAMBDA) &&
-                NewSrcNote(cx, cg, SRC_GENEXP) < 0)
+                NewSrcNote(cx, bce, SRC_GENEXP) < 0)
             {
                 return JS_FALSE;
             }
             EMIT_INDEX_OP(op, index);
 
             /* Make blockChain determination quicker. */
-            if (EmitBlockChain(cx, cg) < 0)
+            if (EmitBlockChain(cx, bce) < 0)
                 return JS_FALSE;
             break;
         }
 
         /*
          * For a script we emit the code as we parse. Thus the bytecode for
          * top-level functions should go in the prolog to predefine their
          * names in the variable object before the already-generated main code
          * is executed. This extra work for top-level scripts is not necessary
          * when we emit the code for a function. It is fully parsed prior to
          * invocation of the emitter and calls to EmitTree for function
          * definitions can be scheduled before generating the rest of code.
          */
-        if (!cg->inFunction()) {
-            JS_ASSERT(!cg->topStmt);
-            if (!BindGlobal(cx, cg, pn, fun->atom))
+        if (!bce->inFunction()) {
+            JS_ASSERT(!bce->topStmt);
+            if (!BindGlobal(cx, bce, pn, fun->atom))
                 return false;
             if (pn->pn_cookie.isFree()) {
-                CG_SWITCH_TO_PROLOG(cg);
+                CG_SWITCH_TO_PROLOG(bce);
                 op = fun->isFlatClosure() ? JSOP_DEFFUN_FC : JSOP_DEFFUN;
                 EMIT_INDEX_OP(op, index);
 
                 /* Make blockChain determination quicker. */
-                if (EmitBlockChain(cx, cg) < 0)
+                if (EmitBlockChain(cx, bce) < 0)
                     return JS_FALSE;
-                CG_SWITCH_TO_MAIN(cg);
+                CG_SWITCH_TO_MAIN(bce);
             }
 
             /* Emit NOP for the decompiler. */
-            if (!EmitFunctionDefNop(cx, cg, index))
+            if (!EmitFunctionDefNop(cx, bce, index))
                 return JS_FALSE;
         } else {
-            DebugOnly<BindingKind> kind = cg->bindings.lookup(cx, fun->atom, &slot);
+            DebugOnly<BindingKind> kind = bce->bindings.lookup(cx, fun->atom, &slot);
             JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
             JS_ASSERT(index < JS_BIT(20));
             pn->pn_index = index;
             op = fun->isFlatClosure() ? JSOP_DEFLOCALFUN_FC : JSOP_DEFLOCALFUN;
             if (pn->isClosed() &&
-                !cg->callsEval() &&
-                !cg->closedVars.append(pn->pn_cookie.slot())) {
+                !bce->callsEval() &&
+                !bce->closedVars.append(pn->pn_cookie.slot())) {
                 return JS_FALSE;
             }
-            if (!EmitSlotIndexOp(cx, op, slot, index, cg))
+            if (!EmitSlotIndexOp(cx, op, slot, index, bce))
                 return JS_FALSE;
 
             /* Make blockChain determination quicker. */
-            if (EmitBlockChain(cx, cg) < 0)
+            if (EmitBlockChain(cx, bce) < 0)
                 return JS_FALSE;
         }
         break;
       }
 
       case TOK_ARGSBODY:
       {
         ParseNode *pnlast = pn->last();
         for (ParseNode *pn2 = pn->pn_head; pn2 != pnlast; pn2 = pn2->pn_next) {
             if (!pn2->isDefn())
                 continue;
-            if (!BindNameToSlot(cx, cg, pn2))
+            if (!BindNameToSlot(cx, bce, pn2))
                 return JS_FALSE;
-            if (JOF_OPTYPE(pn2->getOp()) == JOF_QARG && cg->shouldNoteClosedName(pn2)) {
-                if (!cg->closedArgs.append(pn2->pn_cookie.slot()))
+            if (JOF_OPTYPE(pn2->getOp()) == JOF_QARG && bce->shouldNoteClosedName(pn2)) {
+                if (!bce->closedArgs.append(pn2->pn_cookie.slot()))
                     return JS_FALSE;
             }
         }
-        ok = EmitTree(cx, cg, pnlast);
+        ok = EmitTree(cx, bce, pnlast);
         break;
       }
 
       case TOK_UPVARS:
         JS_ASSERT(pn->pn_names->count() != 0);
-        cg->roLexdeps = pn->pn_names;
-        ok = EmitTree(cx, cg, pn->pn_tree);
-        cg->roLexdeps.clearMap();
+        bce->roLexdeps = pn->pn_names;
+        ok = EmitTree(cx, bce, pn->pn_tree);
+        bce->roLexdeps.clearMap();
         pn->pn_names.releaseMap(cx);
         break;
 
       case TOK_IF:
-        ok = EmitIf(cx, cg, pn);
+        ok = EmitIf(cx, bce, pn);
         break;
 
       case TOK_SWITCH:
-        ok = EmitSwitch(cx, cg, pn);
+        ok = EmitSwitch(cx, bce, pn);
         break;
 
       case TOK_WHILE:
         /*
          * Minimize bytecodes issued for one or more iterations by jumping to
          * the condition below the body and closing the loop if the condition
          * is true with a backward branch. For iteration count i:
          *
@@ -6056,364 +6055,364 @@ frontend::EmitTree(JSContext *cx, CodeGe
          *
          * SpiderMonkey, pre-mozilla.org, emitted while parsing and so used
          * test at the top. When ParseNode trees were added during the ES3
          * work (1998-9), the code generation scheme was not optimized, and
          * the decompiler continued to take advantage of the branch and jump
          * that bracketed the body. But given the SRC_WHILE note, it is easy
          * to support the more efficient scheme.
          */
-        PushStatement(cg, &stmtInfo, STMT_WHILE_LOOP, top);
-        noteIndex = NewSrcNote(cx, cg, SRC_WHILE);
+        PushStatement(bce, &stmtInfo, STMT_WHILE_LOOP, top);
+        noteIndex = NewSrcNote(cx, bce, SRC_WHILE);
         if (noteIndex < 0)
             return JS_FALSE;
-        jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
+        jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
         if (jmp < 0)
             return JS_FALSE;
-        noteIndex2 = NewSrcNote(cx, cg, SRC_TRACE);
+        noteIndex2 = NewSrcNote(cx, bce, SRC_TRACE);
         if (noteIndex2 < 0)
             return JS_FALSE;
-        top = EmitTraceOp(cx, cg, pn->pn_right);
+        top = EmitTraceOp(cx, bce, pn->pn_right);
         if (top < 0)
             return JS_FALSE;
-        if (!EmitTree(cx, cg, pn->pn_right))
+        if (!EmitTree(cx, bce, pn->pn_right))
             return JS_FALSE;
-        CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
-        if (!EmitTree(cx, cg, pn->pn_left))
+        CHECK_AND_SET_JUMP_OFFSET_AT(cx, bce, jmp);
+        if (!EmitTree(cx, bce, pn->pn_left))
             return JS_FALSE;
-        beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
+        beq = EmitJump(cx, bce, JSOP_IFNE, top - CG_OFFSET(bce));
         if (beq < 0)
             return JS_FALSE;
         /*
          * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex
          * note gets bigger.
          */
-        if (!SetSrcNoteOffset(cx, cg, noteIndex2, 0, beq - top))
+        if (!SetSrcNoteOffset(cx, bce, noteIndex2, 0, beq - top))
             return JS_FALSE;
-        if (!SetSrcNoteOffset(cx, cg, noteIndex, 0, beq - jmp))
+        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, beq - jmp))
             return JS_FALSE;
-        ok = PopStatementCG(cx, cg);
+        ok = PopStatementCG(cx, bce);
         break;
 
       case TOK_DO:
         /* Emit an annotated nop so we know to decompile a 'do' keyword. */
-        noteIndex = NewSrcNote(cx, cg, SRC_WHILE);
-        if (noteIndex < 0 || Emit1(cx, cg, JSOP_NOP) < 0)
+        noteIndex = NewSrcNote(cx, bce, SRC_WHILE);
+        if (noteIndex < 0 || Emit1(cx, bce, JSOP_NOP) < 0)
             return JS_FALSE;
 
-        noteIndex2 = NewSrcNote(cx, cg, SRC_TRACE);
+        noteIndex2 = NewSrcNote(cx, bce, SRC_TRACE);
         if (noteIndex2 < 0)
             return JS_FALSE;
 
         /* Compile the loop body. */
-        top = EmitTraceOp(cx, cg, pn->pn_left);
+        top = EmitTraceOp(cx, bce, pn->pn_left);
         if (top < 0)
             return JS_FALSE;
-        PushStatement(cg, &stmtInfo, STMT_DO_LOOP, top);
-        if (!EmitTree(cx, cg, pn->pn_left))
+        PushStatement(bce, &stmtInfo, STMT_DO_LOOP, top);
+        if (!EmitTree(cx, bce, pn->pn_left))
             return JS_FALSE;
 
         /* Set loop and enclosing label update offsets, for continue. */
-        off = CG_OFFSET(cg);
+        off = CG_OFFSET(bce);
         stmt = &stmtInfo;
         do {
             stmt->update = off;
         } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
 
         /* Compile the loop condition, now that continues know where to go. */
-        if (!EmitTree(cx, cg, pn->pn_right))
+        if (!EmitTree(cx, bce, pn->pn_right))
             return JS_FALSE;
 
         /*
          * Since we use JSOP_IFNE for other purposes as well as for do-while
          * loops, we must store 1 + (beq - top) in the SRC_WHILE note offset,
          * and the decompiler must get that delta and decompile recursively.
          */
-        beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
+        beq = EmitJump(cx, bce, JSOP_IFNE, top - CG_OFFSET(bce));
         if (beq < 0)
             return JS_FALSE;
         /*
          * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex
          * note gets bigger.
          */
-        if (!SetSrcNoteOffset(cx, cg, noteIndex2, 0, beq - top))
+        if (!SetSrcNoteOffset(cx, bce, noteIndex2, 0, beq - top))
             return JS_FALSE;
-        if (!SetSrcNoteOffset(cx, cg, noteIndex, 0, 1 + (off - top)))
+        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, 1 + (off - top)))
             return JS_FALSE;
-        ok = PopStatementCG(cx, cg);
+        ok = PopStatementCG(cx, bce);
         break;
 
       case TOK_FOR:
-        ok = EmitFor(cx, cg, pn, top);
+        ok = EmitFor(cx, bce, pn, top);
         break;
 
       case TOK_BREAK: {
-        stmt = cg->topStmt;
+        stmt = bce->topStmt;
         atom = pn->pn_atom;
 
         jsatomid labelIndex;
         if (atom) {
-            if (!cg->makeAtomIndex(atom, &labelIndex))
+            if (!bce->makeAtomIndex(atom, &labelIndex))
                 return JS_FALSE;
 
             while (stmt->type != STMT_LABEL || stmt->label != atom)
                 stmt = stmt->down;
             noteType = SRC_BREAK2LABEL;
         } else {
             labelIndex = INVALID_ATOMID;
             while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
                 stmt = stmt->down;
             noteType = (stmt->type == STMT_SWITCH) ? SRC_SWITCHBREAK : SRC_BREAK;
         }
 
-        if (EmitGoto(cx, cg, stmt, &stmt->breaks, labelIndex, noteType) < 0)
+        if (EmitGoto(cx, bce, stmt, &stmt->breaks, labelIndex, noteType) < 0)
             return JS_FALSE;
         break;
       }
 
       case TOK_CONTINUE: {
-        stmt = cg->topStmt;
+        stmt = bce->topStmt;
         atom = pn->pn_atom;
 
         jsatomid labelIndex;
         if (atom) {
             /* Find the loop statement enclosed by the matching label. */
             StmtInfo *loop = NULL;
-            if (!cg->makeAtomIndex(atom, &labelIndex))
+            if (!bce->makeAtomIndex(atom, &labelIndex))
                 return JS_FALSE;
             while (stmt->type != STMT_LABEL || stmt->label != atom) {
                 if (STMT_IS_LOOP(stmt))
                     loop = stmt;
                 stmt = stmt->down;
             }
             stmt = loop;
             noteType = SRC_CONT2LABEL;
         } else {
             labelIndex = INVALID_ATOMID;
             while (!STMT_IS_LOOP(stmt))
                 stmt = stmt->down;
             noteType = SRC_CONTINUE;
         }
 
-        if (EmitGoto(cx, cg, stmt, &stmt->continues, labelIndex, noteType) < 0)
+        if (EmitGoto(cx, bce, stmt, &stmt->continues, labelIndex, noteType) < 0)
             return JS_FALSE;
         break;
       }
 
       case TOK_WITH:
-        if (!EmitWith(cx, cg, pn, ok))
+        if (!EmitWith(cx, bce, pn, ok))
             return false;
         break;
 
       case TOK_TRY:
-        if (!EmitTry(cx, cg, pn))
+        if (!EmitTry(cx, bce, pn))
             return false;
         break;
 
       case TOK_CATCH:
-        if (!EmitCatch(cx, cg, pn))
+        if (!EmitCatch(cx, bce, pn))
             return false;
         break;
 
       case TOK_VAR:
-        if (!EmitVariables(cx, cg, pn, JS_FALSE, &noteIndex))
+        if (!EmitVariables(cx, bce, pn, JS_FALSE, &noteIndex))
             return JS_FALSE;
         break;
 
       case TOK_RETURN:
         /* Push a return value */
         pn2 = pn->pn_kid;
         if (pn2) {
-            if (!EmitTree(cx, cg, pn2))
+            if (!EmitTree(cx, bce, pn2))
                 return JS_FALSE;
         } else {
-            if (Emit1(cx, cg, JSOP_PUSH) < 0)
+            if (Emit1(cx, bce, JSOP_PUSH) < 0)
                 return JS_FALSE;
         }
 
         /*
          * EmitNonLocalJumpFixup may add fixup bytecode to close open try
          * blocks having finally clauses and to exit intermingled let blocks.
          * We can't simply transfer control flow to our caller in that case,
          * because we must gosub to those finally clauses from inner to outer,
          * with the correct stack pointer (i.e., after popping any with,
          * for/in, etc., slots nested inside the finally's try).
          *
          * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
          * extra JSOP_RETRVAL after the fixups.
          */
-        top = CG_OFFSET(cg);
-        if (Emit1(cx, cg, JSOP_RETURN) < 0)
+        top = CG_OFFSET(bce);
+        if (Emit1(cx, bce, JSOP_RETURN) < 0)
             return JS_FALSE;
-        if (!EmitNonLocalJumpFixup(cx, cg, NULL))
+        if (!EmitNonLocalJumpFixup(cx, bce, NULL))
             return JS_FALSE;
-        if (top + JSOP_RETURN_LENGTH != CG_OFFSET(cg)) {
-            CG_BASE(cg)[top] = JSOP_SETRVAL;
-            if (Emit1(cx, cg, JSOP_RETRVAL) < 0)
+        if (top + JSOP_RETURN_LENGTH != CG_OFFSET(bce)) {
+            CG_BASE(bce)[top] = JSOP_SETRVAL;
+            if (Emit1(cx, bce, JSOP_RETRVAL) < 0)
                 return JS_FALSE;
-            if (EmitBlockChain(cx, cg) < 0)
+            if (EmitBlockChain(cx, bce) < 0)
                 return JS_FALSE;
         }
         break;
 
 #if JS_HAS_GENERATORS
       case TOK_YIELD:
-        JS_ASSERT(cg->inFunction());
+        JS_ASSERT(bce->inFunction());
         if (pn->pn_kid) {
-            if (!EmitTree(cx, cg, pn->pn_kid))
+            if (!EmitTree(cx, bce, pn->pn_kid))
                 return JS_FALSE;
         } else {
-            if (Emit1(cx, cg, JSOP_PUSH) < 0)
+            if (Emit1(cx, bce, JSOP_PUSH) < 0)
                 return JS_FALSE;
         }
-        if (pn->pn_hidden && NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
+        if (pn->pn_hidden && NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
             return JS_FALSE;
-        if (Emit1(cx, cg, JSOP_YIELD) < 0)
+        if (Emit1(cx, bce, JSOP_YIELD) < 0)
             return JS_FALSE;
         break;
 #endif
 
       case TOK_LC:
       {
 #if JS_HAS_XML_SUPPORT
         if (pn->isArity(PN_UNARY)) {
-            if (!EmitTree(cx, cg, pn->pn_kid))
+            if (!EmitTree(cx, bce, pn->pn_kid))
                 return JS_FALSE;
-            if (Emit1(cx, cg, pn->getOp()) < 0)
+            if (Emit1(cx, bce, pn->getOp()) < 0)
                 return JS_FALSE;
             break;
         }
 #endif
 
         JS_ASSERT(pn->isArity(PN_LIST));
 
         noteIndex = -1;
-        tmp = CG_OFFSET(cg);
+        tmp = CG_OFFSET(bce);
         if (pn->pn_xflags & PNX_NEEDBRACES) {
-            noteIndex = NewSrcNote2(cx, cg, SRC_BRACE, 0);
-            if (noteIndex < 0 || Emit1(cx, cg, JSOP_NOP) < 0)
+            noteIndex = NewSrcNote2(cx, bce, SRC_BRACE, 0);
+            if (noteIndex < 0 || Emit1(cx, bce, JSOP_NOP) < 0)
                 return JS_FALSE;
         }
 
-        PushStatement(cg, &stmtInfo, STMT_BLOCK, top);
+        PushStatement(bce, &stmtInfo, STMT_BLOCK, top);
 
         ParseNode *pnchild = pn->pn_head;
         if (pn->pn_xflags & PNX_FUNCDEFS) {
             /*
              * This block contains top-level function definitions. To ensure
              * that we emit the bytecode defining them before the rest of code
              * in the block we use a separate pass over functions. During the
              * main pass later the emitter will add JSOP_NOP with source notes
              * for the function to preserve the original functions position
              * when decompiling.
              *
              * Currently this is used only for functions, as compile-as-we go
              * mode for scripts does not allow separate emitter passes.
              */
-            JS_ASSERT(cg->inFunction());
+            JS_ASSERT(bce->inFunction());
             if (pn->pn_xflags & PNX_DESTRUCT) {
                 /*
                  * Assign the destructuring arguments before defining any
                  * functions, see bug 419662.
                  */
                 JS_ASSERT(pnchild->isKind(TOK_SEMI));
                 JS_ASSERT(pnchild->pn_kid->isKind(TOK_VAR));
-                if (!EmitTree(cx, cg, pnchild))
+                if (!EmitTree(cx, bce, pnchild))
                     return JS_FALSE;
                 pnchild = pnchild->pn_next;
             }
 
             for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
                 if (pn2->isKind(TOK_FUNCTION)) {
                     if (pn2->isOp(JSOP_NOP)) {
-                        if (!EmitTree(cx, cg, pn2))
+                        if (!EmitTree(cx, bce, pn2))
                             return JS_FALSE;
                     } else {
                         /*
                          * JSOP_DEFFUN in a top-level block with function
                          * definitions appears, for example, when "if (true)"
                          * is optimized away from "if (true) function x() {}".
                          * See bug 428424.
                          */
                         JS_ASSERT(pn2->isOp(JSOP_DEFFUN));
                     }
                 }
             }
         }
         for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
-            if (!EmitTree(cx, cg, pn2))
+            if (!EmitTree(cx, bce, pn2))
                 return JS_FALSE;
         }
 
-        if (noteIndex >= 0 && !SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, CG_OFFSET(cg) - tmp))
+        if (noteIndex >= 0 && !SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 0, CG_OFFSET(bce) - tmp))
             return JS_FALSE;
 
-        ok = PopStatementCG(cx, cg);
+        ok = PopStatementCG(cx, bce);
         break;
       }
 
       case TOK_SEQ:
         JS_ASSERT(pn->isArity(PN_LIST));
-        PushStatement(cg, &stmtInfo, STMT_SEQ, top);
+        PushStatement(bce, &stmtInfo, STMT_SEQ, top);
         for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
-            if (!EmitTree(cx, cg, pn2))
+            if (!EmitTree(cx, bce, pn2))
                 return JS_FALSE;
         }
-        ok = PopStatementCG(cx, cg);
+        ok = PopStatementCG(cx, bce);
         break;
 
       case TOK_SEMI:
         pn2 = pn->pn_kid;
         if (pn2) {
             /*
              * Top-level or called-from-a-native JS_Execute/EvaluateScript,
              * debugger, and eval frames may need the value of the ultimate
              * expression statement as the script's result, despite the fact
              * that it appears useless to the compiler.
              *
              * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
              * calling JS_Compile* to suppress JSOP_POPV.
              */
-            useful = wantval = !(cg->flags & (TCF_IN_FUNCTION | TCF_NO_SCRIPT_RVAL));
+            useful = wantval = !(bce->flags & (TCF_IN_FUNCTION | TCF_NO_SCRIPT_RVAL));
 
             /* Don't eliminate expressions with side effects. */
             if (!useful) {
-                if (!CheckSideEffects(cx, cg, pn2, &useful))
+                if (!CheckSideEffects(cx, bce, pn2, &useful))
                     return JS_FALSE;
             }
 
             /*
              * Don't eliminate apparently useless expressions if they are
              * labeled expression statements.  The tc->topStmt->update test
              * catches the case where we are nesting in EmitTree for a labeled
              * compound statement.
              */
             if (!useful &&
-                cg->topStmt &&
-                cg->topStmt->type == STMT_LABEL &&
-                cg->topStmt->update >= CG_OFFSET(cg)) {
+                bce->topStmt &&
+                bce->topStmt->type == STMT_LABEL &&
+                bce->topStmt->update >= CG_OFFSET(bce)) {
                 useful = true;
             }
 
             if (!useful) {
                 /* Don't complain about directive prologue members; just don't emit their code. */
                 if (!pn->isDirectivePrologueMember()) {
-                    CG_CURRENT_LINE(cg) = pn2->pn_pos.begin.lineno;
-                    if (!ReportCompileErrorNumber(cx, CG_TS(cg), pn2,
+                    CG_CURRENT_LINE(bce) = pn2->pn_pos.begin.lineno;
+                    if (!ReportCompileErrorNumber(cx, CG_TS(bce), pn2,
                                                   JSREPORT_WARNING | JSREPORT_STRICT,
                                                   JSMSG_USELESS_EXPR)) {
                         return JS_FALSE;
                     }
                 }
             } else {
                 op = wantval ? JSOP_POPV : JSOP_POP;
 #if JS_HAS_DESTRUCTURING
                 if (!wantval &&
                     pn2->isKind(TOK_ASSIGN) &&
-                    !MaybeEmitGroupAssignment(cx, cg, op, pn2, &op)) {
+                    !MaybeEmitGroupAssignment(cx, bce, op, pn2, &op)) {
                     return JS_FALSE;
                 }
 #endif
                 if (op != JSOP_NOP) {
                     /*
                      * Specialize JSOP_SETPROP to JSOP_SETMETHOD to defer or
                      * avoid null closure cloning. Do this only for assignment
                      * statements that are not completion values wanted by a
@@ -6423,187 +6422,187 @@ frontend::EmitTree(JSContext *cx, CodeGe
                     if (!wantval &&
                         pn2->isKind(TOK_ASSIGN) &&
                         pn2->isOp(JSOP_NOP) &&
                         pn2->pn_left->isOp(JSOP_SETPROP) &&
                         pn2->pn_right->isOp(JSOP_LAMBDA) &&
                         pn2->pn_right->pn_funbox->joinable()) {
                         pn2->pn_left->setOp(JSOP_SETMETHOD);
                     }
-                    if (!EmitTree(cx, cg, pn2))
+                    if (!EmitTree(cx, bce, pn2))
                         return JS_FALSE;
-                    if (Emit1(cx, cg, op) < 0)
+                    if (Emit1(cx, bce, op) < 0)
                         return JS_FALSE;
                 }
             }
         }
         break;
 
       case TOK_COLON:
         /* Emit an annotated nop so we know to decompile a label. */
         atom = pn->pn_atom;
 
         jsatomid index;
-        if (!cg->makeAtomIndex(atom, &index))
+        if (!bce->makeAtomIndex(atom, &index))
             return JS_FALSE;
 
         pn2 = pn->expr();
         noteType = (pn2->isKind(TOK_LC) ||
                     (pn2->isKind(TOK_LEXICALSCOPE) &&
                      pn2->expr()->isKind(TOK_LC)))
                    ? SRC_LABELBRACE
                    : SRC_LABEL;
-        noteIndex = NewSrcNote2(cx, cg, noteType, ptrdiff_t(index));
-        if (noteIndex < 0 || Emit1(cx, cg, JSOP_NOP) < 0)
+        noteIndex = NewSrcNote2(cx, bce, noteType, ptrdiff_t(index));
+        if (noteIndex < 0 || Emit1(cx, bce, JSOP_NOP) < 0)
             return JS_FALSE;
 
         /* Emit code for the labeled statement. */
-        PushStatement(cg, &stmtInfo, STMT_LABEL, CG_OFFSET(cg));
+        PushStatement(bce, &stmtInfo, STMT_LABEL, CG_OFFSET(bce));
         stmtInfo.label = atom;
-        if (!EmitTree(cx, cg, pn2))
+        if (!EmitTree(cx, bce, pn2))
             return JS_FALSE;
-        if (!PopStatementCG(cx, cg))
+        if (!PopStatementCG(cx, bce))
             return JS_FALSE;
 
         /* If the statement was compound, emit a note for the end brace. */
         if (noteType == SRC_LABELBRACE) {
-            if (NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
-                Emit1(cx, cg, JSOP_NOP) < 0) {
+            if (NewSrcNote(cx, bce, SRC_ENDBRACE) < 0 ||
+                Emit1(cx, bce, JSOP_NOP) < 0) {
                 return JS_FALSE;
             }
         }
         break;
 
       case TOK_COMMA:
         /*
          * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
          * These notes help the decompiler bracket the bytecodes generated
          * from each sub-expression that follows a comma.
          */
         off = noteIndex = -1;
         for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
-            if (!EmitTree(cx, cg, pn2))
+            if (!EmitTree(cx, bce, pn2))
                 return JS_FALSE;
-            tmp = CG_OFFSET(cg);
+            tmp = CG_OFFSET(bce);
             if (noteIndex >= 0) {
-                if (!SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
+                if (!SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 0, tmp-off))
                     return JS_FALSE;
             }
             if (!pn2->pn_next)
                 break;
             off = tmp;
-            noteIndex = NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
+            noteIndex = NewSrcNote2(cx, bce, SRC_PCDELTA, 0);
             if (noteIndex < 0 ||
-                Emit1(cx, cg, JSOP_POP) < 0) {
+                Emit1(cx, bce, JSOP_POP) < 0) {
                 return JS_FALSE;
             }
         }
         break;
 
       case TOK_ASSIGN:
-        if (!EmitAssignment(cx, cg, pn->pn_left, pn->getOp(), pn->pn_right))
+        if (!EmitAssignment(cx, bce, pn->pn_left, pn->getOp(), pn->pn_right))
             return false;
         break;
 
       case TOK_HOOK:
         /* Emit the condition, then branch if false to the else part. */
-        if (!EmitTree(cx, cg, pn->pn_kid1))
+        if (!EmitTree(cx, bce, pn->pn_kid1))
             return JS_FALSE;
-        noteIndex = NewSrcNote(cx, cg, SRC_COND);
+        noteIndex = NewSrcNote(cx, bce, SRC_COND);
         if (noteIndex < 0)
             return JS_FALSE;
-        beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
-        if (beq < 0 || !EmitTree(cx, cg, pn->pn_kid2))
+        beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
+        if (beq < 0 || !EmitTree(cx, bce, pn->pn_kid2))
             return JS_FALSE;
 
         /* Jump around else, fixup the branch, emit else, fixup jump. */
-        jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
+        jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
         if (jmp < 0)
             return JS_FALSE;
-        CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
+        CHECK_AND_SET_JUMP_OFFSET_AT(cx, bce, beq);
 
         /*
          * Because each branch pushes a single value, but our stack budgeting
-         * analysis ignores branches, we now have to adjust cg->stackDepth to
+         * analysis ignores branches, we now have to adjust bce->stackDepth to
          * ignore the value pushed by the first branch.  Execution will follow
-         * only one path, so we must decrement cg->stackDepth.
+         * only one path, so we must decrement bce->stackDepth.
          *
          * Failing to do this will foil code, such as the try/catch/finally
-         * exception handling code generator, that samples cg->stackDepth for
+         * exception handling code generator, that samples bce->stackDepth for
          * use at runtime (JSOP_SETSP), or in let expression and block code
          * generation, which must use the stack depth to compute local stack
          * indexes correctly.
          */
-        JS_ASSERT(cg->stackDepth > 0);
-        cg->stackDepth--;
-        if (!EmitTree(cx, cg, pn->pn_kid3))
+        JS_ASSERT(bce->stackDepth > 0);
+        bce->stackDepth--;
+        if (!EmitTree(cx, bce, pn->pn_kid3))
             return JS_FALSE;
-        CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
-        if (!SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
+        CHECK_AND_SET_JUMP_OFFSET_AT(cx, bce, jmp);
+        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
             return JS_FALSE;
         break;
 
       case TOK_OR:
       case TOK_AND:
         /*
          * JSOP_OR converts the operand on the stack to boolean, and if true,
          * leaves the original operand value on the stack and jumps; otherwise
          * it pops and falls into the next bytecode, which evaluates the right
          * operand.  The jump goes around the right operand evaluation.
          *
          * JSOP_AND converts the operand on the stack to boolean, and if false,
          * leaves the original operand value on the stack and jumps; otherwise
          * it pops and falls into the right operand's bytecode.
          */
         if (pn->isArity(PN_BINARY)) {
-            if (!EmitTree(cx, cg, pn->pn_left))
+            if (!EmitTree(cx, bce, pn->pn_left))
                 return JS_FALSE;
-            top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
+            top = EmitJump(cx, bce, JSOP_BACKPATCH_POP, 0);
             if (top < 0)
                 return JS_FALSE;
-            if (!EmitTree(cx, cg, pn->pn_right))
+            if (!EmitTree(cx, bce, pn->pn_right))
                 return JS_FALSE;
-            off = CG_OFFSET(cg);
-            pc = CG_CODE(cg, top);
-            CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
+            off = CG_OFFSET(bce);
+            pc = CG_CODE(bce, top);
+            CHECK_AND_SET_JUMP_OFFSET(cx, bce, pc, off - top);
             *pc = pn->getOp();
         } else {
             JS_ASSERT(pn->isArity(PN_LIST));
             JS_ASSERT(pn->pn_head->pn_next->pn_next);
 
             /* Left-associative operator chain: avoid too much recursion. */
             pn2 = pn->pn_head;
-            if (!EmitTree(cx, cg, pn2))
+            if (!EmitTree(cx, bce, pn2))
                 return JS_FALSE;
-            top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
+            top = EmitJump(cx, bce, JSOP_BACKPATCH_POP, 0);
             if (top < 0)
                 return JS_FALSE;
 
             /* Emit nodes between the head and the tail. */
             jmp = top;
             while ((pn2 = pn2->pn_next)->pn_next) {
-                if (!EmitTree(cx, cg, pn2))
+                if (!EmitTree(cx, bce, pn2))
                     return JS_FALSE;
-                off = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
+                off = EmitJump(cx, bce, JSOP_BACKPATCH_POP, 0);
                 if (off < 0)
                     return JS_FALSE;
-                if (!SetBackPatchDelta(cx, cg, CG_CODE(cg, jmp), off - jmp))
+                if (!SetBackPatchDelta(cx, bce, CG_CODE(bce, jmp), off - jmp))
                     return JS_FALSE;
                 jmp = off;
 
             }
-            if (!EmitTree(cx, cg, pn2))
+            if (!EmitTree(cx, bce, pn2))
                 return JS_FALSE;
 
             pn2 = pn->pn_head;
-            off = CG_OFFSET(cg);
+            off = CG_OFFSET(bce);
             do {
-                pc = CG_CODE(cg, top);
-                tmp = GetJumpOffset(cg, pc);
-                CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
+                pc = CG_CODE(bce, top);
+                tmp = GetJumpOffset(bce, pc);
+                CHECK_AND_SET_JUMP_OFFSET(cx, bce, pc, off - top);
                 *pc = pn->getOp();
                 top += tmp;
             } while ((pn2 = pn2->pn_next)->pn_next);
         }
         break;
 
       case TOK_PLUS:
       case TOK_BITOR:
@@ -6615,56 +6614,56 @@ frontend::EmitTree(JSContext *cx, CodeGe
       case TOK_INSTANCEOF:
       case TOK_SHOP:
       case TOK_MINUS:
       case TOK_STAR:
       case TOK_DIVOP:
         if (pn->isArity(PN_LIST)) {
             /* Left-associative operator chain: avoid too much recursion. */
             pn2 = pn->pn_head;
-            if (!EmitTree(cx, cg, pn2))
+            if (!EmitTree(cx, bce, pn2))
                 return JS_FALSE;
             op = pn->getOp();
             while ((pn2 = pn2->pn_next) != NULL) {
-                if (!EmitTree(cx, cg, pn2))
+                if (!EmitTree(cx, bce, pn2))
                     return JS_FALSE;
-                if (Emit1(cx, cg, op) < 0)
+                if (Emit1(cx, bce, op) < 0)
                     return JS_FALSE;
             }
         } else {
 #if JS_HAS_XML_SUPPORT
             uintN oldflags;
 
       case TOK_DBLCOLON:
             if (pn->isArity(PN_NAME)) {
-                if (!EmitTree(cx, cg, pn->expr()))
+                if (!EmitTree(cx, bce, pn->expr()))
                     return JS_FALSE;
-                if (!EmitAtomOp(cx, pn, pn->getOp(), cg))
+                if (!EmitAtomOp(cx, pn, pn->getOp(), bce))
                     return JS_FALSE;
                 break;
             }
 
             /*
              * Binary :: has a right operand that brackets arbitrary code,
              * possibly including a let (a = b) ... expression.  We must clear
              * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
              */
-            oldflags = cg->flags;
-            cg->flags &= ~TCF_IN_FOR_INIT;
+            oldflags = bce->flags;
+            bce->flags &= ~TCF_IN_FOR_INIT;
 #endif
 
             /* Binary operators that evaluate both operands unconditionally. */
-            if (!EmitTree(cx, cg, pn->pn_left))
+            if (!EmitTree(cx, bce, pn->pn_left))
                 return JS_FALSE;
-            if (!EmitTree(cx, cg, pn->pn_right))
+            if (!EmitTree(cx, bce, pn->pn_right))
                 return JS_FALSE;
 #if JS_HAS_XML_SUPPORT
-            cg->flags |= oldflags & TCF_IN_FOR_INIT;
+            bce->flags |= oldflags & TCF_IN_FOR_INIT;
 #endif
-            if (Emit1(cx, cg, pn->getOp()) < 0)
+            if (Emit1(cx, bce, pn->getOp()) < 0)
                 return JS_FALSE;
         }
         break;
 
       case TOK_THROW:
 #if JS_HAS_XML_SUPPORT
       case TOK_AT:
       case TOK_DEFAULT:
@@ -6674,230 +6673,230 @@ frontend::EmitTree(JSContext *cx, CodeGe
       case TOK_UNARYOP:
       {
         uintN oldflags;
 
         /* Unary op, including unary +/-. */
         op = pn->getOp();
 #if JS_HAS_XML_SUPPORT
         if (op == JSOP_XMLNAME) {
-            if (!EmitXMLName(cx, pn, op, cg))
+            if (!EmitXMLName(cx, pn, op, bce))
                 return JS_FALSE;
             break;
         }
 #endif
         pn2 = pn->pn_kid;
 
         if (op == JSOP_TYPEOF && !pn2->isKind(TOK_NAME))
             op = JSOP_TYPEOFEXPR;
 
-        oldflags = cg->flags;
-        cg->flags &= ~TCF_IN_FOR_INIT;
-        if (!EmitTree(cx, cg, pn2))
+        oldflags = bce->flags;
+        bce->flags &= ~TCF_IN_FOR_INIT;
+        if (!EmitTree(cx, bce, pn2))
             return JS_FALSE;
-        cg->flags |= oldflags & TCF_IN_FOR_INIT;
-        if (Emit1(cx, cg, op) < 0)
+        bce->flags |= oldflags & TCF_IN_FOR_INIT;
+        if (Emit1(cx, bce, op) < 0)
             return JS_FALSE;
         break;
       }
 
       case TOK_INC:
       case TOK_DEC:
         /* Emit lvalue-specialized code for ++/-- operators. */
         pn2 = pn->pn_kid;
         JS_ASSERT(!pn2->isKind(TOK_RP));
         op = pn->getOp();
         switch (pn2->getKind()) {
           default:
             JS_ASSERT(pn2->isKind(TOK_NAME));
             pn2->setOp(op);
-            if (!BindNameToSlot(cx, cg, pn2))
+            if (!BindNameToSlot(cx, bce, pn2))
                 return JS_FALSE;
             op = pn2->getOp();
             if (op == JSOP_CALLEE) {
-                if (Emit1(cx, cg, op) < 0)
+                if (Emit1(cx, bce, op) < 0)
                     return JS_FALSE;
             } else if (!pn2->pn_cookie.isFree()) {
                 atomIndex = pn2->pn_cookie.asInteger();
                 EMIT_UINT16_IMM_OP(op, atomIndex);
             } else {
                 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
                 if (js_CodeSpec[op].format & (JOF_INC | JOF_DEC)) {
-                    if (!EmitNameIncDec(cx, pn2, op, cg))
+                    if (!EmitNameIncDec(cx, pn2, op, bce))
                         return JS_FALSE;
                 } else {
-                    if (!EmitAtomOp(cx, pn2, op, cg))
+                    if (!EmitAtomOp(cx, pn2, op, bce))
                         return JS_FALSE;
                 }
                 break;
             }
             if (pn2->isConst()) {
-                if (Emit1(cx, cg, JSOP_POS) < 0)
+                if (Emit1(cx, bce, JSOP_POS) < 0)
                     return JS_FALSE;
                 op = pn->getOp();
                 if (!(js_CodeSpec[op].format & JOF_POST)) {
-                    if (Emit1(cx, cg, JSOP_ONE) < 0)
+                    if (Emit1(cx, bce, JSOP_ONE) < 0)
                         return JS_FALSE;
                     op = (js_CodeSpec[op].format & JOF_INC) ? JSOP_ADD : JSOP_SUB;
-                    if (Emit1(cx, cg, op) < 0)
+                    if (Emit1(cx, bce, op) < 0)
                         return JS_FALSE;
                 }
             }
             break;
           case TOK_DOT:
-            if (!EmitPropIncDec(cx, pn2, op, cg))
+            if (!EmitPropIncDec(cx, pn2, op, bce))
                 return JS_FALSE;
             break;
           case TOK_LB:
-            if (!EmitElemIncDec(cx, pn2, op, cg))
+            if (!EmitElemIncDec(cx, pn2, op, bce))
                 return JS_FALSE;
             break;
           case TOK_LP:
-            if (!EmitTree(cx, cg, pn2))
+            if (!EmitTree(cx, bce, pn2))
                 return JS_FALSE;
-            if (NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - pn2->pn_offset) < 0)
+            if (NewSrcNote2(cx, bce, SRC_PCBASE, CG_OFFSET(bce) - pn2->pn_offset) < 0)
                 return JS_FALSE;
-            if (Emit1(cx, cg, op) < 0)
+            if (Emit1(cx, bce, op) < 0)
                 return JS_FALSE;
             /*
              * This is dead code for the decompiler, don't generate
              * a decomposed version of the opcode. We do need to balance
              * the stacks in the decomposed version.
              */
             JS_ASSERT(js_CodeSpec[op].format & JOF_DECOMPOSE);
             JS_ASSERT(js_CodeSpec[op].format & JOF_ELEM);
-            if (Emit1(cx, cg, (JSOp)1) < 0)
+            if (Emit1(cx, bce, (JSOp)1) < 0)
                 return JS_FALSE;
-            if (Emit1(cx, cg, JSOP_POP) < 0)
+            if (Emit1(cx, bce, JSOP_POP) < 0)
                 return JS_FALSE;
             break;
 #if JS_HAS_XML_SUPPORT
           case TOK_UNARYOP:
-            JS_ASSERT(!cg->inStrictMode());
+            JS_ASSERT(!bce->inStrictMode());
             JS_ASSERT(pn2->isOp(JSOP_SETXMLNAME));
-            if (!EmitTree(cx, cg, pn2->pn_kid))
+            if (!EmitTree(cx, bce, pn2->pn_kid))
                 return JS_FALSE;
-            if (Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
+            if (Emit1(cx, bce, JSOP_BINDXMLNAME) < 0)
                 return JS_FALSE;
-            if (!EmitElemIncDec(cx, NULL, op, cg))
+            if (!EmitElemIncDec(cx, NULL, op, bce))
                 return JS_FALSE;
             break;
 #endif
         }
         break;
 
       case TOK_DELETE:
         /*
          * Under ECMA 3, deleting a non-reference returns true -- but alas we
          * must evaluate the operand if it appears it might have side effects.
          */
         pn2 = pn->pn_kid;
         switch (pn2->getKind()) {
           case TOK_NAME:
-            if (!BindNameToSlot(cx, cg, pn2))
+            if (!BindNameToSlot(cx, bce, pn2))
                 return JS_FALSE;
             op = pn2->getOp();
             if (op == JSOP_FALSE) {
-                if (Emit1(cx, cg, op) < 0)
+                if (Emit1(cx, bce, op) < 0)
                     return JS_FALSE;
             } else {
-                if (!EmitAtomOp(cx, pn2, op, cg))
+                if (!EmitAtomOp(cx, pn2, op, bce))
                     return JS_FALSE;
             }
             break;
           case TOK_DOT:
-            if (!EmitPropOp(cx, pn2, JSOP_DELPROP, cg, JS_FALSE))
+            if (!EmitPropOp(cx, pn2, JSOP_DELPROP, bce, JS_FALSE))
                 return JS_FALSE;
             break;
 #if JS_HAS_XML_SUPPORT
           case TOK_DBLDOT:
-            JS_ASSERT(!cg->inStrictMode());
-            if (!EmitElemOp(cx, pn2, JSOP_DELDESC, cg))
+            JS_ASSERT(!bce->inStrictMode());
+            if (!EmitElemOp(cx, pn2, JSOP_DELDESC, bce))
                 return JS_FALSE;
             break;
 #endif
           case TOK_LB:
-            if (!EmitElemOp(cx, pn2, JSOP_DELELEM, cg))
+            if (!EmitElemOp(cx, pn2, JSOP_DELELEM, bce))
                 return JS_FALSE;
             break;
           default:
             /*
              * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
              * to foo(), true (a comma expression, requiring SRC_PCDELTA).
              */
             useful = JS_FALSE;
-            if (!CheckSideEffects(cx, cg, pn2, &useful))
+            if (!CheckSideEffects(cx, bce, pn2, &useful))
                 return JS_FALSE;
             if (!useful) {
                 off = noteIndex = -1;
             } else {
                 JS_ASSERT_IF(pn2->isKind(TOK_LP), !(pn2->pn_xflags & PNX_SETCALL));
-                if (!EmitTree(cx, cg, pn2))
+                if (!EmitTree(cx, bce, pn2))
                     return JS_FALSE;
-                off = CG_OFFSET(cg);
-                noteIndex = NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
-                if (noteIndex < 0 || Emit1(cx, cg, JSOP_POP) < 0)
+                off = CG_OFFSET(bce);
+                noteIndex = NewSrcNote2(cx, bce, SRC_PCDELTA, 0);
+                if (noteIndex < 0 || Emit1(cx, bce, JSOP_POP) < 0)
                     return JS_FALSE;
             }
-            if (Emit1(cx, cg, JSOP_TRUE) < 0)
+            if (Emit1(cx, bce, JSOP_TRUE) < 0)
                 return JS_FALSE;
             if (noteIndex >= 0) {
-                tmp = CG_OFFSET(cg);
-                if (!SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
+                tmp = CG_OFFSET(bce);
+                if (!SetSrcNoteOffset(cx, bce, (uintN)noteIndex, 0, tmp-off))
                     return JS_FALSE;
             }
         }
         break;
 
 #if JS_HAS_XML_SUPPORT
       case TOK_FILTER:
-        JS_ASSERT(!cg->inStrictMode());
-
-        if (!EmitTree(cx, cg, pn->pn_left))
+        JS_ASSERT(!bce->inStrictMode());
+
+        if (!EmitTree(cx, bce, pn->pn_left))
             return JS_FALSE;
-        jmp = EmitJump(cx, cg, JSOP_FILTER, 0);
+        jmp = EmitJump(cx, bce, JSOP_FILTER, 0);
         if (jmp < 0)
             return JS_FALSE;
-        top = EmitTraceOp(cx, cg, pn->pn_right);
+        top = EmitTraceOp(cx, bce, pn->pn_right);
         if (top < 0)
             return JS_FALSE;
-        if (!EmitTree(cx, cg, pn->pn_right))
+        if (!EmitTree(cx, bce, pn->pn_right))
             return JS_FALSE;
-        CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
-        if (EmitJump(cx, cg, JSOP_ENDFILTER, top - CG_OFFSET(cg)) < 0)
+        CHECK_AND_SET_JUMP_OFFSET_AT(cx, bce, jmp);
+        if (EmitJump(cx, bce, JSOP_ENDFILTER, top - CG_OFFSET(bce)) < 0)
             return JS_FALSE;
 
         /* Make blockChain determination quicker. */
-        if (EmitBlockChain(cx, cg) < 0)
+        if (EmitBlockChain(cx, bce) < 0)
             return JS_FALSE;
         break;
 #endif
 
       case TOK_DOT:
         /*
          * Pop a stack operand, convert it to object, get a property named by
          * this bytecode's immediate-indexed atom operand, and push its value
          * (not a reference to it).
          */
-        ok = EmitPropOp(cx, pn, pn->getOp(), cg, JS_FALSE);
+        ok = EmitPropOp(cx, pn, pn->getOp(), bce, JS_FALSE);
         break;
 
 #if JS_HAS_XML_SUPPORT
       case TOK_DBLDOT:
-        JS_ASSERT(!cg->inStrictMode());
+        JS_ASSERT(!bce->inStrictMode());
         /* FALL THROUGH */
 #endif
       case TOK_LB:
         /*
          * Pop two operands, convert the left one to object and the right one
          * to property name (atom or tagged int), get the named property, and
          * push its value.  Set the "obj" register to the result of ToObject
          * on the left operand.
          */
-        ok = EmitElemOp(cx, pn, pn->getOp(), cg);
+        ok = EmitElemOp(cx, pn, pn->getOp(), bce);
         break;
 
       case TOK_NEW:
       case TOK_LP:
       {
         bool callop = pn->isKind(TOK_LP);
 
         /*
@@ -6913,105 +6912,104 @@ frontend::EmitTree(JSContext *cx, CodeGe
          * Then (or in a call case that has no explicit reference-base
          * object) we emit JSOP_PUSH to produce the |this| slot required
          * for calls (which non-strict mode functions will box into the
          * global object).
          */
         pn2 = pn->pn_head;
         switch (pn2->getKind()) {
           case TOK_NAME:
-            if (!EmitNameOp(cx, cg, pn2, callop))
+            if (!EmitNameOp(cx, bce, pn2, callop))
                 return JS_FALSE;
             break;
           case TOK_DOT:
-            if (!EmitPropOp(cx, pn2, pn2->getOp(), cg, callop))
+            if (!EmitPropOp(cx, pn2, pn2->getOp(), bce, callop))
                 return JS_FALSE;
             break;
           case TOK_LB:
             JS_ASSERT(pn2->isOp(JSOP_GETELEM));
-            if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, cg))
+            if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, bce))
                 return JS_FALSE;
             break;
           case TOK_UNARYOP:
 #if JS_HAS_XML_SUPPORT
             if (pn2->isOp(JSOP_XMLNAME)) {
-                if (!EmitXMLName(cx, pn2, JSOP_CALLXMLNAME, cg))
+                if (!EmitXMLName(cx, pn2, JSOP_CALLXMLNAME, bce))
                     return JS_FALSE;
                 callop = true;          /* suppress JSOP_PUSH after */
                 break;
             }
 #endif
             /* FALL THROUGH */
           default:
-            if (!EmitTree(cx, cg, pn2))
+            if (!EmitTree(cx, bce, pn2))
                 return JS_FALSE;
             callop = false;             /* trigger JSOP_PUSH after */
             break;
         }
-        if (!callop && Emit1(cx, cg, JSOP_PUSH) < 0)
+        if (!callop && Emit1(cx, bce, JSOP_PUSH) < 0)
             return JS_FALSE;
 
         /* Remember start of callable-object bytecode for decompilation hint. */
         off = top;
 
         /*
          * Emit code for each argument in order, then emit the JSOP_*CALL or
          * JSOP_NEW bytecode with a two-byte immediate telling how many args
          * were pushed on the operand stack.
          */
-        uintN oldflags = cg->flags;
-        cg->flags &= ~TCF_IN_FOR_INIT;
+        uintN oldflags = bce->flags;
+        bce->flags &= ~TCF_IN_FOR_INIT;
         for (pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) {
-            if (!EmitTree(cx, cg, pn3))
+            if (!EmitTree(cx, bce, pn3))
                 return JS_FALSE;
         }
-        cg->flags |= oldflags & TCF_IN_FOR_INIT;
-        if (NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - off) < 0)
+        bce->flags |= oldflags & TCF_IN_FOR_INIT;
+        if (NewSrcNote2(cx, bce, SRC_PCBASE, CG_OFFSET(bce) - off) < 0)
             return JS_FALSE;
 
         argc = pn->pn_count - 1;
-        if (Emit3(cx, cg, pn->getOp(), ARGC_HI(argc), ARGC_LO(argc)) < 0)
+        if (Emit3(cx, bce, pn->getOp(), ARGC_HI(argc), ARGC_LO(argc)) < 0)
             return JS_FALSE;
-        CheckTypeSet(cx, cg, pn->getOp());
+        CheckTypeSet(cx, bce, pn->getOp());
         if (pn->isOp(JSOP_EVAL)) {
             EMIT_UINT16_IMM_OP(JSOP_LINENO, pn->pn_pos.begin.lineno);
-            if (EmitBlockChain(cx, cg) < 0)
+            if (EmitBlockChain(cx, bce) < 0)
                 return JS_FALSE;
         }
         if (pn->pn_xflags & PNX_SETCALL) {
-            if (Emit1(cx, cg, JSOP_SETCALL) < 0)
+            if (Emit1(cx, bce, JSOP_SETCALL) < 0)
                 return JS_FALSE;
         }
         break;
       }
 
       case TOK_LEXICALSCOPE:
-        if (!EmitLexicalScope(cx, cg, pn, ok))
+        if (!EmitLexicalScope(cx, bce, pn, ok))
             return false;
         break;
 
 #if JS_HAS_BLOCK_SCOPE
-      case TOK_LET:
-        if (!EmitLet(cx, cg, pn))
+      case TOK_LET:
+        if (!EmitLet(cx, bce, pn))
             return false;
         break;
 #endif /* JS_HAS_BLOCK_SCOPE */
-
 #if JS_HAS_GENERATORS
       case TOK_ARRAYPUSH: {
         jsint slot;
 
         /*
-         * The array object's stack index is in cg->arrayCompDepth. See below
+         * The array object's stack index is in bce->arrayCompDepth. See below
          * under the array initialiser code generator for array comprehension
          * special casing.
          */
-        if (!EmitTree(cx, cg, pn->pn_kid))
+        if (!EmitTree(cx, bce, pn->pn_kid))
             return JS_FALSE;
-        slot = AdjustBlockSlot(cx, cg, cg->arrayCompDepth);
+        slot = AdjustBlockSlot(cx, bce, bce->arrayCompDepth);
         if (slot < 0)
             return JS_FALSE;
         EMIT_UINT16_IMM_OP(pn->getOp(), slot);
         break;
       }
 #endif
 
       case TOK_RB:
@@ -7030,162 +7028,162 @@ frontend::EmitTree(JSContext *cx, CodeGe
         sharpnum = -1;
       do_emit_array:
 #endif
 
 #if JS_HAS_GENERATORS
         if (pn->isKind(TOK_ARRAYCOMP)) {
             uintN saveDepth;
 
-            if (!EmitNewInit(cx, cg, JSProto_Array, pn, sharpnum))
+            if (!EmitNewInit(cx, bce, JSProto_Array, pn, sharpnum))
                 return JS_FALSE;
 
             /*
              * Pass the new array's stack index to the TOK_ARRAYPUSH case via
-             * cg->arrayCompDepth, then simply traverse the TOK_FOR node and
+             * bce->arrayCompDepth, then simply traverse the TOK_FOR node and
              * its kids under pn2 to generate this comprehension.
              */
-            JS_ASSERT(cg->stackDepth > 0);
-            saveDepth = cg->arrayCompDepth;
-            cg->arrayCompDepth = (uint32) (cg->stackDepth - 1);
-            if (!EmitTree(cx, cg, pn->pn_head))
+            JS_ASSERT(bce->stackDepth > 0);
+            saveDepth = bce->arrayCompDepth;
+            bce->arrayCompDepth = (uint32) (bce->stackDepth - 1);
+            if (!EmitTree(cx, bce, pn->pn_head))
                 return JS_FALSE;
-            cg->arrayCompDepth = saveDepth;
+            bce->arrayCompDepth = saveDepth;
 
             /* Emit the usual op needed for decompilation. */
-            if (!EmitEndInit(cx, cg, 1))
+            if (!EmitEndInit(cx, bce, 1))
                 return JS_FALSE;
             break;
         }
 #endif /* JS_HAS_GENERATORS */
 
-        if (!cg->hasSharps() && !(pn->pn_xflags & PNX_NONCONST) && pn->pn_head &&
-            cg->checkSingletonContext()) {
-            if (!EmitSingletonInitialiser(cx, cg, pn))
+        if (!bce->hasSharps() && !(pn->pn_xflags & PNX_NONCONST) && pn->pn_head &&
+            bce->checkSingletonContext()) {
+            if (!EmitSingletonInitialiser(cx, bce, pn))
                 return JS_FALSE;
             break;
         }
 
         /* Use the slower NEWINIT for arrays in scripts containing sharps. */
-        if (cg->hasSharps()) {
-            if (!EmitNewInit(cx, cg, JSProto_Array, pn, sharpnum))
+        if (bce->hasSharps()) {
+            if (!EmitNewInit(cx, bce, JSProto_Array, pn, sharpnum))
                 return JS_FALSE;
         } else {
-            ptrdiff_t off = EmitN(cx, cg, JSOP_NEWARRAY, 3);
+            ptrdiff_t off = EmitN(cx, bce, JSOP_NEWARRAY, 3);
             if (off < 0)
                 return JS_FALSE;
-            pc = CG_CODE(cg, off);
+            pc = CG_CODE(bce, off);
             SET_UINT24(pc, pn->pn_count);
         }
 
         pn2 = pn->pn_head;
         for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
-            if (!EmitNumberOp(cx, atomIndex, cg))
+            if (!EmitNumberOp(cx, atomIndex, bce))
                 return JS_FALSE;
             if (pn2->isKind(TOK_COMMA) && pn2->isArity(PN_NULLARY)) {
-                if (Emit1(cx, cg, JSOP_HOLE) < 0)
+                if (Emit1(cx, bce, JSOP_HOLE) < 0)
                     return JS_FALSE;
             } else {
-                if (!EmitTree(cx, cg, pn2))
+                if (!EmitTree(cx, bce, pn2))
                     return JS_FALSE;
             }
-            if (Emit1(cx, cg, JSOP_INITELEM) < 0)
+            if (Emit1(cx, bce, JSOP_INITELEM) < 0)
                 return JS_FALSE;
         }
         JS_ASSERT(atomIndex == pn->pn_count);
 
         if (pn->pn_xflags & PNX_ENDCOMMA) {
             /* Emit a source note so we know to decompile an extra comma. */
-            if (NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
+            if (NewSrcNote(cx, bce, SRC_CONTINUE) < 0)
                 return JS_FALSE;
         }
 
         /*
          * Emit an op to finish the array and, secondarily, to aid in sharp
          * array cleanup (if JS_HAS_SHARP_VARS) and decompilation.
          */
-        if (!EmitEndInit(cx, cg, atomIndex))
+        if (!EmitEndInit(cx, bce, atomIndex))
             return JS_FALSE;
         break;
 
       case TOK_RC: {
 #if JS_HAS_SHARP_VARS
         sharpnum = -1;
       do_emit_object:
 #endif
 #if JS_HAS_DESTRUCTURING_SHORTHAND
         if (pn->pn_xflags & PNX_DESTRUCT) {
-            ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR, JSMSG_BAD_OBJECT_INIT);
+            ReportCompileErrorNumber(cx, CG_TS(bce), pn, JSREPORT_ERROR, JSMSG_BAD_OBJECT_INIT);
             return JS_FALSE;
         }
 #endif
 
-        if (!cg->hasSharps() && !(pn->pn_xflags & PNX_NONCONST) && pn->pn_head &&
-            cg->checkSingletonContext()) {
-            if (!EmitSingletonInitialiser(cx, cg, pn))
+        if (!bce->hasSharps() && !(pn->pn_xflags & PNX_NONCONST) && pn->pn_head &&
+            bce->checkSingletonContext()) {
+            if (!EmitSingletonInitialiser(cx, bce, pn))
                 return JS_FALSE;
             break;
         }
 
         /*
          * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
          * a new object and in source order evaluating each property value and
          * adding the property to the object, without invoking latent setters.
          * We use the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes to
          * ignore setters and to avoid dup'ing and popping the object as each
          * property is added, as JSOP_SETELEM/JSOP_SETPROP would do.
          */
-        ptrdiff_t offset = CG_NEXT(cg) - CG_BASE(cg);
-        if (!EmitNewInit(cx, cg, JSProto_Object, pn, sharpnum))
+        ptrdiff_t offset = CG_NEXT(bce) - CG_BASE(bce);
+        if (!EmitNewInit(cx, bce, JSProto_Object, pn, sharpnum))
             return JS_FALSE;
 
         /*
          * Try to construct the shape of the object as we go, so we can emit a
          * JSOP_NEWOBJECT with the final shape instead.
          */
         JSObject *obj = NULL;
-        if (!cg->hasSharps() && cg->compileAndGo()) {
+        if (!bce->hasSharps() && bce->compileAndGo()) {
             gc::AllocKind kind = GuessObjectGCKind(pn->pn_count, false);
             obj = NewBuiltinClassInstance(cx, &ObjectClass, kind);
             if (!obj)
                 return JS_FALSE;
         }
 
         uintN methodInits = 0, slowMethodInits = 0;
         for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
             /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
             pn3 = pn2->pn_left;
             if (pn3->isKind(TOK_NUMBER)) {
-                if (!EmitNumberOp(cx, pn3->pn_dval, cg))
+                if (!EmitNumberOp(cx, pn3->pn_dval, bce))
                     return JS_FALSE;
             }
 
             /* Emit code for the property initializer. */
-            if (!EmitTree(cx, cg, pn2->pn_right))
+            if (!EmitTree(cx, bce, pn2->pn_right))
                 return JS_FALSE;
 
             op = pn2->getOp();
             if (op == JSOP_GETTER || op == JSOP_SETTER) {
                 obj = NULL;
-                if (Emit1(cx, cg, op) < 0)
+                if (Emit1(cx, bce, op) < 0)
                     return JS_FALSE;
             }
 
             /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
             if (pn3->isKind(TOK_NUMBER)) {
                 obj = NULL;
-                if (NewSrcNote(cx, cg, SRC_INITPROP) < 0)
+                if (NewSrcNote(cx, bce, SRC_INITPROP) < 0)
                     return JS_FALSE;
-                if (Emit1(cx, cg, JSOP_INITELEM) < 0)
+                if (Emit1(cx, bce, JSOP_INITELEM) < 0)
                     return JS_FALSE;
             } else {
                 JS_ASSERT(pn3->isKind(TOK_NAME) ||
                           pn3->isKind(TOK_STRING));
                 jsatomid index;
-                if (!cg->makeAtomIndex(pn3->pn_atom, &index))
+                if (!bce->makeAtomIndex(pn3->pn_atom, &index))
                     return JS_FALSE;
 
                 /* Check whether we can optimize to JSOP_INITMETHOD. */
                 ParseNode *init = pn2->pn_right;
                 bool lambda = init->isOp(JSOP_LAMBDA);
                 if (lambda)
                     ++methodInits;
                 if (op == JSOP_INITPROP && lambda && init->pn_funbox->joinable()) {
@@ -7214,406 +7212,406 @@ frontend::EmitTree(JSContext *cx, CodeGe
                     if (obj->inDictionaryMode())
                         obj = NULL;
                 }
 
                 EMIT_INDEX_OP(op, index);
             }
         }
 
-        if (cg->funbox && cg->funbox->shouldUnbrand(methodInits, slowMethodInits)) {
+        if (bce->funbox && bce->funbox->shouldUnbrand(methodInits, slowMethodInits)) {
             obj = NULL;
-            if (Emit1(cx, cg, JSOP_UNBRAND) < 0)
+            if (Emit1(cx, bce, JSOP_UNBRAND) < 0)
                 return JS_FALSE;
         }
-        if (!EmitEndInit(cx, cg, pn->pn_count))
+        if (!EmitEndInit(cx, bce, pn->pn_count))
             return JS_FALSE;
 
         if (obj) {
             /*
              * The object survived and has a predictable shape.  Update the original bytecode,
              * as long as we can do so without using a big index prefix/suffix.
              */
-            ObjectBox *objbox = cg->parser->newObjectBox(obj);
+            ObjectBox *objbox = bce->parser->newObjectBox(obj);
             if (!objbox)
                 return JS_FALSE;
-            unsigned index = cg->objectList.index(objbox);
+            unsigned index = bce->objectList.index(objbox);
             if (FitsWithoutBigIndex(index))
                 EMIT_UINT16_IN_PLACE(offset, JSOP_NEWOBJECT, uint16(index));
         }
 
         break;
       }
 
 #if JS_HAS_SHARP_VARS
       case TOK_DEFSHARP:
-        JS_ASSERT(cg->hasSharps());
+        JS_ASSERT(bce->hasSharps());
         sharpnum = pn->pn_num;
         pn = pn->pn_kid;
         if (pn->isKind(TOK_RB))
             goto do_emit_array;
 # if JS_HAS_GENERATORS
         if (pn->isKind(TOK_ARRAYCOMP))
             goto do_emit_array;
 # endif
         if (pn->isKind(TOK_RC))
             goto do_emit_object;
 
-        if (!EmitTree(cx, cg, pn))
+        if (!EmitTree(cx, bce, pn))
             return JS_FALSE;
-        EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, cg->sharpSlotBase, (jsatomid) sharpnum);
+        EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, bce->sharpSlotBase, (jsatomid) sharpnum);
         break;
 
       case TOK_USESHARP:
-        JS_ASSERT(cg->hasSharps());
-        EMIT_UINT16PAIR_IMM_OP(JSOP_USESHARP, cg->sharpSlotBase, (jsatomid) pn->pn_num);
+        JS_ASSERT(bce->hasSharps());
+        EMIT_UINT16PAIR_IMM_OP(JSOP_USESHARP, bce->sharpSlotBase, (jsatomid) pn->pn_num);
         break;
 #endif /* JS_HAS_SHARP_VARS */
 
       case TOK_NAME:
         /*
          * Cope with a left-over function definition that was replaced by a use
          * of a later function definition of the same name. See FunctionDef and
          * MakeDefIntoUse in Parser.cpp.
          */
         if (pn->isOp(JSOP_NOP))
             break;
-        if (!EmitNameOp(cx, cg, pn, JS_FALSE))
+        if (!EmitNameOp(cx, bce, pn, JS_FALSE))
             return JS_FALSE;
         break;
 
 #if JS_HAS_XML_SUPPORT
       case TOK_XMLATTR:
       case TOK_XMLSPACE:
       case TOK_XMLTEXT:
       case TOK_XMLCDATA:
       case TOK_XMLCOMMENT:
-        JS_ASSERT(!cg->inStrictMode());
+        JS_ASSERT(!bce->inStrictMode());
         /* FALL THROUGH */
 #endif
       case TOK_STRING:
-        ok = EmitAtomOp(cx, pn, pn->getOp(), cg);
+        ok = EmitAtomOp(cx, pn, pn->getOp(), bce);
         break;
 
       case TOK_NUMBER:
-        ok = EmitNumberOp(cx, pn->pn_dval, cg);
+        ok = EmitNumberOp(cx, pn->pn_dval, bce);
         break;
 
       case TOK_REGEXP:
         JS_ASSERT(pn->isOp(JSOP_REGEXP));
-        ok = EmitIndexOp(cx, JSOP_REGEXP, cg->regexpList.index(pn->pn_objbox), cg);
+        ok = EmitIndexOp(cx, JSOP_REGEXP, bce->regexpList.index(pn->pn_objbox), bce);
         break;
 
 #if JS_HAS_XML_SUPPORT
       case TOK_ANYNAME:
 #endif
       case TOK_PRIMARY:
-        if (Emit1(cx, cg, pn->getOp()) < 0)
+        if (Emit1(cx, bce, pn->getOp()) < 0)
             return JS_FALSE;
         break;
 
       case TOK_DEBUGGER:
-        if (Emit1(cx, cg, JSOP_DEBUGGER) < 0)
+        if (Emit1(cx, bce, JSOP_DEBUGGER) < 0)
             return JS_FALSE;
         break;
 
 #if JS_HAS_XML_SUPPORT
       case TOK_XMLELEM:
       case TOK_XMLLIST:
-        JS_ASSERT(!cg->inStrictMode());
+        JS_ASSERT(!bce->inStrictMode());
         JS_ASSERT(pn->isKind(TOK_XMLLIST) || pn->pn_count != 0);
 
         switch (pn->pn_head ? pn->pn_head->getKind() : TOK_XMLLIST) {
           case TOK_XMLETAGO:
             JS_ASSERT(0);
             /* FALL THROUGH */
           case TOK_XMLPTAGC:
           case TOK_XMLSTAGO:
             break;
           default:
-            if (Emit1(cx, cg, JSOP_STARTXML) < 0)
+            if (Emit1(cx, bce, JSOP_STARTXML) < 0)
                 return JS_FALSE;
         }
 
         for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
             if (pn2->isKind(TOK_LC) &&
-                Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
+                Emit1(cx, bce, JSOP_STARTXMLEXPR) < 0) {
                 return JS_FALSE;
             }
-            if (!EmitTree(cx, cg, pn2))
+            if (!EmitTree(cx, bce, pn2))
                 return JS_FALSE;
-            if (pn2 != pn->pn_head && Emit1(cx, cg, JSOP_ADD) < 0)
+            if (pn2 != pn->pn_head && Emit1(cx, bce, JSOP_ADD) < 0)
                 return JS_FALSE;
         }
 
         if (pn->pn_xflags & PNX_XMLROOT) {
             if (pn->pn_count == 0) {
                 JS_ASSERT(pn->isKind(TOK_XMLLIST));
                 atom = cx->runtime->atomState.emptyAtom;
                 jsatomid index;
-                if (!cg->makeAtomIndex(atom, &index))
+                if (!bce->makeAtomIndex(atom, &index))
                     return JS_FALSE;
                 EMIT_INDEX_OP(JSOP_STRING, index);
             }
-            if (Emit1(cx, cg, pn->getOp()) < 0)
+            if (Emit1(cx, bce, pn->getOp()) < 0)
                 return JS_FALSE;
         }
 #ifdef DEBUG
         else
             JS_ASSERT(pn->pn_count != 0);
 #endif
         break;
 
       case TOK_XMLPTAGC:
       case TOK_XMLSTAGO:
       case TOK_XMLETAGO:
-        if (!EmitXMLTag(cx, cg, pn))
+        if (!EmitXMLTag(cx, bce, pn))
             return false;
         break;
 
       case TOK_XMLNAME:
-        JS_ASSERT(!cg->inStrictMode());
+        JS_ASSERT(!bce->inStrictMode());
 
         if (pn->isArity(PN_LIST)) {
             JS_ASSERT(pn->pn_count != 0);
             for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
                 if (pn2->isKind(TOK_LC) &&
-                    Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
+                    Emit1(cx, bce, JSOP_STARTXMLEXPR) < 0) {
                     return JS_FALSE;
                 }
-                if (!EmitTree(cx, cg, pn2))
+                if (!EmitTree(cx, bce, pn2))
                     return JS_FALSE;
-                if (pn2 != pn->pn_head && Emit1(cx, cg, JSOP_ADD) < 0)
+                if (pn2 != pn->pn_head && Emit1(cx, bce, JSOP_ADD) < 0)
                     return JS_FALSE;
             }
         } else {
             JS_ASSERT(pn->isArity(PN_NULLARY));
             ok = pn->isOp(JSOP_OBJECT)
-                 ? EmitObjectOp(cx, pn->pn_objbox, pn->getOp(), cg)
-                 : EmitAtomOp(cx, pn, pn->getOp(), cg);
+                 ? EmitObjectOp(cx, pn->pn_objbox, pn->getOp(), bce)
+                 : EmitAtomOp(cx, pn, pn->getOp(), bce);
         }
         break;
 
       case TOK_XMLPI:
-        if (!EmitXMLProcessingInstruction(cx, cg, pn))
+        if (!EmitXMLProcessingInstruction(cx, bce, pn))
             return false;
         break;
 #endif /* JS_HAS_XML_SUPPORT */
 
       default:
         JS_ASSERT(0);
     }
 
-    /* cg->emitLevel == 1 means we're last on the stack, so finish up. */
-    if (ok && cg->emitLevel == 1) {
-        if (cg->spanDeps)
-            ok = OptimizeSpanDeps(cx, cg);
-        if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.end.lineno))
+    /* bce->emitLevel == 1 means we're last on the stack, so finish up. */
+    if (ok && bce->emitLevel == 1) {
+        if (bce->spanDeps)
+            ok = OptimizeSpanDeps(cx, bce);
+        if (!UpdateLineNumberNotes(cx, bce, pn->pn_pos.end.lineno))
             return JS_FALSE;
     }
 
     return ok;
 }
 
 static intN
-AllocSrcNote(JSContext *cx, CodeGenerator *cg)
-{
-    jssrcnote *notes = CG_NOTES(cg);
+AllocSrcNote(JSContext *cx, BytecodeEmitter *bce)
+{
+    jssrcnote *notes = CG_NOTES(bce);
     jssrcnote *newnotes;
-    uintN index = CG_NOTE_COUNT(cg);
-    uintN max = CG_NOTE_LIMIT(cg);
+    uintN index = CG_NOTE_COUNT(bce);
+    uintN max = CG_NOTE_LIMIT(bce);
 
     if (index == max) {
         size_t newlength;
         if (!notes) {
             JS_ASSERT(!index && !max);
             newlength = SRCNOTE_CHUNK_LENGTH;
             newnotes = (jssrcnote *) cx->malloc_(SRCNOTE_SIZE(newlength));
         } else {
             JS_ASSERT(index <= max);
             newlength = max * 2;
             newnotes = (jssrcnote *) cx->realloc_(notes, SRCNOTE_SIZE(newlength));
         }
         if (!newnotes) {
             js_ReportOutOfMemory(cx);
             return -1;
         }
-        CG_NOTES(cg) = newnotes;
-        CG_NOTE_LIMIT(cg) = newlength;
-    }
-
-    CG_NOTE_COUNT(cg) = index + 1;
+        CG_NOTES(bce) = newnotes;
+        CG_NOTE_LIMIT(bce) = newlength;
+    }
+
+    CG_NOTE_COUNT(bce) = index + 1;
     return (intN)index;
 }
 
 intN
-frontend::NewSrcNote(JSContext *cx, CodeGenerator *cg, SrcNoteType type)
+frontend::NewSrcNote(JSContext *cx, BytecodeEmitter *bce, SrcNoteType type)
 {
     intN index, n;
     jssrcnote *sn;
     ptrdiff_t offset, delta, xdelta;
 
     /*
-     * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
-     * incrementing CG_NOTE_COUNT(cg).
+     * Claim a note slot in CG_NOTES(bce) by growing it if necessary and then
+     * incrementing CG_NOTE_COUNT(bce).
      */
-    index = AllocSrcNote(cx, cg);
+    index = AllocSrcNote(cx, bce);
     if (index < 0)
         return -1;
-    sn = &CG_NOTES(cg)[index];
+    sn = &CG_NOTES(bce)[index];
 
     /*
      * Compute delta from the last annotated bytecode's offset.  If it's too
      * big to fit in sn, allocate one or more xdelta notes and reset sn.
      */
-    offset = CG_OFFSET(cg);
-    delta = offset - CG_LAST_NOTE_OFFSET(cg);
-    CG_LAST_NOTE_OFFSET(cg) = offset;
+    offset = CG_OFFSET(bce);
+    delta = offset - CG_LAST_NOTE_OFFSET(bce);
+    CG_LAST_NOTE_OFFSET(bce) = offset;
     if (delta >= SN_DELTA_LIMIT) {
         do {
             xdelta = JS_MIN(delta, SN_XDELTA_MASK);
             SN_MAKE_XDELTA(sn, xdelta);
             delta -= xdelta;
-            index = AllocSrcNote(cx, cg);
+            index = AllocSrcNote(cx, bce);
             if (index < 0)
                 return -1;
-            sn = &CG_NOTES(cg)[index];
+            sn = &CG_NOTES(bce)[index];
         } while (delta >= SN_DELTA_LIMIT);
     }
 
     /*
      * Initialize type and delta, then allocate the minimum number of notes
      * needed for type's arity.  Usually, we won't need more, but if an offset
-     * does take two bytes, SetSrcNoteOffset will grow CG_NOTES(cg).
+     * does take two bytes, SetSrcNoteOffset will grow CG_NOTES(bce).
      */
     SN_MAKE_NOTE(sn, type, delta);
     for (n = (intN)js_SrcNoteSpec[type].arity; n > 0; n--) {
-        if (NewSrcNote(cx, cg, SRC_NULL) < 0)
+        if (NewSrcNote(cx, bce, SRC_NULL) < 0)
             return -1;
     }
     return index;
 }
 
 intN
-frontend::NewSrcNote2(JSContext *cx, CodeGenerator *cg, SrcNoteType type, ptrdiff_t offset)
+frontend::NewSrcNote2(JSContext *cx, BytecodeEmitter *bce, SrcNoteType type, ptrdiff_t offset)
 {
     intN index;
 
-    index = NewSrcNote(cx, cg, type);
+    index = NewSrcNote(cx, bce, type);
     if (index >= 0) {
-        if (!SetSrcNoteOffset(cx, cg, index, 0, offset))
+        if (!SetSrcNoteOffset(cx, bce, index, 0, offset))
             return -1;
     }
     return index;
 }
 
 intN
-frontend::NewSrcNote3(JSContext *cx, CodeGenerator *cg, SrcNoteType type, ptrdiff_t offset1,
+frontend::NewSrcNote3(JSContext *cx, BytecodeEmitter *bce, SrcNoteType type, ptrdiff_t offset1,
             ptrdiff_t offset2)
 {
     intN index;
 
-    index = NewSrcNote(cx, cg, type);
+    index = NewSrcNote(cx, bce, type);
     if (index >= 0) {
-        if (!SetSrcNoteOffset(cx, cg, index, 0, offset1))
+        if (!SetSrcNoteOffset(cx, bce, index, 0, offset1))
             return -1;
-        if (!SetSrcNoteOffset(cx, cg, index, 1, offset2))
+        if (!SetSrcNoteOffset(cx, bce, index, 1, offset2))
             return -1;
     }
     return index;
 }
 
 static JSBool
-GrowSrcNotes(JSContext *cx, CodeGenerator *cg)
-{
-    size_t newlength = CG_NOTE_LIMIT(cg) * 2;
-    jssrcnote *newnotes = (jssrcnote *) cx->realloc_(CG_NOTES(cg), newlength);
+GrowSrcNotes(JSContext *cx, BytecodeEmitter *bce)
+{
+    size_t newlength = CG_NOTE_LIMIT(bce) * 2;
+    jssrcnote *newnotes = (jssrcnote *) cx->realloc_(CG_NOTES(bce), newlength);
     if (!newnotes) {
         js_ReportOutOfMemory(cx);
         return JS_FALSE;
     }
-    CG_NOTES(cg) = newnotes;
-    CG_NOTE_LIMIT(cg) = newlength;
+    CG_NOTES(bce) = newnotes;
+    CG_NOTE_LIMIT(bce) = newlength;
     return JS_TRUE;
 }
 
 jssrcnote *
-frontend::AddToSrcNoteDelta(JSContext *cx, CodeGenerator *cg, jssrcnote *sn, ptrdiff_t delta)
+frontend::AddToSrcNoteDelta(JSContext *cx, BytecodeEmitter *bce, jssrcnote *sn, ptrdiff_t delta)
 {
     ptrdiff_t base, limit, newdelta, diff;
     intN index;
 
     /*
      * Called only from OptimizeSpanDeps and FinishTakingSrcNotes to add to
      * main script note deltas, and only by a small positive amount.
      */
-    JS_ASSERT(cg->current == &cg->main);
+    JS_ASSERT(bce->current == &bce->main);
     JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);
 
     base = SN_DELTA(sn);
     limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
     newdelta = base + delta;
     if (newdelta < limit) {
         SN_SET_DELTA(sn, newdelta);
     } else {
-        index = sn - cg->main.notes;
-        if (cg->main.noteCount == cg->main.noteLimit) {
-            if (!GrowSrcNotes(cx, cg))
+        index = sn - bce->main.notes;
+        if (bce->main.noteCount == bce->main.noteLimit) {
+            if (!GrowSrcNotes(cx, bce))
                 return NULL;
-            sn = cg->main.notes + index;
-        }
-        diff = cg->main.noteCount - index;
-        cg->main.noteCount++;
+            sn = bce->main.notes + index;
+        }
+        diff = bce->main.noteCount - index;
+        bce->main.noteCount++;
         memmove(sn + 1, sn, SRCNOTE_SIZE(diff));
         SN_MAKE_XDELTA(sn, delta);
         sn++;
     }
     return sn;
 }
 
 static JSBool
-SetSrcNoteOffset(JSContext *cx, CodeGenerator *cg, uintN index, uintN which, ptrdiff_t offset)
+SetSrcNoteOffset(JSContext *cx, BytecodeEmitter *bce, uintN index, uintN which, ptrdiff_t offset)
 {
     jssrcnote *sn;
     ptrdiff_t diff;
 
     if ((jsuword)offset >= (jsuword)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG << 16)) {
-        ReportStatementTooLarge(cx, cg);
+        ReportStatementTooLarge(cx, bce);
         return JS_FALSE;
     }
 
     /* Find the offset numbered which (i.e., skip exactly which offsets). */
-    sn = &CG_NOTES(cg)[index];
+    sn = &CG_NOTES(bce)[index];
     JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
     JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
     for (sn++; which; sn++, which--) {
         if (*sn & SN_3BYTE_OFFSET_FLAG)
             sn += 2;
     }
 
     /* See if the new offset requires three bytes. */
     if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK) {
         /* Maybe this offset was already set to a three-byte value. */
         if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
             /* Losing, need to insert another two bytes for this offset. */
-            index = sn - CG_NOTES(cg);
+            index = sn - CG_NOTES(bce);
 
             /*
              * Test to see if the source note array must grow to accommodate
              * either the first or second byte of additional storage required
              * by this 3-byte offset.
              */
-            if (CG_NOTE_COUNT(cg) + 1 >= CG_NOTE_LIMIT(cg)) {
-                if (!GrowSrcNotes(cx, cg))
+            if (CG_NOTE_COUNT(bce) + 1 >= CG_NOTE_LIMIT(bce)) {
+                if (!GrowSrcNotes(cx, bce))
                     return JS_FALSE;
-                sn = CG_NOTES(cg) + index;
+                sn = CG_NOTES(bce) + index;
             }
-            CG_NOTE_COUNT(cg) += 2;
-
-            diff = CG_NOTE_COUNT(cg) - (index + 3);
+            CG_NOTE_COUNT(bce) += 2;
+
+            diff = CG_NOTE_COUNT(bce) - (index + 3);
             JS_ASSERT(diff >= 0);
             if (diff > 0)
                 memmove(sn + 3, sn + 1, SRCNOTE_SIZE(diff));
         }
         *sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
         *sn++ = (jssrcnote)(offset >> 8);
     }
     *sn = (jssrcnote)offset;
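
A minimal standalone sketch of the offset encoding SetSrcNoteOffset implements above: offsets that fit in the one-byte mask take a single note byte, while larger offsets spill into two extra bytes behind a flag bit. The constant values below (0x80 flag, 0x7f mask) are assumptions inferred from the shifts and stores in that function, not the header's definitions.

    #include <cstdint>
    #include <cstdio>

    typedef uint8_t jssrcnote_t;                  // stand-in for jssrcnote
    static const uint32_t FLAG_3BYTE = 0x80;      // assumed SN_3BYTE_OFFSET_FLAG
    static const uint32_t MASK_1BYTE = 0x7f;      // assumed SN_3BYTE_OFFSET_MASK

    // Writes 1 or 3 bytes into out[] and returns the count. Offsets of
    // MASK_1BYTE or less fit in a single note byte; larger ones (below
    // FLAG_3BYTE << 16 -- anything at or above that limit is reported as a
    // too-large statement by the function above) get a flagged high byte
    // followed by the middle and low bytes, matching the stores at the end
    // of SetSrcNoteOffset.
    static size_t
    EncodeSrcNoteOffset(uint32_t offset, jssrcnote_t out[3])
    {
        if (offset <= MASK_1BYTE) {
            out[0] = jssrcnote_t(offset);
            return 1;
        }
        out[0] = jssrcnote_t(FLAG_3BYTE | (offset >> 16));
        out[1] = jssrcnote_t(offset >> 8);
        out[2] = jssrcnote_t(offset);
        return 3;
    }

    int main()
    {
        jssrcnote_t buf[3];
        printf("0x40   -> %zu byte(s)\n", EncodeSrcNoteOffset(0x40, buf));    // 1
        printf("0x1234 -> %zu byte(s)\n", EncodeSrcNoteOffset(0x1234, buf));  // 3
        return 0;
    }
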
@@ -7649,78 +7647,78 @@ DumpSrcNoteSizeHist()
     }
     fputc('\n', fp);
 }
 #endif
 
 /*
  * Fill in the storage at notes with prolog and main srcnotes; the space at
  * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from
- * BytecodeGenerator.h.  SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST
- * CHECKING WHETHER BytecodeGenerator.h's CG_COUNT_FINAL_SRCNOTES MACRO NEEDS
+ * BytecodeEmitter.h.  SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST
+ * CHECKING WHETHER BytecodeEmitter.h's CG_COUNT_FINAL_SRCNOTES MACRO NEEDS
  * CORRESPONDING CHANGES!
  */
 JSBool
-frontend::FinishTakingSrcNotes(JSContext *cx, CodeGenerator *cg, jssrcnote *notes)
+frontend::FinishTakingSrcNotes(JSContext *cx, BytecodeEmitter *bce, jssrcnote *notes)
 {
     uintN prologCount, mainCount, totalCount;
     ptrdiff_t offset, delta;
     jssrcnote *sn;
 
-    JS_ASSERT(cg->current == &cg->main);
-
-    prologCount = cg->prolog.noteCount;
-    if (prologCount && cg->prolog.currentLine != cg->firstLine) {
-        CG_SWITCH_TO_PROLOG(cg);
-        if (NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)cg->firstLine) < 0)
+    JS_ASSERT(bce->current == &bce->main);
+
+    prologCount = bce->prolog.noteCount;
+    if (prologCount && bce->prolog.currentLine != bce->firstLine) {
+        CG_SWITCH_TO_PROLOG(bce);
+        if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)bce->firstLine) < 0)
             return false;
-        prologCount = cg->prolog.noteCount;
-        CG_SWITCH_TO_MAIN(cg);
+        prologCount = bce->prolog.noteCount;
+        CG_SWITCH_TO_MAIN(bce);
     } else {
         /*
          * Either no prolog srcnotes, or no line number change over prolog.
          * We don't need a SRC_SETLINE, but we may need to adjust the offset
          * of the first main note, by adding to its delta and possibly even
          * prepending SRC_XDELTA notes to it to account for prolog bytecodes
          * that came at and after the last annotated bytecode.
          */
-        offset = CG_PROLOG_OFFSET(cg) - cg->prolog.lastNoteOffset;
+        offset = CG_PROLOG_OFFSET(bce) - bce->prolog.lastNoteOffset;
         JS_ASSERT(offset >= 0);
-        if (offset > 0 && cg->main.noteCount != 0) {
+        if (offset > 0 && bce->main.noteCount != 0) {
             /* NB: Use as much of the first main note's delta as we can. */
-            sn = cg->main.notes;
+            sn = bce->main.notes;
             delta = SN_IS_XDELTA(sn)
                     ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
                     : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
             if (offset < delta)
                 delta = offset;
             for (;;) {
-                if (!AddToSrcNoteDelta(cx, cg, sn, delta))
+                if (!AddToSrcNoteDelta(cx, bce, sn, delta))
                     return false;
                 offset -= delta;
                 if (offset == 0)
                     break;
                 delta = JS_MIN(offset, SN_XDELTA_MASK);
-                sn = cg->main.notes;
+                sn = bce->main.notes;
             }
         }
     }
 
-    mainCount = cg->main.noteCount;
+    mainCount = bce->main.noteCount;
     totalCount = prologCount + mainCount;
     if (prologCount)
-        memcpy(notes, cg->prolog.notes, SRCNOTE_SIZE(prologCount));
-    memcpy(notes + prologCount, cg->main.notes, SRCNOTE_SIZE(mainCount));
+        memcpy(notes, bce->prolog.notes, SRCNOTE_SIZE(prologCount));
+    memcpy(notes + prologCount, bce->main.notes, SRCNOTE_SIZE(mainCount));
     SN_MAKE_TERMINATOR(&notes[totalCount]);
 
     return true;
 }
 
 static JSBool
-NewTryNote(JSContext *cx, CodeGenerator *cg, JSTryNoteKind kind, uintN stackDepth, size_t start,
+NewTryNote(JSContext *cx, BytecodeEmitter *bce, JSTryNoteKind kind, uintN stackDepth, size_t start,
            size_t end)
 {
     JS_ASSERT((uintN)(uint16)stackDepth == stackDepth);
     JS_ASSERT(start <= end);
     JS_ASSERT((size_t)(uint32)start == start);
     JS_ASSERT((size_t)(uint32)end == end);
 
     TryNode *tryNode = cx->tempLifoAlloc().new_<TryNode>();
@@ -7728,49 +7726,49 @@ NewTryNote(JSContext *cx, CodeGenerator 
         js_ReportOutOfMemory(cx);
         return JS_FALSE;
     }
 
     tryNode->note.kind = kind;
     tryNode->note.stackDepth = (uint16)stackDepth;
     tryNode->note.start = (uint32)start;
     tryNode->note.length = (uint32)(end - start);
-    tryNode->prev = cg->lastTryNode;
-    cg->lastTryNode = tryNode;
-    cg->ntrynotes++;
+    tryNode->prev = bce->lastTryNode;
+    bce->lastTryNode = tryNode;
+    bce->ntrynotes++;
     return JS_TRUE;
 }
 
 void
-frontend::FinishTakingTryNotes(CodeGenerator *cg, JSTryNoteArray *array)
+frontend::FinishTakingTryNotes(BytecodeEmitter *bce, JSTryNoteArray *array)
 {
     TryNode *tryNode;
     JSTryNote *tn;
 
-    JS_ASSERT(array->length > 0 && array->length == cg->ntrynotes);
+    JS_ASSERT(array->length > 0 && array->length == bce->ntrynotes);
     tn = array->vector + array->length;
-    tryNode = cg->lastTryNode;
+    tryNode = bce->lastTryNode;
     do {
         *--tn = tryNode->note;
     } while ((tryNode = tryNode->prev) != NULL);
     JS_ASSERT(tn == array->vector);
 }
 
 /*
  * Find the index of the given object for code generator.
  *
  * Since the emitter refers to each parsed object only once, for the index we
  * use the number of already indexes objects. We also add the object to a list
  * to convert the list to a fixed-size array when we complete code generation,
  * see js::CGObjectList::finish below.
  *
- * Most of the objects go to CodeGenerator::objectList but for regexp we use a
- * separated CodeGenerator::regexpList. In this way the emitted index can be
- * directly used to store and fetch a reference to a cloned RegExp object that
- * shares the same JSRegExp private data created for the object literal in
+ * Most of the objects go to BytecodeEmitter::objectList but for regexp we use
+ * a separate BytecodeEmitter::regexpList. In this way the emitted index can
+ * be directly used to store and fetch a reference to a cloned RegExp object
+ * that shares the same JSRegExp private data created for the object literal in
  * objbox. We need a cloned object to hold lastIndex and other direct
  * properties that should not be shared among threads sharing a precompiled
  * function or script.
  *
  * If the code being compiled is function code, allocate a reserved slot in
  * the cloned function object that shares its precompiled script with other
  * cloned function objects and with the compiler-created clone-parent. There
  * are nregexps = script->regexps()->length such reserved slots in each
rename from js/src/frontend/BytecodeGenerator.h
rename to js/src/frontend/BytecodeEmitter.h
--- a/js/src/frontend/BytecodeGenerator.h
+++ b/js/src/frontend/BytecodeEmitter.h
@@ -33,18 +33,18 @@
  * use your version of this file under the terms of the MPL, indicate your
  * decision by deleting the provisions above and replace them with the notice
  * and other provisions required by the GPL or the LGPL. If you do not delete
  * the provisions above, a recipient may use your version of this file under
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
-#ifndef BytecodeGenerator_h__
-#define BytecodeGenerator_h__
+#ifndef BytecodeEmitter_h__
+#define BytecodeEmitter_h__
 
 /*
  * JS bytecode generation.
  */
 #include "jstypes.h"
 #include "jsatom.h"
 #include "jsopcode.h"
 #include "jsscript.h"
@@ -59,17 +59,17 @@
 namespace js {
 
 /*
  * NB: If you add enumerators for scope statements, add them between STMT_WITH
  * and STMT_CATCH, or you will break the STMT_TYPE_IS_SCOPE macro. If you add
  * non-looping statement enumerators, add them before STMT_DO_LOOP or you will
  * break the STMT_TYPE_IS_LOOP macro.
  *
- * Also remember to keep the statementName array in BytecodeGenerator.cpp in
+ * Also remember to keep the statementName array in BytecodeEmitter.cpp in
  * sync.
  */
 enum StmtType {
     STMT_LABEL,                 /* labeled statement:  L: s */
     STMT_IF,                    /* if (then) statement */
     STMT_ELSE,                  /* else clause of if statement */
     STMT_SEQ,                   /* synthetic sequence of statements */
     STMT_BLOCK,                 /* compound statement: { s1[;... sN] } */
@@ -154,17 +154,17 @@ struct StmtInfo {
  */
 #define CATCHNOTE(stmt)  ((stmt).update)
 #define GOSUBS(stmt)     ((stmt).breaks)
 #define GUARDJUMP(stmt)  ((stmt).continues)
 
 #define SET_STATEMENT_TOP(stmt, top)                                          \
     ((stmt)->update = (top), (stmt)->breaks = (stmt)->continues = (-1))
 
-#define TCF_COMPILING           0x01 /* TreeContext is CodeGenerator */
+#define TCF_COMPILING           0x01 /* TreeContext is BytecodeEmitter */
 #define TCF_IN_FUNCTION         0x02 /* parsing inside function body */
 #define TCF_RETURN_EXPR         0x04 /* function has 'return expr;' */
 #define TCF_RETURN_VOID         0x08 /* function has 'return;' */
 #define TCF_IN_FOR_INIT         0x10 /* parsing init expr of for; exclude 'in' */
 #define TCF_FUN_SETS_OUTER_NAME 0x20 /* function set outer name (lexical or free) */
 #define TCF_FUN_PARAM_ARGUMENTS 0x40 /* function has parameter named arguments */
 #define TCF_FUN_USES_ARGUMENTS  0x80 /* function uses arguments except as a
                                         parameter name */
@@ -191,17 +191,17 @@ struct StmtInfo {
  * The comments atop CheckDestructuring explain the distinction
  * between assignment-like and declaration-like destructuring
  * patterns, and why they need to be treated differently.
  */
 #define TCF_DECL_DESTRUCTURING  0x10000
 
 /*
  * A request flag passed to BytecodeCompiler::compileScript and then down via
- * CodeGenerator to JSScript::NewScriptFromCG, from script_compile_sub and any
+ * BytecodeEmitter to JSScript::NewScriptFromCG, from script_compile_sub and any
  * kindred functions that need to make mutable scripts (even empty ones; i.e.,
  * they can't share the const JSScript::emptyScript() singleton).
  */
 #define TCF_NEED_MUTABLE_SCRIPT 0x20000
 
 /*
  * This function/global/eval code body contained a Use Strict Directive. Treat
  * certain strict warnings as errors, and forbid the use of 'with'. See also
@@ -293,17 +293,17 @@ struct StmtInfo {
                                  TCF_FUN_USES_OWN_NAME   |                    \
                                  TCF_HAS_SHARPS          |                    \
                                  TCF_FUN_CALLS_EVAL      |                    \
                                  TCF_FUN_MIGHT_ALIAS_LOCALS |                 \
                                  TCF_FUN_MUTATES_PARAMETER |                  \
                                  TCF_STRICT_MODE_CODE    |                    \
                                  TCF_FUN_EXTENSIBLE_SCOPE)
 
-struct CodeGenerator;
+struct BytecodeEmitter;
 
 struct TreeContext {                /* tree context for semantic checks */
     uint32          flags;          /* statement state flags, see above */
     uint32          bodyid;         /* block number of program/function body */
     uint32          blockidGen;     /* preincremented block number generator */
     uint32          parenDepth;     /* nesting depth of parens that might turn out
                                        to be generator expressions */
     uint32          yieldCount;     /* number of |yield| tokens encountered at
@@ -382,18 +382,18 @@ struct TreeContext {                /* t
      * time during code generation. To avoid a redundant stats update in such
      * cases, we store uint16(-1) in maxScopeDepth.
      */
     ~TreeContext() {
         parser->tc = this->parent;
     }
 
     /*
-     * js::CodeGenerator derives from js::TreeContext; however, only the
-     * top-level CodeGenerators are actually used as full-fledged tree contexts
+     * js::BytecodeEmitter derives from js::TreeContext; however, only the
+     * top-level BytecodeEmitters are actually used as full-fledged tree contexts
      * (to hold decls and lexdeps). We can avoid allocation overhead by making
      * this distinction explicit.
      */
     enum InitBehavior {
         USED_AS_TREE_CONTEXT,
         USED_AS_CODE_GENERATOR
     };
 
@@ -441,17 +441,17 @@ struct TreeContext {                /* t
     // (going upward) from this context's lexical scope. Always return true if
     // this context is itself a generator.
     bool skipSpansGenerator(unsigned skip);
 
     bool compileAndGo() const { return flags & TCF_COMPILE_N_GO; }
     bool inFunction() const { return flags & TCF_IN_FUNCTION; }
 
     bool compiling() const { return flags & TCF_COMPILING; }
-    inline CodeGenerator *asCodeGenerator();
+    inline BytecodeEmitter *asBytecodeEmitter();
 
     bool usesArguments() const {
         return flags & TCF_FUN_USES_ARGUMENTS;
     }
 
     void noteCallsEval() {
         flags |= TCF_FUN_CALLS_EVAL;
     }
@@ -604,17 +604,17 @@ class GCConstList {
     Vector<Value> list;
   public:
     GCConstList(JSContext *cx) : list(cx) {}
     bool append(Value v) { return list.append(v); }
     size_t length() const { return list.length(); }
     void finish(JSConstArray *array);
 };
 
-struct CodeGenerator : public TreeContext
+struct BytecodeEmitter : public TreeContext
 {
     struct {
         jsbytecode  *base;          /* base of JS bytecode vector */
         jsbytecode  *limit;         /* one byte beyond end of bytecode */
         jsbytecode  *next;          /* pointer to next free bytecode */
         jssrcnote   *notes;         /* source notes, see below */
         uintN       noteCount;      /* number of source notes so far */
         uintN       noteLimit;      /* limit number for source notes in notePool */
@@ -665,40 +665,40 @@ struct CodeGenerator : public TreeContex
     /* Vectors of pn_cookie slot values. */
     typedef Vector<uint32, 8> SlotVector;
     SlotVector      closedArgs;
     SlotVector      closedVars;
 
     uint16          traceIndex;     /* index for the next JSOP_TRACE instruction */
     uint16          typesetCount;   /* Number of JOF_TYPESET opcodes generated */
 
-    CodeGenerator(Parser *parser, uintN lineno);
+    BytecodeEmitter(Parser *parser, uintN lineno);
     bool init(JSContext *cx, TreeContext::InitBehavior ib = USED_AS_CODE_GENERATOR);
 
     JSContext *context() {
         return parser->context;
     }
 
     /*
-     * Note that cgs are magic: they own the arena "top-of-stack" space
-     * above their tempMark points. This means that you cannot alloc from
-     * tempLifoAlloc and save the pointer beyond the next CodeGenerator
+     * Note that BytecodeEmitters are magic: they own the arena "top-of-stack"
+     * space above their tempMark points. This means that you cannot alloc from
+     * tempLifoAlloc and save the pointer beyond the next BytecodeEmitter
      * destructor call.
      */
-    ~CodeGenerator();
+    ~BytecodeEmitter();
 
     /*
      * Adds a use of a variable that is statically known to exist on the
      * global object.
      *
      * The actual slot of the variable on the global object is not known
      * until after compilation. Properties must be resolved before being
      * added, to avoid aliasing properties that should be resolved. This makes
      * slot prediction based on the global object's free slot impossible. So,
-     * we use the slot to index into cg->globalScope->defs, and perform a
+     * we use the slot to index into bce->globalScope->defs, and perform a
      * fixup of the script at the very end of compilation.
      *
      * If the global use can be cached, |cookie| will be set to |slot|.
      * Otherwise, |cookie| is set to the free cookie value.
      */
     bool addGlobalUse(JSAtom *atom, uint32 slot, UpvarCookie *cookie);
 
     bool hasUpvarIndices() const {
@@ -743,102 +743,100 @@ struct CodeGenerator : public TreeContex
             if (STMT_IS_LOOP(stmt))
                 return false;
         }
         flags |= TCF_HAS_SINGLETONS;
         return true;
     }
 };
 
-#define CG_TS(cg)               TS((cg)->parser)
+#define CG_TS(bce)               TS((bce)->parser)
 
-#define CG_BASE(cg)             ((cg)->current->base)
-#define CG_LIMIT(cg)            ((cg)->current->limit)
-#define CG_NEXT(cg)             ((cg)->current->next)
-#define CG_CODE(cg,offset)      (CG_BASE(cg) + (offset))
-#define CG_OFFSET(cg)           (CG_NEXT(cg) - CG_BASE(cg))
+#define CG_BASE(bce)             ((bce)->current->base)
+#define CG_LIMIT(bce)            ((bce)->current->limit)
+#define CG_NEXT(bce)             ((bce)->current->next)
+#define CG_CODE(bce,offset)      (CG_BASE(bce) + (offset))
+#define CG_OFFSET(bce)           (CG_NEXT(bce) - CG_BASE(bce))
 
-#define CG_NOTES(cg)            ((cg)->current->notes)
-#define CG_NOTE_COUNT(cg)       ((cg)->current->noteCount)
-#define CG_NOTE_LIMIT(cg)       ((cg)->current->noteLimit)
-#define CG_LAST_NOTE_OFFSET(cg) ((cg)->current->lastNoteOffset)
-#define CG_CURRENT_LINE(cg)     ((cg)->current->currentLine)
+#define CG_NOTES(bce)            ((bce)->current->notes)
+#define CG_NOTE_COUNT(bce)       ((bce)->current->noteCount)
+#define CG_NOTE_LIMIT(bce)       ((bce)->current->noteLimit)
+#define CG_LAST_NOTE_OFFSET(bce) ((bce)->current->lastNoteOffset)
+#define CG_CURRENT_LINE(bce)     ((bce)->current->currentLine)
 
-#define CG_PROLOG_BASE(cg)      ((cg)->prolog.base)
-#define CG_PROLOG_LIMIT(cg)     ((cg)->prolog.limit)
-#define CG_PROLOG_NEXT(cg)      ((cg)->prolog.next)
-#define CG_PROLOG_CODE(cg,poff) (CG_PROLOG_BASE(cg) + (poff))
-#define CG_PROLOG_OFFSET(cg)    (CG_PROLOG_NEXT(cg) - CG_PROLOG_BASE(cg))
+#define CG_PROLOG_BASE(bce)      ((bce)->prolog.base)
+#define CG_PROLOG_LIMIT(bce)     ((bce)->prolog.limit)
+#define CG_PROLOG_NEXT(bce)      ((bce)->prolog.next)
+#define CG_PROLOG_CODE(bce,poff) (CG_PROLOG_BASE(bce) + (poff))
+#define CG_PROLOG_OFFSET(bce)    (CG_PROLOG_NEXT(bce) - CG_PROLOG_BASE(bce))
 
-#define CG_SWITCH_TO_MAIN(cg)   ((cg)->current = &(cg)->main)
-#define CG_SWITCH_TO_PROLOG(cg) ((cg)->current = &(cg)->prolog)
+#define CG_SWITCH_TO_MAIN(bce)   ((bce)->current = &(bce)->main)
+#define CG_SWITCH_TO_PROLOG(bce) ((bce)->current = &(bce)->prolog)
 
-inline CodeGenerator *
-TreeContext::asCodeGenerator()
+inline BytecodeEmitter *
+TreeContext::asBytecodeEmitter()
 {
     JS_ASSERT(compiling());
-    return static_cast<CodeGenerator *>(this);
+    return static_cast<BytecodeEmitter *>(this);
 }
 
 namespace frontend {
 
 /*
  * Emit one bytecode.
  */
 ptrdiff_t
-Emit1(JSContext *cx, CodeGenerator *cg, JSOp op);
+Emit1(JSContext *cx, BytecodeEmitter *bce, JSOp op);
 
 /*
  * Emit two bytecodes, an opcode (op) with a byte of immediate operand (op1).
  */
 ptrdiff_t
-Emit2(JSContext *cx, CodeGenerator *cg, JSOp op, jsbytecode op1);
+Emit2(JSContext *cx, BytecodeEmitter *bce, JSOp op, jsbytecode op1);
 
 /*
  * Emit three bytecodes, an opcode with two bytes of immediate operands.
  */
 ptrdiff_t
-Emit3(JSContext *cx, CodeGenerator *cg, JSOp op, jsbytecode op1,
-         jsbytecode op2);
+Emit3(JSContext *cx, BytecodeEmitter *bce, JSOp op, jsbytecode op1, jsbytecode op2);
 
 /*
  * Emit five bytecodes, an opcode with two 16-bit immediates.
  */
 ptrdiff_t
-Emit5(JSContext *cx, CodeGenerator *cg, JSOp op, uint16 op1,
-         uint16 op2);
+Emit5(JSContext *cx, BytecodeEmitter *bce, JSOp op, uint16 op1, uint16 op2);
 
 /*
  * Emit (1 + extra) bytecodes, for N bytes of op and its immediate operand.
  */
 ptrdiff_t
-EmitN(JSContext *cx, CodeGenerator *cg, JSOp op, size_t extra);
+EmitN(JSContext *cx, BytecodeEmitter *bce, JSOp op, size_t extra);
 
 /*
  * Unsafe macro to call SetJumpOffset and return false if it does.
  */
-#define CHECK_AND_SET_JUMP_OFFSET_CUSTOM(cx,cg,pc,off,BAD_EXIT)               \
+#define CHECK_AND_SET_JUMP_OFFSET_CUSTOM(cx,bce,pc,off,BAD_EXIT)              \
     JS_BEGIN_MACRO                                                            \
-        if (!SetJumpOffset(cx, cg, pc, off)) {                             \
+        if (!SetJumpOffset(cx, bce, pc, off)) {                               \
             BAD_EXIT;                                                         \
         }                                                                     \
     JS_END_MACRO
 
-#define CHECK_AND_SET_JUMP_OFFSET(cx,cg,pc,off)                               \
-    CHECK_AND_SET_JUMP_OFFSET_CUSTOM(cx,cg,pc,off,return JS_FALSE)
+#define CHECK_AND_SET_JUMP_OFFSET(cx,bce,pc,off)                              \
+    CHECK_AND_SET_JUMP_OFFSET_CUSTOM(cx,bce,pc,off,return JS_FALSE)
 
-#define CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx,cg,off,BAD_EXIT)               \
-    CHECK_AND_SET_JUMP_OFFSET_CUSTOM(cx, cg, CG_CODE(cg,off),                 \
-                                     CG_OFFSET(cg) - (off), BAD_EXIT)
+#define CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx,bce,off,BAD_EXIT)              \
+    CHECK_AND_SET_JUMP_OFFSET_CUSTOM(cx, bce, CG_CODE(bce,off),               \
+                                     CG_OFFSET(bce) - (off), BAD_EXIT)
 
-#define CHECK_AND_SET_JUMP_OFFSET_AT(cx,cg,off)                               \
-    CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, cg, off, return JS_FALSE)
+#define CHECK_AND_SET_JUMP_OFFSET_AT(cx,bce,off)                              \
+    CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, bce, off, return JS_FALSE)
 
 JSBool
-SetJumpOffset(JSContext *cx, CodeGenerator *cg, jsbytecode *pc, ptrdiff_t off);
+SetJumpOffset(JSContext *cx, BytecodeEmitter *bce, jsbytecode *pc, ptrdiff_t off);
 
 /*
  * Push the C-stack-allocated struct at stmt onto the stmtInfo stack.
  */
 void
 PushStatement(TreeContext *tc, StmtInfo *stmt, StmtType type, ptrdiff_t top);
 
 /*
@@ -852,37 +850,37 @@ PushBlockScope(TreeContext *tc, StmtInfo
 /*
  * Pop tc->topStmt. If the top StmtInfo struct is not stack-allocated, it
  * is up to the caller to free it.
  */
 void
 PopStatementTC(TreeContext *tc);
 
 /*
- * Like PopStatementTC(cg), also patch breaks and continues unless the top
+ * Like PopStatementTC(bce), also patch breaks and continues unless the top
  * statement info record represents a try-catch-finally suite. May fail if a
  * jump offset overflows.
  */
 JSBool
-PopStatementCG(JSContext *cx, CodeGenerator *cg);
+PopStatementCG(JSContext *cx, BytecodeEmitter *bce);
 
 /*
  * Define and lookup a primitive jsval associated with the const named by atom.
  * DefineCompileTimeConstant analyzes the constant-folded initializer at pn
- * and saves the const's value in cg->constList, if it can be used at compile
+ * and saves the const's value in bce->constList, if it can be used at compile
  * time. It returns true unless an error occurred.
  *
  * If the initializer's value could not be saved, DefineCompileTimeConstant
  * calls will return the undefined value. DefineCompileTimeConstant tries
  * to find a const value memorized for atom, returning true with *vp set to a
  * value other than undefined if the constant was found, true with *vp set to
  * JSVAL_VOID if not found, and false on error.
  */
 JSBool
-DefineCompileTimeConstant(JSContext *cx, CodeGenerator *cg, JSAtom *atom, ParseNode *pn);
+DefineCompileTimeConstant(JSContext *cx, BytecodeEmitter *bce, JSAtom *atom, ParseNode *pn);
 
 /*
  * Find a lexically scoped variable (one declared by let, catch, or an array
  * comprehension) named by atom, looking in tc's compile-time scopes.
  *
  * If a WITH statement is reached along the scope stack, return its statement
  * info record, so callers can tell that atom is ambiguous. If slotp is not
  * null, then if atom is found, set *slotp to its stack slot, otherwise to -1.
@@ -892,26 +890,26 @@ DefineCompileTimeConstant(JSContext *cx,
  *
  * In any event, directly return the statement info record in which atom was
  * found. Otherwise return null.
  */
 StmtInfo *
 LexicalLookup(TreeContext *tc, JSAtom *atom, jsint *slotp, StmtInfo *stmt = NULL);
 
 /*
- * Emit code into cg for the tree rooted at pn.
+ * Emit code into bce for the tree rooted at pn.
  */
 JSBool
-EmitTree(JSContext *cx, CodeGenerator *cg, ParseNode *pn);
+EmitTree(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn);
 
 /*
- * Emit function code using cg for the tree rooted at body.
+ * Emit function code using bce for the tree rooted at body.
  */
 JSBool
-EmitFunctionScript(JSContext *cx, CodeGenerator *cg, ParseNode *body);
+EmitFunctionScript(JSContext *cx, BytecodeEmitter *bce, ParseNode *body);
 
 } /* namespace frontend */
 
 /*
  * Source notes generated along with bytecode for decompiling and debugging.
  * A source note is a uint8 with 5 bits of type and 3 of offset from the pc of
  * the previous note. If 3 bits of offset aren't enough, extended delta notes
  * (SRC_XDELTA) consisting of 2 set high order bits followed by 6 offset bits
@@ -921,17 +919,17 @@ EmitFunctionScript(JSContext *cx, CodeGe
  *                 Source Note               Extended Delta
  *              +7-6-5-4-3+2-1-0+           +7-6-5+4-3-2-1-0+
  *              |note-type|delta|           |1 1| ext-delta |
  *              +---------+-----+           +---+-----------+
  *
  * At most one "gettable" note (i.e., a note of type other than SRC_NEWLINE,
  * SRC_SETLINE, and SRC_XDELTA) applies to a given bytecode.
  *
- * NB: the js_SrcNoteSpec array in BytecodeGenerator.cpp is indexed by this
+ * NB: the js_SrcNoteSpec array in BytecodeEmitter.cpp is indexed by this
  * enum, so its initializers need to match the order here.
  *
  * Note on adding new source notes: every pair of bytecodes (A, B) where A and
  * B have disjoint sets of source notes that could apply to each bytecode may
  * reuse the same note type value for two notes (snA, snB) that have the same
  * arity, offsetBias, and isSpanDep initializers in JSSrcNoteSpec. This is
  * why SRC_IF and SRC_INITPROP have the same value below. For bad historical
  * reasons, some bytecodes below that could be overlayed have not been, but
@@ -1051,73 +1049,74 @@ enum SrcNoteType {
 
 /* A source note array is terminated by an all-zero element. */
 #define SN_MAKE_TERMINATOR(sn)  (*(sn) = SRC_NULL)
 #define SN_IS_TERMINATOR(sn)    (*(sn) == SRC_NULL)
 
 namespace frontend {
 
 /*
- * Append a new source note of the given type (and therefore size) to cg's
- * notes dynamic array, updating cg->noteCount. Return the new note's index
- * within the array pointed at by cg->current->notes. Return -1 if out of
+ * Append a new source note of the given type (and therefore size) to bce's
+ * notes dynamic array, updating bce->noteCount. Return the new note's index
+ * within the array pointed at by bce->current->notes. Return -1 if out of
  * memory.
  */
 intN
-NewSrcNote(JSContext *cx, CodeGenerator *cg, SrcNoteType type);
+NewSrcNote(JSContext *cx, BytecodeEmitter *bce, SrcNoteType type);
 
 intN
-NewSrcNote2(JSContext *cx, CodeGenerator *cg, SrcNoteType type, ptrdiff_t offset);
+NewSrcNote2(JSContext *cx, BytecodeEmitter *bce, SrcNoteType type, ptrdiff_t offset);
 
 intN
-NewSrcNote3(JSContext *cx, CodeGenerator *cg, SrcNoteType type, ptrdiff_t offset1,
+NewSrcNote3(JSContext *cx, BytecodeEmitter *bce, SrcNoteType type, ptrdiff_t offset1,
                ptrdiff_t offset2);
 
 /*
  * NB: this function can add at most one extra extended delta note.
  */
 jssrcnote *
-AddToSrcNoteDelta(JSContext *cx, CodeGenerator *cg, jssrcnote *sn, ptrdiff_t delta);
+AddToSrcNoteDelta(JSContext *cx, BytecodeEmitter *bce, jssrcnote *sn, ptrdiff_t delta);
 
 /*
  * Finish taking source notes in cx's notePool, copying final notes to the new
  * stable store allocated by the caller and passed in via notes. Return false
  * on malloc failure, which means this function reported an error.
  *
  * To compute the number of jssrcnotes to allocate and pass in via notes, use
  * the CG_COUNT_FINAL_SRCNOTES macro. This macro knows a lot about details of
  * FinishTakingSrcNotes, so DON'T CHANGE js::frontend::FinishTakingSrcNotes
  * WITHOUT CHECKING WHETHER THIS MACRO NEEDS CORRESPONDING CHANGES!
  */
-#define CG_COUNT_FINAL_SRCNOTES(cg, cnt)                                      \
+#define CG_COUNT_FINAL_SRCNOTES(bce, cnt)                                     \
     JS_BEGIN_MACRO                                                            \
-        ptrdiff_t diff_ = CG_PROLOG_OFFSET(cg) - (cg)->prolog.lastNoteOffset; \
-        cnt = (cg)->prolog.noteCount + (cg)->main.noteCount + 1;              \
-        if ((cg)->prolog.noteCount &&                                         \
-            (cg)->prolog.currentLine != (cg)->firstLine) {                    \
+        ptrdiff_t diff_ =                                                     \
+            CG_PROLOG_OFFSET(bce) - (bce)->prolog.lastNoteOffset;             \
+        cnt = (bce)->prolog.noteCount + (bce)->main.noteCount + 1;            \
+        if ((bce)->prolog.noteCount &&                                        \
+            (bce)->prolog.currentLine != (bce)->firstLine) {                  \
             if (diff_ > SN_DELTA_MASK)                                        \
                 cnt += JS_HOWMANY(diff_ - SN_DELTA_MASK, SN_XDELTA_MASK);     \
-            cnt += 2 + (((cg)->firstLine > SN_3BYTE_OFFSET_MASK) << 1);       \
+            cnt += 2 + (((bce)->firstLine > SN_3BYTE_OFFSET_MASK) << 1);      \
         } else if (diff_ > 0) {                                               \
-            if (cg->main.noteCount) {                                         \
-                jssrcnote *sn_ = (cg)->main.notes;                            \
+            if ((bce)->main.noteCount) {                                      \
+                jssrcnote *sn_ = (bce)->main.notes;                           \
                 diff_ -= SN_IS_XDELTA(sn_)                                    \
                          ? SN_XDELTA_MASK - (*sn_ & SN_XDELTA_MASK)           \
                          : SN_DELTA_MASK - (*sn_ & SN_DELTA_MASK);            \
             }                                                                 \
             if (diff_ > 0)                                                    \
                 cnt += JS_HOWMANY(diff_, SN_XDELTA_MASK);                     \
         }                                                                     \
     JS_END_MACRO
 
 JSBool
-FinishTakingSrcNotes(JSContext *cx, CodeGenerator *cg, jssrcnote *notes);
+FinishTakingSrcNotes(JSContext *cx, BytecodeEmitter *bce, jssrcnote *notes);
 
 void
-FinishTakingTryNotes(CodeGenerator *cg, JSTryNoteArray *array);
+FinishTakingTryNotes(BytecodeEmitter *bce, JSTryNoteArray *array);
 
 } /* namespace frontend */
 } /* namespace js */
 
 struct JSSrcNoteSpec {
     const char      *name;      /* name for disassembly/debugging output */
     int8            arity;      /* number of offset operands */
     uint8           offsetBias; /* bias of offset(s) from annotated pc */
@@ -1129,9 +1128,9 @@ extern JS_FRIEND_DATA(JSSrcNoteSpec)  js
 extern JS_FRIEND_API(uintN)         js_SrcNoteLength(jssrcnote *sn);
 
 /*
  * Get and set the offset operand identified by which (0 for the first, etc.).
  */
 extern JS_FRIEND_API(ptrdiff_t)
 js_GetSrcNoteOffset(jssrcnote *sn, uintN which);
 
-#endif /* BytecodeGenerator_h__ */
+#endif /* BytecodeEmitter_h__ */
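
The BytecodeEmitter.h comment above documents the source-note byte layout: five bits of type and three bits of pc delta per note, with extended-delta notes flagged by two set high bits and carrying six delta bits. The following self-contained sketch decodes notes under that description; the masks are assumptions derived from the diagram rather than the header's SN_* macros.

    #include <cstdint>
    #include <cstdio>

    // Assumed masks, derived from the layout diagram above (not the SN_* macros):
    // ordinary note = 5-bit type in the high bits, 3-bit delta in the low bits;
    // extended delta = both high bits set, 6-bit delta in the low bits.
    static bool     IsXDelta(uint8_t sn)  { return (sn & 0xC0) == 0xC0; }
    static unsigned NoteType(uint8_t sn)  { return sn >> 3; }
    static unsigned NoteDelta(uint8_t sn) { return IsXDelta(sn) ? (sn & 0x3F) : (sn & 0x07); }

    int main()
    {
        // One extended-delta note followed by an ordinary note: the deltas
        // accumulate into the bytecode offset that the second note annotates.
        const uint8_t notes[] = { 0xF5, 0x23 };
        unsigned pc = 0;
        for (uint8_t sn : notes) {
            pc += NoteDelta(sn);
            if (!IsXDelta(sn))
                printf("note type %u annotates bytecode offset %u\n", NoteType(sn), pc);
        }
        return 0;
    }
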
--- a/js/src/frontend/FoldConstants.cpp
+++ b/js/src/frontend/FoldConstants.cpp
@@ -37,17 +37,17 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #include "frontend/FoldConstants.h"
 
 #include "jslibmath.h"
 
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/ParseNode.h"
 
 #if JS_HAS_XML_SUPPORT
 #include "jsxml.h"
 #endif
 
 using namespace js;
 
--- a/js/src/frontend/ParseNode-inl.h
+++ b/js/src/frontend/ParseNode-inl.h
@@ -36,17 +36,17 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #ifndef ParseNode_inl_h__
 #define ParseNode_inl_h__
 
 #include "frontend/ParseNode.h"
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/TokenStream.h"
 
 namespace js {
 
 inline bool
 ParseNode::isConstant()
 {
     switch (pn_type) {
--- a/js/src/frontend/ParseNode.h
+++ b/js/src/frontend/ParseNode.h
@@ -831,17 +831,17 @@ CloneLeftHandSide(ParseNode *opn, TreeCo
  *                   dn = pre-allocate a Definition for x;
  *                   map x to dn in tc->lexdeps;
  *               }
  *           }
  *           append pn to dn's use chain;
  *       }
  *   }
  *
- * See frontend/BytecodeGenerator.h for js::TreeContext and its top*Stmt,
+ * See frontend/BytecodeEmitter.h for js::TreeContext and its top*Stmt,
  * decls, and lexdeps members.
  *
  * Notes:
  *
  *  0. To avoid bloating ParseNode, we steal a bit from pn_arity for pn_defn
  *     and set it on a ParseNode instead of allocating a Definition.
  *
  *  1. Due to hoisting, a definition cannot be eliminated even if its "Variable
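
The definition/use-chain pseudocode quoted from ParseNode.h above describes how a free name gets a pre-allocated placeholder Definition in tc->lexdeps and how each later use is appended to that definition's use chain. Below is a hedged, self-contained sketch of that bookkeeping; the names and containers are invented for illustration and stand in for the real ParseNode/Definition and atom-map machinery.

    #include <cstdio>
    #include <map>
    #include <string>
    #include <vector>

    // Placeholder definition record: the real parser threads uses through the
    // parse nodes themselves; a vector of node ids is enough for illustration.
    struct Definition {
        std::vector<int> uses;   // "use chain" of parse-node ids
    };

    struct TreeContextSketch {
        std::map<std::string, Definition> lexdeps;   // stands in for tc->lexdeps

        // "If x is not mapped, pre-allocate a Definition for x and map x to it;
        //  then append the use to the definition's use chain."
        void noteFreeUse(const std::string &name, int useNodeId) {
            lexdeps[name].uses.push_back(useNodeId);  // operator[] pre-allocates
        }
    };

    int main() {
        TreeContextSketch tc;
        tc.noteFreeUse("x", 1);   // first use creates the placeholder definition
        tc.noteFreeUse("x", 2);   // later uses chain onto it
        for (int id : tc.lexdeps["x"].uses)
            printf("use of x at parse node %d\n", id);
        return 0;
    }
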
--- a/js/src/frontend/Parser.cpp
+++ b/js/src/frontend/Parser.cpp
@@ -42,17 +42,17 @@
  * JS parser.
  *
  * This is a recursive-descent parser for the JavaScript language specified by
  * "The JavaScript 1.5 Language Specification".  It uses lexical and semantic
  * feedback to disambiguate non-LL(1) structures.  It generates trees of nodes
  * induced by the recursive parsing (not precise syntax trees, see Parser.h).
  * After tree construction, it rewrites trees to fold constants and evaluate
  * compile-time expressions.  Finally, it calls js::frontend::EmitTree (see
- * CodeGenerator.h) to generate bytecode.
+ * BytecodeEmitter.h) to generate bytecode.
  *
  * This parser attempts no error recovery.
  */
 
 #include "frontend/Parser.h"
 
 #include <stdlib.h>
 #include <string.h>
@@ -73,17 +73,17 @@
 #include "jsnum.h"
 #include "jsobj.h"
 #include "jsopcode.h"
 #include "jsscope.h"
 #include "jsscript.h"
 #include "jsstr.h"
 
 #include "frontend/BytecodeCompiler.h"
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/FoldConstants.h"
 #include "frontend/ParseMaps.h"
 #include "frontend/TokenStream.h"
 
 #if JS_HAS_XML_SUPPORT
 #include "jsxml.h"
 #endif
 
@@ -1847,17 +1847,17 @@ LeaveFunction(ParseNode *fn, TreeContext
     }
 
     funbox->bindings.transfer(funtc->parser->context, &funtc->bindings);
 
     return true;
 }
 
 static bool
-DefineGlobal(ParseNode *pn, CodeGenerator *cg, PropertyName *name);
+DefineGlobal(ParseNode *pn, BytecodeEmitter *bce, PropertyName *name);
 
 /*
  * FIXME? this Parser method was factored from Parser::functionDef with minimal
  * change, hence the funtc ref param and funbox. It probably should match
  * functionBody, etc., and use tc and tc->funbox instead of taking explicit
  * parameters.
  */
 bool
@@ -2087,17 +2087,17 @@ Parser::functionDef(PropertyName *funNam
                 return NULL;
         }
 
         /*
          * A function directly inside another's body needs only a local
          * variable to bind its name to its value, and not an activation object
          * property (it might also need the activation property, if the outer
          * function contains with statements, e.g., but the stack slot wins
-         * when BytecodeGenerator.cpp's BindNameToSlot can optimize a JSOP_NAME
+         * when BytecodeEmitter.cpp's BindNameToSlot can optimize a JSOP_NAME
          * into a JSOP_GETLOCAL bytecode).
          */
         if (bodyLevel && tc->inFunction()) {
             /*
              * Define a local in the outer function so that BindNameToSlot
              * can properly optimize accesses. Note that we need a local
              * variable, not an argument, for the function statement. Thus
              * we add a variable even if a parameter with the given name
@@ -2304,17 +2304,17 @@ Parser::functionDef(PropertyName *funNam
         pn->pn_body->append(body);
         pn->pn_body->pn_pos = body->pn_pos;
     } else {
         pn->pn_body = body;
     }
 
     if (!outertc->inFunction() && bodyLevel && kind == Statement && outertc->compiling()) {
         JS_ASSERT(pn->pn_cookie.isFree());
-        if (!DefineGlobal(pn, outertc->asCodeGenerator(), funName))
+        if (!DefineGlobal(pn, outertc->asBytecodeEmitter(), funName))
             return NULL;
     }
 
     pn->pn_blockid = outertc->blockid();
 
     if (!LeaveFunction(pn, &funtc, funName, kind))
         return NULL;
 
@@ -2614,17 +2614,17 @@ BindLet(JSContext *cx, BindData *data, J
     const Shape *shape = blockObj->defineBlockVariable(cx, ATOM_TO_JSID(atom), n);
     if (!shape)
         return false;
 
     /*
      * Store pn temporarily in what would be shape-mapped slots in a cloned
      * block object (once the prototype's final population is known, after all
      * 'let' bindings for this block have been parsed). We free these slots in
-     * BytecodeGenerator.cpp:EmitEnterBlock so they don't tie up unused space
+     * BytecodeEmitter.cpp:EmitEnterBlock so they don't tie up unused space
      * in the so-called "static" prototype Block.
      */
     blockObj->setSlot(shape->slot, PrivateValue(pn));
     return true;
 }
 
 static void
 PopStatement(TreeContext *tc)
@@ -2671,27 +2671,27 @@ OuterLet(TreeContext *tc, StmtInfo *stmt
  *
  * For now, don't try to lookup eval frame variables at compile time. This is
  * sub-optimal: we could handle eval-called-from-global-code gvars since eval
  * gets its own script and frame. The eval-from-function-code case is harder,
  * since functions do not atomize gvars and then reserve their atom indexes as
  * stack frame slots.
  */
 static bool
-DefineGlobal(ParseNode *pn, CodeGenerator *cg, PropertyName *name)
+DefineGlobal(ParseNode *pn, BytecodeEmitter *bce, PropertyName *name)
 {
-    GlobalScope *globalScope = cg->compiler()->globalScope;
+    GlobalScope *globalScope = bce->compiler()->globalScope;
     JSObject *globalObj = globalScope->globalObj;
 
-    if (!cg->compileAndGo() || !globalObj || cg->compilingForEval())
+    if (!bce->compileAndGo() || !globalObj || bce->compilingForEval())
         return true;
 
     AtomIndexAddPtr p = globalScope->names.lookupForAdd(name);
     if (!p) {
-        JSContext *cx = cg->parser->context;
+        JSContext *cx = bce->parser->context;
 
         JSObject *holder;
         JSProperty *prop;
         if (!globalObj->lookupProperty(cx, name, &holder, &prop))
             return false;
 
         FunctionBox *funbox = pn->isKind(TOK_FUNCTION) ? pn->pn_funbox : NULL;
 
@@ -2797,17 +2797,17 @@ BindTopLevelVar(JSContext *cx, BindData 
     if (pn->pn_dflags & PND_CONST)
         return true;
 
     /*
      * If this is a global variable, we're compile-and-go, and a global object
      * is present, try to bake in either an already available slot or a
      * predicted slot that will be defined after compiling is completed.
      */
-    return DefineGlobal(pn, tc->asCodeGenerator(), pn->pn_atom->asPropertyName());
+    return DefineGlobal(pn, tc->asBytecodeEmitter(), pn->pn_atom->asPropertyName());
 }
 
 static bool
 BindFunctionLocal(JSContext *cx, BindData *data, MultiDeclRange &mdl, TreeContext *tc)
 {
     JS_ASSERT(tc->inFunction());
 
     ParseNode *pn = data->pn;
--- a/js/src/frontend/Parser.h
+++ b/js/src/frontend/Parser.h
@@ -52,33 +52,33 @@
 #include "jswin.h"
 
 #include "frontend/ParseMaps.h"
 #include "frontend/ParseNode.h"
 
 namespace js {
 
 struct GlobalScope {
-    GlobalScope(JSContext *cx, JSObject *globalObj, CodeGenerator *cg)
-      : globalObj(globalObj), cg(cg), defs(cx), names(cx)
+    GlobalScope(JSContext *cx, JSObject *globalObj, BytecodeEmitter *bce)
+      : globalObj(globalObj), bce(bce), defs(cx), names(cx)
     { }
 
     struct GlobalDef {
         JSAtom        *atom;        // If non-NULL, specifies the property name to add.
         FunctionBox   *funbox;      // If non-NULL, function value for the property.
                                     // This value is only set/used if atom is non-NULL.
         uint32        knownSlot;    // If atom is NULL, this is the known shape slot.
 
         GlobalDef() { }
         GlobalDef(uint32 knownSlot) : atom(NULL), knownSlot(knownSlot) { }
         GlobalDef(JSAtom *atom, FunctionBox *box) : atom(atom), funbox(box) { }
     };
 
     JSObject        *globalObj;
-    CodeGenerator   *cg;
+    BytecodeEmitter *bce;
 
     /*
      * This is the table of global names encountered during parsing. Each
      * global name appears in the list only once, and the |names| table
      * maps back into |defs| for fast lookup.
      *
      * A definition may either specify an existing global property, or a new
      * one that must be added after compilation succeeds.
--- a/js/src/frontend/TokenStream.cpp
+++ b/js/src/frontend/TokenStream.cpp
@@ -60,17 +60,17 @@
 #include "jsatom.h"
 #include "jscntxt.h"
 #include "jsversion.h"
 #include "jsexn.h"
 #include "jsnum.h"
 #include "jsopcode.h"
 #include "jsscript.h"
 
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/Parser.h"
 #include "frontend/TokenStream.h"
 #include "vm/RegExpObject.h"
 
 #include "jsscriptinlines.h"
 
 #if JS_HAS_XML_SUPPORT
 #include "jsxml.h"
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -82,17 +82,17 @@
 #include "prmjtime.h"
 #include "jsweakmap.h"
 #include "jswrapper.h"
 #include "jstypedarray.h"
 
 #include "ds/LifoAlloc.h"
 #include "builtin/RegExp.h"
 #include "frontend/BytecodeCompiler.h"
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 
 #include "jsatominlines.h"
 #include "jsinferinlines.h"
 #include "jsobjinlines.h"
 #include "jsscopeinlines.h"
 #include "jsscriptinlines.h"
 
 #include "vm/RegExpObject-inl.h"
--- a/js/src/jsdbgapi.cpp
+++ b/js/src/jsdbgapi.cpp
@@ -61,17 +61,17 @@
 #include "jsobj.h"
 #include "jsopcode.h"
 #include "jsscope.h"
 #include "jsscript.h"
 #include "jsstr.h"
 #include "jswatchpoint.h"
 #include "jswrapper.h"
 
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/Parser.h"
 #include "vm/Debugger.h"
 
 #include "jsatominlines.h"
 #include "jsinferinlines.h"
 #include "jsobjinlines.h"
 #include "jsinterpinlines.h"
 #include "jsscopeinlines.h"
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -67,17 +67,17 @@
 #include "jsproxy.h"
 #include "jsscope.h"
 #include "jsscript.h"
 #include "jsstr.h"
 #include "jsexn.h"
 #include "jstracer.h"
 
 #include "frontend/BytecodeCompiler.h"
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/TokenStream.h"
 #include "vm/CallObject.h"
 #include "vm/Debugger.h"
 
 #if JS_HAS_GENERATORS
 # include "jsiter.h"
 #endif
 
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -67,17 +67,17 @@
 #include "jsopcode.h"
 #include "jspropertycache.h"
 #include "jsscope.h"
 #include "jsscript.h"
 #include "jsstr.h"
 #include "jstracer.h"
 #include "jslibmath.h"
 
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #ifdef JS_METHODJIT
 #include "methodjit/MethodJIT.h"
 #include "methodjit/MethodJIT-inl.h"
 #include "methodjit/Logging.h"
 #endif
 #include "vm/Debugger.h"
 
 #include "jsatominlines.h"
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -77,17 +77,17 @@
 #include "jsstr.h"
 #include "jstracer.h"
 #include "jsdbgapi.h"
 #include "json.h"
 #include "jswatchpoint.h"
 #include "jswrapper.h"
 
 #include "frontend/BytecodeCompiler.h"
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/Parser.h"
 
 #include "jsarrayinlines.h"
 #include "jsinterpinlines.h"
 #include "jsscopeinlines.h"
 #include "jsscriptinlines.h"
 #include "jsobjinlines.h"
 
--- a/js/src/jsopcode.cpp
+++ b/js/src/jsopcode.cpp
@@ -64,17 +64,17 @@
 #include "jsiter.h"
 #include "jsnum.h"
 #include "jsobj.h"
 #include "jsopcode.h"
 #include "jsscope.h"
 #include "jsscript.h"
 #include "jsstr.h"
 
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/TokenStream.h"
 #include "vm/Debugger.h"
 
 #include "jscntxtinlines.h"
 #include "jsobjinlines.h"
 #include "jsopcodeinlines.h"
 #include "jsscriptinlines.h"
 
@@ -957,17 +957,17 @@ js_GetPrinterOutput(JSPrinter *jp)
     if (!str)
         return NULL;
     jp->pool.freeAll();
     INIT_SPRINTER(cx, &jp->sprinter, &jp->pool, 0);
     return str;
 }
 
 /*
- * NB: Indexed by SRC_DECL_* defines from frontend/BytecodeGenerator.h.
+ * NB: Indexed by SRC_DECL_* defines from frontend/BytecodeEmitter.h.
  */
 static const char * const var_prefix[] = {"var ", "const ", "let "};
 
 static const char *
 VarPrefix(jssrcnote *sn)
 {
     if (sn && (SN_TYPE(sn) == SRC_DECL || SN_TYPE(sn) == SRC_GROUPASSIGN)) {
         ptrdiff_t type = js_GetSrcNoteOffset(sn, 0);
--- a/js/src/jsopcode.h
+++ b/js/src/jsopcode.h
@@ -173,17 +173,17 @@ typedef enum JSOp {
 #define JUMP_OFFSET_MIN         ((int16)0x8000)
 #define JUMP_OFFSET_MAX         ((int16)0x7fff)
 
 /*
  * When a short jump won't hold a relative offset, its 2-byte immediate offset
  * operand is an unsigned index of a span-dependency record, maintained until
  * code generation finishes -- after which some (but we hope not nearly all)
  * span-dependent jumps must be extended (see js::frontend::OptimizeSpanDeps in
- * frontend/BytecodeGenerator.cpp).
+ * frontend/BytecodeEmitter.cpp).
  *
  * If the span-dependency record index overflows SPANDEP_INDEX_MAX, the jump
  * offset will contain SPANDEP_INDEX_HUGE, indicating that the record must be
  * found (via binary search) by its "before span-dependency optimization" pc
  * offset (from script main entry point).
  */
 #define GET_SPANDEP_INDEX(pc)   ((uint16)GET_UINT16(pc))
 #define SET_SPANDEP_INDEX(pc,i) ((pc)[1] = JUMP_OFFSET_HI(i),                 \
@@ -240,17 +240,17 @@ typedef enum JSOp {
                                          ((uint32)((pc)[2]) << 16) |          \
                                          ((uint32)((pc)[3]) << 8)  |          \
                                          (uint32)(pc)[4]))
 #define SET_INT32(pc,i)         ((pc)[1] = (jsbytecode)((uint32)(i) >> 24),   \
                                  (pc)[2] = (jsbytecode)((uint32)(i) >> 16),   \
                                  (pc)[3] = (jsbytecode)((uint32)(i) >> 8),    \
                                  (pc)[4] = (jsbytecode)(uint32)(i))
 
-/* Index limit is determined by SN_3BYTE_OFFSET_FLAG, see frontend/BytecodeGenerator.h. */
+/* Index limit is determined by SN_3BYTE_OFFSET_FLAG, see frontend/BytecodeEmitter.h. */
 #define INDEX_LIMIT_LOG2        23
 #define INDEX_LIMIT             ((uint32)1 << INDEX_LIMIT_LOG2)
 
 /* Actual argument count operand format helpers. */
 #define ARGC_HI(argc)           UINT16_HI(argc)
 #define ARGC_LO(argc)           UINT16_LO(argc)
 #define GET_ARGC(pc)            GET_UINT16(pc)
 #define ARGC_LIMIT              UINT16_LIMIT
--- a/js/src/jsprvtd.h
+++ b/js/src/jsprvtd.h
@@ -157,17 +157,17 @@ class StackFrame;
 class StackSegment;
 class StackSpace;
 class ContextStack;
 class FrameRegsIter;
 class CallReceiver;
 class CallArgs;
 
 struct BytecodeCompiler;
-struct CodeGenerator;
+struct BytecodeEmitter;
 struct Definition;
 struct FunctionBox;
 struct ObjectBox;
 struct ParseNode;
 struct Parser;
 class TokenStream;
 struct Token;
 struct TokenPos;
--- a/js/src/jsreflect.cpp
+++ b/js/src/jsreflect.cpp
@@ -53,17 +53,17 @@
 #include "jsbool.h"
 #include "jsval.h"
 #include "jsinferinlines.h"
 #include "jsobjinlines.h"
 #include "jsobj.h"
 #include "jsarray.h"
 #include "jsnum.h"
 
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/Parser.h"
 #include "frontend/TokenStream.h"
 #include "vm/RegExpObject.h"
 
 #include "jsscriptinlines.h"
 
 using namespace mozilla;
 using namespace js;
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -62,17 +62,17 @@
 #include "jsopcode.h"
 #include "jsscope.h"
 #include "jsscript.h"
 #include "jstracer.h"
 #if JS_HAS_XDR
 #include "jsxdrapi.h"
 #endif
 
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/Parser.h"
 #include "methodjit/MethodJIT.h"
 #include "methodjit/Retcon.h"
 #include "vm/Debugger.h"
 
 #include "jsinferinlines.h"
 #include "jsinterpinlines.h"
 #include "jsobjinlines.h"
@@ -1053,177 +1053,177 @@ JSScript::NewScript(JSContext *cx, uint3
     script->id_ = 0;
 #endif
 
     JS_ASSERT(script->getVersion() == version);
     return script;
 }
 
 JSScript *
-JSScript::NewScriptFromCG(JSContext *cx, CodeGenerator *cg)
+JSScript::NewScriptFromCG(JSContext *cx, BytecodeEmitter *bce)
 {
     uint32 mainLength, prologLength, nsrcnotes, nfixed;
     JSScript *script;
     const char *filename;
     JSFunction *fun;
 
     /* The counts of indexed things must be checked during code generation. */
-    JS_ASSERT(cg->atomIndices->count() <= INDEX_LIMIT);
-    JS_ASSERT(cg->objectList.length <= INDEX_LIMIT);
-    JS_ASSERT(cg->regexpList.length <= INDEX_LIMIT);
+    JS_ASSERT(bce->atomIndices->count() <= INDEX_LIMIT);
+    JS_ASSERT(bce->objectList.length <= INDEX_LIMIT);
+    JS_ASSERT(bce->regexpList.length <= INDEX_LIMIT);
 
-    mainLength = CG_OFFSET(cg);
-    prologLength = CG_PROLOG_OFFSET(cg);
+    mainLength = CG_OFFSET(bce);
+    prologLength = CG_PROLOG_OFFSET(bce);
 
-    if (!cg->bindings.ensureShape(cx))
+    if (!bce->bindings.ensureShape(cx))
         return NULL;
 
-    CG_COUNT_FINAL_SRCNOTES(cg, nsrcnotes);
-    uint16 nClosedArgs = uint16(cg->closedArgs.length());
-    JS_ASSERT(nClosedArgs == cg->closedArgs.length());
-    uint16 nClosedVars = uint16(cg->closedVars.length());
-    JS_ASSERT(nClosedVars == cg->closedVars.length());
-    size_t upvarIndexCount = cg->upvarIndices.hasMap() ? cg->upvarIndices->count() : 0;
+    CG_COUNT_FINAL_SRCNOTES(bce, nsrcnotes);
+    uint16 nClosedArgs = uint16(bce->closedArgs.length());
+    JS_ASSERT(nClosedArgs == bce->closedArgs.length());
+    uint16 nClosedVars = uint16(bce->closedVars.length());
+    JS_ASSERT(nClosedVars == bce->closedVars.length());
+    size_t upvarIndexCount = bce->upvarIndices.hasMap() ? bce->upvarIndices->count() : 0;
     script = NewScript(cx, prologLength + mainLength, nsrcnotes,
-                       cg->atomIndices->count(), cg->objectList.length,
-                       upvarIndexCount, cg->regexpList.length,
-                       cg->ntrynotes, cg->constList.length(),
-                       cg->globalUses.length(), nClosedArgs, nClosedVars,
-                       cg->typesetCount, cg->version());
+                       bce->atomIndices->count(), bce->objectList.length,
+                       upvarIndexCount, bce->regexpList.length,
+                       bce->ntrynotes, bce->constList.length(),
+                       bce->globalUses.length(), nClosedArgs, nClosedVars,
+                       bce->typesetCount, bce->version());
     if (!script)
         return NULL;
 
-    cg->bindings.makeImmutable();
+    bce->bindings.makeImmutable();
 
     JS_ASSERT(script->mainOffset == 0);
     script->mainOffset = prologLength;
-    memcpy(script->code, CG_PROLOG_BASE(cg), prologLength * sizeof(jsbytecode));
-    memcpy(script->main(), CG_BASE(cg), mainLength * sizeof(jsbytecode));
-    nfixed = cg->inFunction()
-             ? cg->bindings.countVars()
-             : cg->sharpSlots();
+    memcpy(script->code, CG_PROLOG_BASE(bce), prologLength * sizeof(jsbytecode));
+    memcpy(script->main(), CG_BASE(bce), mainLength * sizeof(jsbytecode));
+    nfixed = bce->inFunction()
+             ? bce->bindings.countVars()
+             : bce->sharpSlots();
     JS_ASSERT(nfixed < SLOTNO_LIMIT);
     script->nfixed = (uint16) nfixed;
-    js_InitAtomMap(cx, cg->atomIndices.getMap(), script->atoms);
+    js_InitAtomMap(cx, bce->atomIndices.getMap(), script->atoms);
 
-    filename = cg->parser->tokenStream.getFilename();
+    filename = bce->parser->tokenStream.getFilename();
     if (filename) {
         script->filename = SaveScriptFilename(cx, filename);
         if (!script->filename)
             return NULL;
     }
-    script->lineno = cg->firstLine;
-    if (script->nfixed + cg->maxStackDepth >= JS_BIT(16)) {
-        ReportCompileErrorNumber(cx, CG_TS(cg), NULL, JSREPORT_ERROR, JSMSG_NEED_DIET, "script");
+    script->lineno = bce->firstLine;
+    if (script->nfixed + bce->maxStackDepth >= JS_BIT(16)) {
+        ReportCompileErrorNumber(cx, CG_TS(bce), NULL, JSREPORT_ERROR, JSMSG_NEED_DIET, "script");
         return NULL;
     }
-    script->nslots = script->nfixed + cg->maxStackDepth;
-    script->staticLevel = uint16(cg->staticLevel);
-    script->principals = cg->parser->principals;
+    script->nslots = script->nfixed + bce->maxStackDepth;
+    script->staticLevel = uint16(bce->staticLevel);
+    script->principals = bce->parser->principals;
     if (script->principals)
         JSPRINCIPALS_HOLD(cx, script->principals);
 
-    script->sourceMap = (jschar *) cg->parser->tokenStream.releaseSourceMap();
+    script->sourceMap = (jschar *) bce->parser->tokenStream.releaseSourceMap();
 
-    if (!FinishTakingSrcNotes(cx, cg, script->notes()))
+    if (!FinishTakingSrcNotes(cx, bce, script->notes()))
         return NULL;
-    if (cg->ntrynotes != 0)
-        FinishTakingTryNotes(cg, script->trynotes());
-    if (cg->objectList.length != 0)
-        cg->objectList.finish(script->objects());
-    if (cg->regexpList.length != 0)
-        cg->regexpList.finish(script->regexps());
-    if (cg->constList.length() != 0)
-        cg->constList.finish(script->consts());
-    if (cg->flags & TCF_NO_SCRIPT_RVAL)
+    if (bce->ntrynotes != 0)
+        FinishTakingTryNotes(bce, script->trynotes());
+    if (bce->objectList.length != 0)
+        bce->objectList.finish(script->objects());
+    if (bce->regexpList.length != 0)
+        bce->regexpList.finish(script->regexps());
+    if (bce->constList.length() != 0)
+        bce->constList.finish(script->consts());
+    if (bce->flags & TCF_NO_SCRIPT_RVAL)
         script->noScriptRval = true;
-    if (cg->hasSharps())
+    if (bce->hasSharps())
         script->hasSharps = true;
-    if (cg->flags & TCF_STRICT_MODE_CODE)
+    if (bce->flags & TCF_STRICT_MODE_CODE)
         script->strictModeCode = true;
-    if (cg->flags & TCF_COMPILE_N_GO) {
+    if (bce->flags & TCF_COMPILE_N_GO) {
         script->compileAndGo = true;
-        const StackFrame *fp = cg->parser->callerFrame;
+        const StackFrame *fp = bce->parser->callerFrame;
         if (fp && fp->isFunctionFrame())
             script->savedCallerFun = true;
     }
-    if (cg->callsEval())
+    if (bce->callsEval())
         script->usesEval = true;
-    if (cg->flags & TCF_FUN_USES_ARGUMENTS)
+    if (bce->flags & TCF_FUN_USES_ARGUMENTS)
         script->usesArguments = true;
-    if (cg->flags & TCF_HAS_SINGLETONS)
+    if (bce->flags & TCF_HAS_SINGLETONS)
         script->hasSingletons = true;
 
-    if (cg->hasUpvarIndices()) {
-        JS_ASSERT(cg->upvarIndices->count() <= cg->upvarMap.length());
-        memcpy(script->upvars()->vector, cg->upvarMap.begin(),
-               cg->upvarIndices->count() * sizeof(cg->upvarMap[0]));
-        cg->upvarIndices->clear();
-        cg->upvarMap.clear();
+    if (bce->hasUpvarIndices()) {
+        JS_ASSERT(bce->upvarIndices->count() <= bce->upvarMap.length());
+        memcpy(script->upvars()->vector, bce->upvarMap.begin(),
+               bce->upvarIndices->count() * sizeof(bce->upvarMap[0]));
+        bce->upvarIndices->clear();
+        bce->upvarMap.clear();
     }
 
-    if (cg->globalUses.length()) {
-        memcpy(script->globals()->vector, &cg->globalUses[0],
-               cg->globalUses.length() * sizeof(GlobalSlotArray::Entry));
+    if (bce->globalUses.length()) {
+        memcpy(script->globals()->vector, &bce->globalUses[0],
+               bce->globalUses.length() * sizeof(GlobalSlotArray::Entry));
     }
 
     if (script->nClosedArgs)
-        memcpy(script->closedSlots, &cg->closedArgs[0], script->nClosedArgs * sizeof(uint32));
+        memcpy(script->closedSlots, &bce->closedArgs[0], script->nClosedArgs * sizeof(uint32));
     if (script->nClosedVars) {
-        memcpy(&script->closedSlots[script->nClosedArgs], &cg->closedVars[0],
+        memcpy(&script->closedSlots[script->nClosedArgs], &bce->closedVars[0],
                script->nClosedVars * sizeof(uint32));
     }
 
-    script->bindings.transfer(cx, &cg->bindings);
+    script->bindings.transfer(cx, &bce->bindings);
 
     fun = NULL;
-    if (cg->inFunction()) {
+    if (bce->inFunction()) {
         /*
          * We initialize fun->script() to be the script constructed above
          * so that the debugger has a valid fun->script().
          */
-        fun = cg->fun();
+        fun = bce->fun();
         JS_ASSERT(fun->isInterpreted());
         JS_ASSERT(!fun->script());
 #ifdef DEBUG
         if (JSScript::isValidOffset(script->upvarsOffset))
             JS_ASSERT(script->upvars()->length == script->bindings.countUpvars());
         else
             JS_ASSERT(script->bindings.countUpvars() == 0);
 #endif
-        if (cg->flags & TCF_FUN_HEAVYWEIGHT)
+        if (bce->flags & TCF_FUN_HEAVYWEIGHT)
             fun->flags |= JSFUN_HEAVYWEIGHT;
 
         /* Watch for scripts whose functions will not be cloned. These are singletons. */
         bool singleton =
-            cx->typeInferenceEnabled() && cg->parent && cg->parent->compiling() &&
-            cg->parent->asCodeGenerator()->checkSingletonContext();
+            cx->typeInferenceEnabled() && bce->parent && bce->parent->compiling() &&
+            bce->parent->asBytecodeEmitter()->checkSingletonContext();
 
         if (!script->typeSetFunction(cx, fun, singleton))
             return NULL;
 
         fun->setScript(script);
         script->u.globalObject = fun->getParent() ? fun->getParent()->getGlobal() : NULL;
     } else {
         /*
          * Initialize script->object, if necessary, so that the debugger has a
          * valid holder object.
          */
-        if (cg->flags & TCF_NEED_SCRIPT_GLOBAL)
+        if (bce->flags & TCF_NEED_SCRIPT_GLOBAL)
             script->u.globalObject = GetCurrentGlobal(cx);
     }
 
     /* Tell the debugger about this compiled script. */
     js_CallNewScriptHook(cx, script, fun);
-    if (!cg->parent) {
+    if (!bce->parent) {
         GlobalObject *compileAndGoGlobal = NULL;
         if (script->compileAndGo) {
             compileAndGoGlobal = script->u.globalObject;
             if (!compileAndGoGlobal)
-                compileAndGoGlobal = cg->scopeChain()->getGlobal();
+                compileAndGoGlobal = bce->scopeChain()->getGlobal();
         }
         Debugger::onNewScript(cx, script, compileAndGoGlobal);
     }
 
     return script;
 }
 
 size_t
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -439,17 +439,17 @@ struct JSScript : public js::gc::Cell {
      * kind (function or other) of new JSScript.
      */
     static JSScript *NewScript(JSContext *cx, uint32 length, uint32 nsrcnotes, uint32 natoms,
                                uint32 nobjects, uint32 nupvars, uint32 nregexps,
                                uint32 ntrynotes, uint32 nconsts, uint32 nglobals,
                                uint16 nClosedArgs, uint16 nClosedVars, uint32 nTypeSets,
                                JSVersion version);
 
-    static JSScript *NewScriptFromCG(JSContext *cx, js::CodeGenerator *cg);
+    static JSScript *NewScriptFromCG(JSContext *cx, js::BytecodeEmitter *bce);
 
 #ifdef JS_CRASH_DIAGNOSTICS
     /*
      * Make sure that the cookie size does not affect the GC alignment
      * requirements.
      */
     uint32          cookie1[Cell::CellSize / sizeof(uint32)];
 #endif
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -74,17 +74,17 @@
 #include "jsopcode.h"
 #include "jsscope.h"
 #include "jsscript.h"
 #include "jstracer.h"
 #include "jsxml.h"
 #include "jstypedarray.h"
 
 #include "builtin/RegExp.h"
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 
 #include "jsatominlines.h"
 #include "jscntxtinlines.h"
 #include "jsfuninlines.h"
 #include "jsinterpinlines.h"
 #include "jspropertycacheinlines.h"
 #include "jsobjinlines.h"
 #include "jsscopeinlines.h"
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -56,17 +56,17 @@
 #include "jsscriptinlines.h"
 #include "InlineFrameAssembler.h"
 #include "jscompartment.h"
 #include "jsobjinlines.h"
 #include "jsopcodeinlines.h"
 #include "jshotloop.h"
 
 #include "builtin/RegExp.h"
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "vm/RegExpStatics.h"
 #include "vm/RegExpObject.h"
 
 #include "jsautooplen.h"
 #include "jstypedarrayinlines.h"
 #include "vm/RegExpObject-inl.h"
 
 using namespace js;
--- a/js/src/methodjit/FastOps.cpp
+++ b/js/src/methodjit/FastOps.cpp
@@ -42,17 +42,17 @@
 #include "jscntxt.h"
 #include "jslibmath.h"
 #include "jsnum.h"
 #include "jsscope.h"
 #include "jsobjinlines.h"
 #include "jsscriptinlines.h"
 #include "jstypedarrayinlines.h"
 
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "methodjit/MethodJIT.h"
 #include "methodjit/Compiler.h"
 #include "methodjit/StubCalls.h"
 #include "methodjit/FrameState-inl.h"
 
 #include "jsautooplen.h"
 
 using namespace js;
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -74,17 +74,17 @@
 #include "jsscope.h"
 #include "jsscript.h"
 #include "jstracer.h"
 #include "jstypedarray.h"
 #include "jstypedarrayinlines.h"
 #include "jsxml.h"
 #include "jsperf.h"
 
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "frontend/Parser.h"
 #include "methodjit/MethodJIT.h"
 
 #include "prmjtime.h"
 
 #ifdef JSDEBUGGER
 #include "jsdebug.h"
 #ifdef JSDEBUGGER_JAVA_UI
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -46,17 +46,17 @@
 #include "jsobj.h"
 #include "jswrapper.h"
 #include "jsarrayinlines.h"
 #include "jsinterpinlines.h"
 #include "jsobjinlines.h"
 #include "jsopcodeinlines.h"
 
 #include "frontend/BytecodeCompiler.h"
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 #include "methodjit/Retcon.h"
 
 #include "vm/Stack-inl.h"
 
 using namespace js;
 
 
 /*** Forward declarations ************************************************************************/
--- a/js/src/vm/GlobalObject.cpp
+++ b/js/src/vm/GlobalObject.cpp
@@ -41,17 +41,17 @@
 #include "GlobalObject.h"
 
 #include "jscntxt.h"
 #include "jsexn.h"
 #include "jsmath.h"
 #include "json.h"
 
 #include "builtin/RegExp.h"
-#include "frontend/BytecodeGenerator.h"
+#include "frontend/BytecodeEmitter.h"
 
 #include "jsobjinlines.h"
 #include "vm/RegExpObject-inl.h"
 
 using namespace js;
 
 JSObject *
 js_InitObjectClass(JSContext *cx, JSObject *obj)
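
Below is a minimal, hypothetical sketch of what this rename means for code outside the patch: a caller that previously held a js::CodeGenerator now holds a js::BytecodeEmitter, includes frontend/BytecodeEmitter.h instead of frontend/BytecodeGenerator.h, and keeps using the same entry points (e.g. JSScript::NewScriptFromCG, whose parameter type is respelled in the jsscript.h hunk above). The file and function names in the sketch (myconsumer.cpp, my_Finish) are illustrative only and are not part of the tree.

    // myconsumer.cpp: hypothetical downstream consumer, not part of this changeset.
    #include "frontend/BytecodeEmitter.h"   // was: #include "frontend/BytecodeGenerator.h"
    #include "jsscript.h"

    // Before the rename this took js::CodeGenerator *cg; only the type (and the
    // conventional variable name) changes. The emitter-to-script handoff is still
    // JSScript::NewScriptFromCG, as declared in the jsscript.h hunk above.
    static JSScript *
    my_Finish(JSContext *cx, js::BytecodeEmitter *bce)
    {
        return JSScript::NewScriptFromCG(cx, bce);
    }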