Bug 649576: Extricate JSHashTable from JSAtomList death grip. (r=luke)
authorChris Leary <cdleary@mozilla.com>
Fri, 24 Jun 2011 14:22:30 -0700
changeset 71869 3d646df22a4b6280fe7d6bd5c617841eda96c516
parent 71868 e77af15dc4d410e0fa21683d3fb9d53f3a882a74
child 71870 7208baa27f172e85d81ad3f81155a4102f354830
push id20619
push usercleary@mozilla.com
push dateMon, 27 Jun 2011 18:09:34 +0000
treeherdermozilla-central@f999ca02a687 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersluke
bugs649576
milestone7.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 649576: Extricate JSHashTable from JSAtomList death grip. (r=luke)
js/src/Makefile.in
js/src/frontend/ParseMaps-inl.h
js/src/frontend/ParseMaps.cpp
js/src/frontend/ParseMaps.h
js/src/jit-test/tests/basic/multiple-declared-args-syntax.js
js/src/jit-test/tests/basic/strict-catch-ident-syntax.js
js/src/jsatom.cpp
js/src/jsatom.h
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jscntxtinlines.h
js/src/jsdbgapi.cpp
js/src/jsemit.cpp
js/src/jsemit.h
js/src/jshashtable.h
js/src/jsobj.h
js/src/jsparse.cpp
js/src/jsparse.h
js/src/jsprvtd.h
js/src/jsscan.h
js/src/jsscript.cpp
js/src/jstl.h
js/src/jsvector.h
js/src/mfbt/InlineMap.h
mfbt/Util.h
--- a/js/src/Makefile.in
+++ b/js/src/Makefile.in
@@ -181,16 +181,17 @@ CPPSRCS		= \
 		jswrapper.cpp \
 		jsxdrapi.cpp \
 		jsxml.cpp \
 		prmjtime.cpp \
 		sharkctl.cpp \
 		GlobalObject.cpp \
 		Stack.cpp \
 		String.cpp \
+		ParseMaps.cpp \
 		$(NULL)
 
 # Changes to internal header files, used externally, massively slow down
 # browser builds.  Don't add new files here unless you know what you're
 # doing!
 INSTALLED_HEADERS = \
 		js-config.h \
 		jsautocfg.h \
@@ -252,21 +253,22 @@ INSTALLED_HEADERS = \
 		jsversion.h \
 		jswrapper.h \
 		jsxdrapi.h \
 		jsval.h \
 		jsvalue.h \
 		prmjtime.h \
 		$(NULL)
 
-###############################################
-# BEGIN include sources for the vm subdirectory
+######################################################
+# BEGIN include sources for the engine subdirectories
 #
 VPATH		+= \
-		$(srcdir)/vm 	\
+		$(srcdir)/vm \
+		$(srcdir)/frontend \
 		$(NULL)
 
 EXPORTS_NAMESPACES = vm
 
 EXPORTS_vm = \
 		ArgumentsObject.h \
 		GlobalObject.h	 \
 		Stack.h \
@@ -640,17 +642,17 @@ check-malloc-function-usage: $(filter-ou
 		"in Makefile.in" "cx->calloc_ or rt->calloc_" $^
 	$(srcdir)/config/check_source_count.py "\bjs_realloc\b" 0 \
 		"in Makefile.in" "cx->realloc_ or rt->realloc_" $^
 	$(srcdir)/config/check_source_count.py "\bjs_free\b" 0 \
 		"in Makefile.in" "cx->free_" $^
 
 	# We desire these numbers to go down, not up. See "User guide to memory
 	# management within SpiderMonkey" in jsutil.h.
-	$(srcdir)/config/check_source_count.py OffTheBooks:: 53 \
+	$(srcdir)/config/check_source_count.py OffTheBooks:: 54 \
 		"in Makefile.in" "{cx,rt}->{new_,new_array,malloc_,calloc_,realloc_}" $^
 	# This should go to zero, if possible.
 	$(srcdir)/config/check_source_count.py UnwantedForeground:: 33 \
 		"in Makefile.in" "{cx,rt}->{free_,delete_,array_delete}" $^
 
 ifneq ($(OS_ARCH),WINNT) # FIXME: this should be made work on Windows too.
 check:: check-malloc-function-usage
 endif
new file mode 100644
--- /dev/null
+++ b/js/src/frontend/ParseMaps-inl.h
@@ -0,0 +1,161 @@
+/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=4 sw=4 et tw=99 ft=cpp:
+ *
+ * ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is SpiderMonkey JavaScript engine.
+ *
+ * The Initial Developer of the Original Code is
+ * Mozilla Corporation.
+ * Portions created by the Initial Developer are Copyright (C) 2011
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ *   Chris Leary <cdleary@mozilla.com>
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either the GNU General Public License Version 2 or later (the "GPL"), or
+ * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+#ifndef ParseMapPool_inl_h__
+#define ParseMapPool_inl_h__
+
+#include "jscntxt.h"
+#include "jsparse.h" /* Need sizeof(JSDefinition). */
+
+#include "ParseMaps.h"
+
+namespace js {
+
+template <>
+inline AtomDefnMap *
+ParseMapPool::acquire<AtomDefnMap>()
+{
+    return reinterpret_cast<AtomDefnMap *>(allocate());
+}
+
+template <>
+inline AtomIndexMap *
+ParseMapPool::acquire<AtomIndexMap>()
+{
+    return reinterpret_cast<AtomIndexMap *>(allocate());
+}
+
+template <>
+inline AtomDOHMap *
+ParseMapPool::acquire<AtomDOHMap>()
+{
+    return reinterpret_cast<AtomDOHMap *>(allocate());
+}
+
+inline void *
+ParseMapPool::allocate()
+{
+    if (recyclable.empty())
+        return allocateFresh();
+
+    void *map = recyclable.popCopy();
+    asAtomMap(map)->clear();
+    return map;
+}
+
+inline JSDefinition *
+AtomDecls::lookupFirst(JSAtom *atom)
+{
+    JS_ASSERT(map);
+    AtomDOHPtr p = map->lookup(atom);
+    if (!p)
+        return NULL;
+    if (p.value().isHeader()) {
+        /* Just return the head defn. */
+        return p.value().header()->defn;
+    }
+    return p.value().defn();
+}
+
+inline MultiDeclRange
+AtomDecls::lookupMulti(JSAtom *atom)
+{
+    JS_ASSERT(map);
+    AtomDOHPtr p = map->lookup(atom);
+    if (!p)
+        return MultiDeclRange((JSDefinition *) NULL);
+
+    DefnOrHeader &doh = p.value();
+    if (doh.isHeader())
+        return MultiDeclRange(doh.header());
+    else
+        return MultiDeclRange(doh.defn());
+}
+
+inline bool
+AtomDecls::addUnique(JSAtom *atom, JSDefinition *defn)
+{
+    JS_ASSERT(map);
+    AtomDOHAddPtr p = map->lookupForAdd(atom);
+    if (p) {
+        JS_ASSERT(!p.value().isHeader());
+        p.value() = DefnOrHeader(defn);
+        return true;
+    }
+    return map->add(p, atom, DefnOrHeader(defn));
+}
+
+template <class Map>
+inline bool
+AtomThingMapPtr<Map>::ensureMap(JSContext *cx)
+{
+    if (map_)
+        return true;
+    map_ = cx->parseMapPool().acquire<Map>();
+    return !!map_;
+}
+
+template <class Map>
+inline void
+AtomThingMapPtr<Map>::releaseMap(JSContext *cx)
+{
+    if (!map_)
+        return;
+    cx->parseMapPool().release(map_);
+    map_ = NULL;
+}
+
+inline bool
+AtomDecls::init()
+{
+    map = cx->parseMapPool().acquire<AtomDOHMap>();
+    return map;
+}
+
+inline
+AtomDecls::~AtomDecls()
+{
+    if (map)
+        cx->parseMapPool().release(map);
+}
+
+} /* namespace js */
+
+#endif
new file mode 100644
--- /dev/null
+++ b/js/src/frontend/ParseMaps.cpp
@@ -0,0 +1,196 @@
+/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=4 sw=4 et tw=99 ft=cpp:
+ *
+ * ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is SpiderMonkey JavaScript engine.
+ *
+ * The Initial Developer of the Original Code is
+ * Mozilla Corporation.
+ * Portions created by the Initial Developer are Copyright (C) 2011
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ *   Chris Leary <cdleary@mozilla.com>
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either the GNU General Public License Version 2 or later (the "GPL"), or
+ * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+#include "ParseMaps-inl.h"
+
+using namespace js;
+
+void
+ParseMapPool::checkInvariants()
+{
+    /*
+     * Having all values be of the same size permits us to easily reuse the
+     * allocated space for each of the map types.
+     */
+    JS_STATIC_ASSERT(sizeof(JSDefinition *) == sizeof(jsatomid));
+    JS_STATIC_ASSERT(sizeof(JSDefinition *) == sizeof(DefnOrHeader));
+    JS_STATIC_ASSERT(sizeof(AtomDefnMap::Entry) == sizeof(AtomIndexMap::Entry));
+    JS_STATIC_ASSERT(sizeof(AtomDefnMap::Entry) == sizeof(AtomDOHMap::Entry));
+    JS_STATIC_ASSERT(sizeof(AtomMapT::Entry) == sizeof(AtomDOHMap::Entry));
+    /* Ensure that the HashTable::clear goes quickly via memset. */
+    JS_STATIC_ASSERT(tl::IsPodType<AtomIndexMap::WordMap::Entry>::result);
+    JS_STATIC_ASSERT(tl::IsPodType<AtomDOHMap::WordMap::Entry>::result);
+    JS_STATIC_ASSERT(tl::IsPodType<AtomDefnMap::WordMap::Entry>::result);
+}
+
+void
+ParseMapPool::purgeAll()
+{
+    for (void **it = all.begin(), **end = all.end(); it != end; ++it)
+        cx->delete_<AtomMapT>(asAtomMap(*it));
+
+    all.clearAndFree();
+    recyclable.clearAndFree();
+}
+
+void *
+ParseMapPool::allocateFresh()
+{
+    size_t newAllLength = all.length() + 1;
+    if (!all.reserve(newAllLength) || !recyclable.reserve(newAllLength))
+        return NULL;
+
+    AtomMapT *map = cx->new_<AtomMapT>(cx);
+    if (!map)
+        return NULL;
+
+    all.infallibleAppend(map);
+    return (void *) map;
+}
+
+#ifdef DEBUG
+void
+AtomDecls::dump()
+{
+    for (AtomDOHRange r = map->all(); !r.empty(); r.popFront()) {
+        fprintf(stderr, "atom: ");
+        js_DumpAtom(r.front().key());
+        const DefnOrHeader &doh = r.front().value();
+        if (doh.isHeader()) {
+            AtomDeclNode *node = doh.header();
+            do {
+                fprintf(stderr, "  node: %p\n", (void *) node);
+                fprintf(stderr, "    defn: %p\n", (void *) node->defn);
+                node = node->next;
+            } while (node);
+        } else {
+            fprintf(stderr, "  defn: %p\n", (void *) doh.defn());
+        }
+    }
+}
+
+void
+DumpAtomDefnMap(const AtomDefnMapPtr &map)
+{
+    if (map->empty()) {
+        fprintf(stderr, "empty\n");
+        return;
+    }
+
+    for (AtomDefnRange r = map->all(); !r.empty(); r.popFront()) {
+        fprintf(stderr, "atom: ");
+        js_DumpAtom(r.front().key());
+        fprintf(stderr, "defn: %p\n", (void *) r.front().value());
+    }
+}
+#endif
+
+AtomDeclNode *
+AtomDecls::allocNode(JSDefinition *defn)
+{
+    AtomDeclNode *p;
+    JS_ARENA_ALLOCATE_TYPE(p, AtomDeclNode, &cx->tempPool);
+    if (!p) {
+        js_ReportOutOfMemory(cx);
+        return NULL;
+    }
+    return new (p) AtomDeclNode(defn);
+}
+
+bool
+AtomDecls::addShadow(JSAtom *atom, JSDefinition *defn)
+{
+    AtomDeclNode *node = allocNode(defn);
+    if (!node)
+        return false;
+
+    AtomDOHAddPtr p = map->lookupForAdd(atom);
+    if (!p)
+        return map->add(p, atom, DefnOrHeader(node));
+
+    AtomDeclNode *toShadow;
+    if (p.value().isHeader()) {
+        toShadow = p.value().header();
+    } else {
+        toShadow = allocNode(p.value().defn());
+        if (!toShadow)
+            return false;
+    }
+    node->next = toShadow;
+    p.value() = DefnOrHeader(node);
+    return true;
+}
+
+AtomDeclNode *
+AtomDecls::lastAsNode(DefnOrHeader *doh)
+{
+    if (doh->isHeader()) {
+        AtomDeclNode *last = doh->header();
+        while (last->next)
+            last = last->next;
+        return last;
+    }
+
+    /* Otherwise, we need to turn the existing defn into a node. */
+    AtomDeclNode *node = allocNode(doh->defn());
+    if (!node)
+        return NULL;
+    *doh = DefnOrHeader(node);
+    return node;
+}
+
+bool
+AtomDecls::addHoist(JSAtom *atom, JSDefinition *defn)
+{
+    AtomDeclNode *node = allocNode(defn);
+    if (!node)
+        return false;
+
+    AtomDOHAddPtr p = map->lookupForAdd(atom);
+    if (p) {
+        AtomDeclNode *last = lastAsNode(&p.value());
+        if (!last)
+            return false;
+        last->next = node;
+        return true;
+    }
+
+    return map->add(p, atom, DefnOrHeader(node));
+}
new file mode 100644
--- /dev/null
+++ b/js/src/frontend/ParseMaps.h
@@ -0,0 +1,425 @@
+/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=4 sw=4 et tw=99 ft=cpp:
+ *
+ * ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is SpiderMonkey JavaScript engine.
+ *
+ * The Initial Developer of the Original Code is
+ * Mozilla Corporation.
+ * Portions created by the Initial Developer are Copyright (C) 2011
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ *   Chris Leary <cdleary@mozilla.com>
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either the GNU General Public License Version 2 or later (the "GPL"), or
+ * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+#ifndef ParseMaps_h__
+#define ParseMaps_h__
+
+#include "jsvector.h"
+
+#include "mfbt/InlineMap.h"
+
+namespace js {
+
+/*
+ * A pool that permits the reuse of the backing storage for the defn, index, or
+ * defn-or-header (multi) maps.
+ *
+ * The pool owns all the maps that are given out, and is responsible for
+ * relinquishing all resources when |purgeAll| is triggered.
+ */
+class ParseMapPool
+{
+    typedef Vector<void *, 32, SystemAllocPolicy> RecyclableMaps;
+
+    RecyclableMaps      all;
+    RecyclableMaps      recyclable;
+    JSContext           *cx;
+
+    void checkInvariants();
+
+    void recycle(void *map) {
+        JS_ASSERT(map);
+#ifdef DEBUG
+        bool ok = false;
+        /* Make sure the map is in |all| but not already in |recyclable|. */
+        for (void **it = all.begin(), **end = all.end(); it != end; ++it) {
+            if (*it == map) {
+                ok = true;
+                break;
+            }
+        }
+        JS_ASSERT(ok);
+        for (void **it = recyclable.begin(), **end = recyclable.end(); it != end; ++it)
+            JS_ASSERT(*it != map);
+#endif
+        JS_ASSERT(recyclable.length() < all.length());
+        recyclable.infallibleAppend(map); /* Reserved in allocateFresh. */
+    }
+
+    void *allocateFresh();
+    void *allocate();
+
+    /* Arbitrary atom map type, that has keys and values of the same kind. */
+    typedef AtomIndexMap AtomMapT;
+
+    static AtomMapT *asAtomMap(void *ptr) {
+        return reinterpret_cast<AtomMapT *>(ptr);
+    }
+
+  public:
+    explicit ParseMapPool(JSContext *cx) : cx(cx) {}
+
+    ~ParseMapPool() {
+        purgeAll();
+    }
+
+    void purgeAll();
+
+    bool empty() const {
+        return all.empty();
+    }
+
+    /* Fallibly acquire one of the supported map types from the pool. */
+    template <typename T>
+    T *acquire();
+
+    /* Release one of the supported map types back to the pool. */
+
+    void release(AtomIndexMap *map) {
+        recycle((void *) map);
+    }
+
+    void release(AtomDefnMap *map) {
+        recycle((void *) map);
+    }
+
+    void release(AtomDOHMap *map) {
+        recycle((void *) map);
+    }
+}; /* ParseMapPool */
+
+/*
+ * N.B. This is a POD-type so that it can be included in the JSParseNode union.
+ * If possible, use the corresponding |OwnedAtomThingMapPtr| variant.
+ */
+template <class Map>
+struct AtomThingMapPtr
+{
+    Map *map_;
+
+    void init() { clearMap(); }
+
+    bool ensureMap(JSContext *cx);
+    void releaseMap(JSContext *cx);
+
+    bool hasMap() const { return map_; }
+    Map *getMap() { return map_; }
+    void setMap(Map *newMap) { JS_ASSERT(!map_); map_ = newMap; }
+    void clearMap() { map_ = NULL; }
+
+    Map *operator->() { return map_; }
+    const Map *operator->() const { return map_; }
+    Map &operator*() const { return *map_; }
+};
+
+struct AtomDefnMapPtr : public AtomThingMapPtr<AtomDefnMap>
+{
+    JS_ALWAYS_INLINE
+    JSDefinition *lookupDefn(JSAtom *atom) {
+        AtomDefnMap::Ptr p = map_->lookup(atom);
+        return p ? p.value() : NULL;
+    }
+};
+
+typedef AtomThingMapPtr<AtomIndexMap> AtomIndexMapPtr;
+
+/*
+ * Wrapper around an AtomThingMapPtr (or its derivatives) that automatically
+ * releases a map on destruction, if one has been acquired.
+ */
+template <typename AtomThingMapPtrT>
+class OwnedAtomThingMapPtr : public AtomThingMapPtrT
+{
+    JSContext *cx;
+
+  public:
+    explicit OwnedAtomThingMapPtr(JSContext *cx) : cx(cx) {
+        AtomThingMapPtrT::init();
+    }
+
+    ~OwnedAtomThingMapPtr() {
+        AtomThingMapPtrT::releaseMap(cx);
+    }
+};
+
+typedef OwnedAtomThingMapPtr<AtomDefnMapPtr> OwnedAtomDefnMapPtr;
+typedef OwnedAtomThingMapPtr<AtomIndexMapPtr> OwnedAtomIndexMapPtr;
+
+/* Node structure for chaining in AtomDecls. */
+struct AtomDeclNode
+{
+    JSDefinition *defn;
+    AtomDeclNode *next;
+
+    explicit AtomDeclNode(JSDefinition *defn)
+      : defn(defn), next(NULL)
+    {}
+};
+
+/*
+ * Tagged union of a JSDefinition and an AtomDeclNode, for use in AtomDecl's
+ * internal map.
+ */
+class DefnOrHeader
+{
+    union {
+        JSDefinition    *defn;
+        AtomDeclNode    *head;
+        uintptr_t       bits;
+    } u;
+
+  public:
+    DefnOrHeader() {
+        u.bits = 0;
+    }
+
+    explicit DefnOrHeader(JSDefinition *defn) {
+        u.defn = defn;
+        JS_ASSERT(!isHeader());
+    }
+
+    explicit DefnOrHeader(AtomDeclNode *node) {
+        u.head = node;
+        u.bits |= 0x1;
+        JS_ASSERT(isHeader());
+    }
+
+    bool isHeader() const {
+        return u.bits & 0x1;
+    }
+
+    JSDefinition *defn() const {
+        JS_ASSERT(!isHeader());
+        return u.defn;
+    }
+
+    AtomDeclNode *header() const {
+        JS_ASSERT(isHeader());
+        return (AtomDeclNode *) (u.bits & ~0x1);
+    }
+
+#ifdef DEBUG
+    void dump();
+#endif
+};
+
+namespace tl {
+
+template <> struct IsPodType<DefnOrHeader> {
+    static const bool result = true;
+};
+
+} /* namespace tl */
+
+/*
+ * Multimap for function-scope atom declarations.
+ *
+ * Wraps an internal DefnOrHeader map with multi-map functionality.
+ *
+ * In the common case, no block scoping is used, and atoms have a single
+ * associated definition. In the uncommon (block scoping) case, we map the atom
+ * to a chain of definition nodes.
+ */
+class AtomDecls
+{
+    /* AtomDeclsIter needs to get at the DOHMap directly. */
+    friend class AtomDeclsIter;
+
+    JSContext   *cx;
+    AtomDOHMap  *map;
+
+    AtomDecls(const AtomDecls &other);
+    void operator=(const AtomDecls &other);
+
+    AtomDeclNode *allocNode(JSDefinition *defn);
+
+    /*
+     * Fallibly return the value in |doh| as a node.
+     * Update the defn currently occupying |doh| to a node if necessary.
+     */
+    AtomDeclNode *lastAsNode(DefnOrHeader *doh);
+
+  public:
+    explicit AtomDecls(JSContext *cx)
+      : cx(cx), map(NULL)
+    {}
+
+    ~AtomDecls();
+
+    bool init();
+
+    void clear() {
+        map->clear();
+    }
+
+    /* Return the definition at the head of the chain for |atom|. */
+    inline JSDefinition *lookupFirst(JSAtom *atom);
+
+    /* Perform a lookup that can iterate over the definitions associated with |atom|. */
+    inline MultiDeclRange lookupMulti(JSAtom *atom);
+
+    /* Add-or-update a known-unique definition for |atom|. */
+    inline bool addUnique(JSAtom *atom, JSDefinition *defn);
+    bool addShadow(JSAtom *atom, JSDefinition *defn);
+    bool addHoist(JSAtom *atom, JSDefinition *defn);
+
+    /* Updating the definition for an entry that is known to exist is infallible. */
+    void update(JSAtom *atom, JSDefinition *defn) {
+        JS_ASSERT(map);
+        AtomDOHMap::Ptr p = map->lookup(atom);
+        JS_ASSERT(p);
+        JS_ASSERT(!p.value().isHeader());
+        p.value() = DefnOrHeader(defn);
+    }
+
+    /* Remove the node at the head of the chain for |atom|. */
+    void remove(JSAtom *atom) {
+        JS_ASSERT(map);
+        AtomDOHMap::Ptr p = map->lookup(atom);
+        if (!p)
+            return;
+
+        DefnOrHeader &doh = p.value();
+        if (!doh.isHeader()) {
+            map->remove(p);
+            return;
+        }
+
+        AtomDeclNode *node = doh.header();
+        AtomDeclNode *newHead = node->next;
+        if (newHead)
+            p.value() = DefnOrHeader(newHead);
+        else
+            map->remove(p);
+    }
+
+    AtomDOHMap::Range all() {
+        JS_ASSERT(map);
+        return map->all();
+    }
+
+#ifdef DEBUG
+    void dump();
+#endif
+};
+
+/*
+ * Lookup state tracker for those situations where the caller wants to traverse
+ * multiple definitions associated with a single atom. This occurs due to block
+ * scoping.
+ */
+class MultiDeclRange
+{
+    friend class AtomDecls;
+
+    AtomDeclNode *node;
+    JSDefinition *defn;
+
+    explicit MultiDeclRange(JSDefinition *defn) : node(NULL), defn(defn) {}
+    explicit MultiDeclRange(AtomDeclNode *node) : node(node), defn(node->defn) {}
+
+  public:
+    void popFront() {
+        JS_ASSERT(!empty());
+        if (!node) {
+            defn = NULL;
+            return;
+        }
+        node = node->next;
+        defn = node ? node->defn : NULL;
+    }
+
+    JSDefinition *front() {
+        JS_ASSERT(!empty());
+        return defn;
+    }
+
+    bool empty() const {
+        JS_ASSERT_IF(!defn, !node);
+        return !defn;
+    }
+};
+
+/* Iterates over all the definitions in an AtomDecls. */
+class AtomDeclsIter
+{
+    AtomDOHMap::Range   r;     /* Range over the map. */
+    AtomDeclNode        *link; /* Optional next node in the current atom's chain. */
+
+  public:
+    explicit AtomDeclsIter(AtomDecls *decls) : r(decls->all()), link(NULL) {}
+
+    JSDefinition *operator()() {
+        if (link) {
+            JS_ASSERT(link != link->next);
+            JSDefinition *result = link->defn;
+            link = link->next;
+            JS_ASSERT(result);
+            return result;
+        }
+
+        if (r.empty())
+            return NULL;
+
+        const DefnOrHeader &doh = r.front().value();
+        r.popFront();
+
+        if (!doh.isHeader())
+            return doh.defn();
+
+        JS_ASSERT(!link);
+        AtomDeclNode *node = doh.header();
+        link = node->next;
+        return node->defn;
+    }
+};
+
+typedef AtomDefnMap::Range      AtomDefnRange;
+typedef AtomDefnMap::AddPtr     AtomDefnAddPtr;
+typedef AtomDefnMap::Ptr        AtomDefnPtr;
+typedef AtomIndexMap::AddPtr    AtomIndexAddPtr;
+typedef AtomIndexMap::Ptr       AtomIndexPtr;
+typedef AtomDOHMap::Ptr         AtomDOHPtr;
+typedef AtomDOHMap::AddPtr      AtomDOHAddPtr;
+typedef AtomDOHMap::Range       AtomDOHRange;
+
+} /* namespace js */
+
+#endif
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/basic/multiple-declared-args-syntax.js
@@ -0,0 +1,1 @@
+assertEq((function (a, b, a) { return a; })(2, 4, 6), 6);
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/basic/strict-catch-ident-syntax.js
@@ -0,0 +1,9 @@
+/* Parse correctly. */
+
+function assignToClassListStrict(e) {
+  "use strict";
+  try {
+    e.classList = "foo";
+    ok(false, "assigning to classList didn't throw");
+  } catch (e) { }
+}
--- a/js/src/jsatom.cpp
+++ b/js/src/jsatom.cpp
@@ -636,341 +636,53 @@ js_DumpAtoms(JSContext *cx, FILE *fp)
         if (entry.isInterned())
             fputs(" interned", fp);
         putc('\n', fp);
     }
     putc('\n', fp);
 }
 #endif
 
-static JSHashNumber
-js_hash_atom_ptr(const void *key)
-{
-    const JSAtom *atom = (const JSAtom *) key;
-    return ATOM_HASH(atom);
-}
-
 #if JS_BITS_PER_WORD == 32
 # define TEMP_SIZE_START_LOG2   5
 #else
 # define TEMP_SIZE_START_LOG2   6
 #endif
 #define TEMP_SIZE_LIMIT_LOG2    (TEMP_SIZE_START_LOG2 + NUM_TEMP_FREELISTS)
 
 #define TEMP_SIZE_START         JS_BIT(TEMP_SIZE_START_LOG2)
 #define TEMP_SIZE_LIMIT         JS_BIT(TEMP_SIZE_LIMIT_LOG2)
 
 JS_STATIC_ASSERT(TEMP_SIZE_START >= sizeof(JSHashTable));
 
-static void *
-js_alloc_temp_space(void *priv, size_t size)
+void
+js_InitAtomMap(JSContext *cx, JSAtomMap *map, AtomIndexMap *indices)
 {
-    Parser *parser = (Parser *) priv;
+    /* Map length must already be initialized. */
+    JS_ASSERT(indices->count() == map->length);
 
-    void *space;
-    if (size < TEMP_SIZE_LIMIT) {
-        int bin = JS_CeilingLog2(size) - TEMP_SIZE_START_LOG2;
-        JS_ASSERT(unsigned(bin) < NUM_TEMP_FREELISTS);
-
-        space = parser->tempFreeList[bin];
-        if (space) {
-            parser->tempFreeList[bin] = *(void **)space;
-            return space;
+    if (indices->isMap()) {
+        typedef AtomIndexMap::WordMap WordMap;
+        const WordMap &wm = indices->asMap();
+        for (WordMap::Range r = wm.all(); !r.empty(); r.popFront()) {
+            JSAtom *atom = r.front().key;
+            jsatomid index = r.front().value;
+            JS_ASSERT(index < map->length);
+            map->vector[index] = atom;
+        }
+    } else {
+        for (const AtomIndexMap::InlineElem *it = indices->asInline(), *end = indices->inlineEnd();
+             it != end; ++it) {
+            JSAtom *atom = it->key;
+            if (!atom)
+                continue;
+            JS_ASSERT(it->value < map->length);
+            map->vector[it->value] = atom;
         }
     }
-
-    JS_ARENA_ALLOCATE(space, &parser->context->tempPool, size);
-    if (!space)
-        js_ReportOutOfMemory(parser->context);
-    return space;
-}
-
-static void
-js_free_temp_space(void *priv, void *item, size_t size)
-{
-    if (size >= TEMP_SIZE_LIMIT)
-        return;
-
-    Parser *parser = (Parser *) priv;
-    int bin = JS_CeilingLog2(size) - TEMP_SIZE_START_LOG2;
-    JS_ASSERT(unsigned(bin) < NUM_TEMP_FREELISTS);
-
-    *(void **)item = parser->tempFreeList[bin];
-    parser->tempFreeList[bin] = item;
-}
-
-static JSHashEntry *
-js_alloc_temp_entry(void *priv, const void *key)
-{
-    Parser *parser = (Parser *) priv;
-    JSAtomListElement *ale;
-
-    ale = parser->aleFreeList;
-    if (ale) {
-        parser->aleFreeList = ALE_NEXT(ale);
-        return &ale->entry;
-    }
-
-    JS_ARENA_ALLOCATE_TYPE(ale, JSAtomListElement, &parser->context->tempPool);
-    if (!ale) {
-        js_ReportOutOfMemory(parser->context);
-        return NULL;
-    }
-    return &ale->entry;
-}
-
-static void
-js_free_temp_entry(void *priv, JSHashEntry *he, uintN flag)
-{
-    Parser *parser = (Parser *) priv;
-    JSAtomListElement *ale = (JSAtomListElement *) he;
-
-    ALE_SET_NEXT(ale, parser->aleFreeList);
-    parser->aleFreeList = ale;
-}
-
-static JSHashAllocOps temp_alloc_ops = {
-    js_alloc_temp_space,    js_free_temp_space,
-    js_alloc_temp_entry,    js_free_temp_entry
-};
-
-JSAtomListElement *
-JSAtomList::rawLookup(JSAtom *atom, JSHashEntry **&hep)
-{
-    if (table) {
-        hep = JS_HashTableRawLookup(table, ATOM_HASH(atom), atom);
-        return (JSAtomListElement *) *hep;
-    }
-
-    JSHashEntry **alep = &list;
-    hep = NULL;
-    JSAtomListElement *ale;
-    while ((ale = (JSAtomListElement *)*alep) != NULL) {
-        if (ALE_ATOM(ale) == atom) {
-            /* Hit, move atom's element to the front of the list. */
-            *alep = ale->entry.next;
-            ale->entry.next = list;
-            list = &ale->entry;
-            break;
-        }
-        alep = &ale->entry.next;
-    }
-    return ale;
-}
-
-#define ATOM_LIST_HASH_THRESHOLD        12
-
-JSAtomListElement *
-JSAtomList::add(Parser *parser, JSAtom *atom, AddHow how)
-{
-    JS_ASSERT(!set);
-
-    JSAtomListElement *ale, *ale2, *next;
-    JSHashEntry **hep;
-
-    ale = rawLookup(atom, hep);
-    if (!ale || how != UNIQUE) {
-        if (count < ATOM_LIST_HASH_THRESHOLD && !table) {
-            /* Few enough for linear search and no hash table yet needed. */
-            ale = (JSAtomListElement *)js_alloc_temp_entry(parser, atom);
-            if (!ale)
-                return NULL;
-            ALE_SET_ATOM(ale, atom);
-
-            if (how == HOIST) {
-                ale->entry.next = NULL;
-                hep = (JSHashEntry **) &list;
-                while (*hep)
-                    hep = &(*hep)->next;
-                *hep = &ale->entry;
-            } else {
-                ale->entry.next = list;
-                list = &ale->entry;
-            }
-        } else {
-            /*
-             * We should hash, or else we already are hashing, but count was
-             * reduced by JSAtomList::rawRemove below ATOM_LIST_HASH_THRESHOLD.
-             * Check whether we should create the table.
-             */
-            if (!table) {
-                /* No hash table yet, so hep had better be null! */
-                JS_ASSERT(!hep);
-                table = JS_NewHashTable(count + 1, js_hash_atom_ptr,
-                                        JS_CompareValues, JS_CompareValues,
-                                        &temp_alloc_ops, parser);
-                if (!table)
-                    return NULL;
-
-                /*
-                 * Set ht->nentries explicitly, because we are moving entries
-                 * from list to ht, not calling JS_HashTable(Raw|)Add.
-                 */
-                table->nentries = count;
-
-                /*
-                 * Insert each ale on list into the new hash table. Append to
-                 * the hash chain rather than inserting at the bucket head, to
-                 * preserve order among entries with the same key.
-                 */
-                for (ale2 = (JSAtomListElement *)list; ale2; ale2 = next) {
-                    next = ALE_NEXT(ale2);
-                    ale2->entry.keyHash = ATOM_HASH(ALE_ATOM(ale2));
-                    hep = JS_HashTableRawLookup(table, ale2->entry.keyHash,
-                                                ale2->entry.key);
-                    while (*hep)
-                        hep = &(*hep)->next;
-                    *hep = &ale2->entry;
-                    ale2->entry.next = NULL;
-                }
-                list = NULL;
-
-                /* Set hep for insertion of atom's ale, immediately below. */
-                hep = JS_HashTableRawLookup(table, ATOM_HASH(atom), atom);
-            }
-
-            /* Finally, add an entry for atom into the hash bucket at hep. */
-            ale = (JSAtomListElement *)
-                  JS_HashTableRawAdd(table, hep, ATOM_HASH(atom), atom, NULL);
-            if (!ale)
-                return NULL;
-
-            /*
-             * If hoisting, move ale to the end of its chain after we called
-             * JS_HashTableRawAdd, since RawAdd may have grown the table and
-             * then recomputed hep to refer to the pointer to the first entry
-             * with the given key.
-             */
-            if (how == HOIST && ale->entry.next) {
-                JS_ASSERT(*hep == &ale->entry);
-                *hep = ale->entry.next;
-                ale->entry.next = NULL;
-                do {
-                    hep = &(*hep)->next;
-                } while (*hep);
-                *hep = &ale->entry;
-            }
-        }
-
-        ALE_SET_INDEX(ale, count++);
-    }
-    return ale;
-}
-
-void
-JSAtomList::rawRemove(Parser *parser, JSAtomListElement *ale, JSHashEntry **hep)
-{
-    JS_ASSERT(!set);
-    JS_ASSERT(count != 0);
-
-    if (table) {
-        JS_ASSERT(hep);
-        JS_HashTableRawRemove(table, hep, &ale->entry);
-    } else {
-        JS_ASSERT(!hep);
-        hep = &list;
-        while (*hep != &ale->entry) {
-            JS_ASSERT(*hep);
-            hep = &(*hep)->next;
-        }
-        *hep = ale->entry.next;
-        js_free_temp_entry(parser, &ale->entry, HT_FREE_ENTRY);
-    }
-
-    --count;
-}
-
-JSAutoAtomList::~JSAutoAtomList()
-{
-    if (table) {
-        JS_HashTableDestroy(table);
-    } else {
-        JSHashEntry *hep = list;
-        while (hep) {
-            JSHashEntry *next = hep->next;
-            js_free_temp_entry(parser, hep, HT_FREE_ENTRY);
-            hep = next;
-        }
-    }
-}
-
-JSAtomListElement *
-JSAtomListIterator::operator ()()
-{
-    JSAtomListElement *ale;
-    JSHashTable *ht;
-
-    if (index == uint32(-1))
-        return NULL;
-
-    ale = next;
-    if (!ale) {
-        ht = list->table;
-        if (!ht)
-            goto done;
-        do {
-            if (index == JS_BIT(JS_HASH_BITS - ht->shift))
-                goto done;
-            next = (JSAtomListElement *) ht->buckets[index++];
-        } while (!next);
-        ale = next;
-    }
-
-    next = ALE_NEXT(ale);
-    return ale;
-
-  done:
-    index = uint32(-1);
-    return NULL;
-}
-
-static intN
-js_map_atom(JSHashEntry *he, intN i, void *arg)
-{
-    JSAtomListElement *ale = (JSAtomListElement *)he;
-    JSAtom **vector = (JSAtom **) arg;
-
-    vector[ALE_INDEX(ale)] = ALE_ATOM(ale);
-    return HT_ENUMERATE_NEXT;
-}
-
-#ifdef DEBUG
-static jsrefcount js_atom_map_count;
-static jsrefcount js_atom_map_hash_table_count;
-#endif
-
-void
-js_InitAtomMap(JSContext *cx, JSAtomMap *map, JSAtomList *al)
-{
-    JSAtom **vector;
-    JSAtomListElement *ale;
-
-    /* Map length must already be initialized. */
-    JS_ASSERT(al->count == map->length);
-#ifdef DEBUG
-    JS_ATOMIC_INCREMENT(&js_atom_map_count);
-#endif
-    ale = (JSAtomListElement *)al->list;
-    if (!ale && !al->table) {
-        JS_ASSERT(!map->vector);
-        return;
-    }
-
-    vector = map->vector;
-    if (al->table) {
-#ifdef DEBUG
-        JS_ATOMIC_INCREMENT(&js_atom_map_hash_table_count);
-#endif
-        JS_HashTableEnumerateEntries(al->table, js_map_atom, vector);
-    } else {
-        do {
-            vector[ALE_INDEX(ale)] = ALE_ATOM(ale);
-        } while ((ale = ALE_NEXT(ale)) != NULL);
-    }
-    al->clear();
 }
 
 #if JS_HAS_XML_SUPPORT
 bool
 js_InternNonIntElementIdSlow(JSContext *cx, JSObject *obj, const Value &idval,
                              jsid *idp)
 {
     JS_ASSERT(idval.isObject());
--- a/js/src/jsatom.h
+++ b/js/src/jsatom.h
@@ -34,24 +34,21 @@
  * and other provisions required by the GPL or the LGPL. If you do not delete
  * the provisions above, a recipient may use your version of this file under
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #ifndef jsatom_h___
 #define jsatom_h___
-/*
- * JS atom table.
- */
+
 #include <stddef.h>
 #include "jsversion.h"
 #include "jsapi.h"
 #include "jsprvtd.h"
-#include "jshash.h"
 #include "jshashtable.h"
 #include "jspubtd.h"
 #include "jsstr.h"
 #include "jslock.h"
 #include "jsvalue.h"
 
 #include "vm/String.h"
 
@@ -139,133 +136,19 @@ struct DefaultHasher<jsid>
 
 /*
  * Return a printable, lossless char[] representation of a string-type atom.
  * The lifetime of the result matches the lifetime of bytes.
  */
 extern const char *
 js_AtomToPrintableString(JSContext *cx, JSAtom *atom, JSAutoByteString *bytes);
 
-struct JSAtomListElement {
-    JSHashEntry         entry;
-};
-
-#define ALE_ATOM(ale)   ((JSAtom *) (ale)->entry.key)
-#define ALE_INDEX(ale)  (jsatomid(uintptr_t((ale)->entry.value)))
-#define ALE_VALUE(ale)  ((jsboxedword) (ale)->entry.value)
-#define ALE_NEXT(ale)   ((JSAtomListElement *) (ale)->entry.next)
-
-/*
- * In an upvars list, ALE_DEFN(ale)->resolve() is the outermost definition the
- * name may reference. If a with block or a function that calls eval encloses
- * the use, the name may end up referring to something else at runtime.
- */
-#define ALE_DEFN(ale)   ((JSDefinition *) (ale)->entry.value)
-
-#define ALE_SET_ATOM(ale,atom)  ((ale)->entry.key = (const void *)(atom))
-#define ALE_SET_INDEX(ale,index)((ale)->entry.value = (void *)(index))
-#define ALE_SET_DEFN(ale, dn)   ((ale)->entry.value = (void *)(dn))
-#define ALE_SET_VALUE(ale, v)   ((ale)->entry.value = (void *)(v))
-#define ALE_SET_NEXT(ale,nxt)   ((ale)->entry.next = (JSHashEntry *)(nxt))
-
-/*
- * NB: JSAtomSet must be plain-old-data as it is embedded in the pn_u union in
- * JSParseNode. JSAtomList encapsulates all operational uses of a JSAtomSet.
- *
- * The JSAtomList name is traditional, even though the implementation is a map
- * (not to be confused with JSAtomMap). In particular the "ALE" and "ale" short
- * names for JSAtomListElement variables roll off the fingers, compared to ASE
- * or AME alternatives.
- */
-struct JSAtomSet {
-    JSHashEntry         *list;          /* literals indexed for mapping */
-    JSHashTable         *table;         /* hash table if list gets too long */
-    jsuint              count;          /* count of indexed literals */
-};
-
-struct JSAtomList : public JSAtomSet
-{
-#ifdef DEBUG
-    const JSAtomSet* set;               /* asserted null in mutating methods */
-#endif
-
-    JSAtomList() {
-        list = NULL; table = NULL; count = 0;
-#ifdef DEBUG
-        set = NULL;
-#endif
-    }
-
-    JSAtomList(const JSAtomSet& as) {
-        list = as.list; table = as.table; count = as.count;
-#ifdef DEBUG
-        set = &as;
-#endif
-    }
-
-    void clear() { JS_ASSERT(!set); list = NULL; table = NULL; count = 0; }
-
-    JSAtomListElement *lookup(JSAtom *atom) {
-        JSHashEntry **hep;
-        return rawLookup(atom, hep);
-    }
-
-    JSAtomListElement *rawLookup(JSAtom *atom, JSHashEntry **&hep);
-
-    enum AddHow { UNIQUE, SHADOW, HOIST };
-
-    JSAtomListElement *add(js::Parser *parser, JSAtom *atom, AddHow how = UNIQUE);
-
-    void remove(js::Parser *parser, JSAtom *atom) {
-        JSHashEntry **hep;
-        JSAtomListElement *ale = rawLookup(atom, hep);
-        if (ale)
-            rawRemove(parser, ale, hep);
-    }
-
-    void rawRemove(js::Parser *parser, JSAtomListElement *ale, JSHashEntry **hep);
-};
-
-/*
- * A subclass of JSAtomList with a destructor.  This atom list owns its
- * hash table and its entries, but no keys or values.
- */
-struct JSAutoAtomList: public JSAtomList
-{
-    JSAutoAtomList(js::Parser *p): parser(p) {}
-    ~JSAutoAtomList();
-  private:
-    js::Parser *parser;         /* For freeing list entries. */
-};
-
-/*
- * Iterate over an atom list. We define a call operator to minimize the syntax
- * tax for users. We do not use a more standard pattern using ++ and * because
- * (a) it's the wrong pattern for a non-scalar; (b) it's overkill -- one method
- * is enough. (This comment is overkill!)
- */
-class JSAtomListIterator {
-    JSAtomList*         list;
-    JSAtomListElement*  next;
-    uint32              index;
-
-  public:
-    JSAtomListIterator(JSAtomList* al) : list(al) { reset(); }
-
-    void reset() {
-        next = (JSAtomListElement *) list->list;
-        index = 0;
-    }
-
-    JSAtomListElement* operator ()();
-};
-
 struct JSAtomMap {
-    JSAtom              **vector;       /* array of ptrs to indexed atoms */
-    jsatomid            length;         /* count of (to-be-)indexed atoms */
+    JSAtom **vector;    /* array of ptrs to indexed atoms */
+    uint32 length;      /* count of (to-be-)indexed atoms */
 };
 
 namespace js {
 
 enum InternBehavior
 {
     DoNotInternAtom = 0,
     InternAtom = 1
@@ -668,17 +551,18 @@ inline bool
 js_ValueToStringId(JSContext *cx, const js::Value &v, jsid *idp);
 
 inline bool
 js_InternNonIntElementId(JSContext *cx, JSObject *obj, const js::Value &idval,
                          jsid *idp);
 inline bool
 js_InternNonIntElementId(JSContext *cx, JSObject *obj, const js::Value &idval,
                          jsid *idp, js::Value *vp);
+
 /*
  * For all unmapped atoms recorded in al, add a mapping from the atom's index
  * to its address. map->length must already be set to the number of atoms in
  * the list and map->vector must point to pre-allocated memory.
  */
 extern void
-js_InitAtomMap(JSContext *cx, JSAtomMap *map, JSAtomList *al);
+js_InitAtomMap(JSContext *cx, JSAtomMap *map, js::AtomIndexMap *indices);
 
 #endif /* jsatom_h___ */
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -80,17 +80,19 @@
 #include "jsscript.h"
 #include "jsstaticcheck.h"
 #include "jsstr.h"
 #include "jstracer.h"
 
 #ifdef JS_METHODJIT
 # include "assembler/assembler/MacroAssembler.h"
 #endif
+#include "frontend/ParseMaps.h"
 
+#include "jsatominlines.h"
 #include "jscntxtinlines.h"
 #include "jscompartment.h"
 #include "jsobjinlines.h"
 
 using namespace js;
 using namespace js::gc;
 
 namespace js {
@@ -1324,18 +1326,21 @@ JSContext::JSContext(JSRuntime *rt)
 JSContext::~JSContext()
 {
 #ifdef JS_THREADSAFE
     JS_ASSERT(!thread_);
 #endif
 
     /* Free the stuff hanging off of cx. */
     VOUCH_DOES_NOT_REQUIRE_STACK();
+    if (parseMapPool_)
+        Foreground::delete_<ParseMapPool>(parseMapPool_);
+
+    JS_FinishArenaPool(&regExpPool);
     JS_FinishArenaPool(&tempPool);
-    JS_FinishArenaPool(&regExpPool);
 
     if (lastMessage)
         Foreground::free_(lastMessage);
 
     /* Remove any argument formatters. */
     JSArgumentFormatMap *map = argumentFormatMap;
     while (map) {
         JSArgumentFormatMap *temp = map;
@@ -1449,31 +1454,35 @@ JSRuntime::onOutOfMemory(void *p, size_t
         js_ReportOutOfMemory(cx);
     return NULL;
 }
 
 /*
  * Release pool's arenas if the stackPool has existed for longer than the
  * limit specified by gcEmptyArenaPoolLifespan.
  */
-inline void
+static void
 FreeOldArenas(JSRuntime *rt, JSArenaPool *pool)
 {
     JSArena *a = pool->current;
     if (a == pool->first.next && a->avail == a->base + sizeof(int64)) {
         int64 age = JS_Now() - *(int64 *) a->base;
         if (age > int64(rt->gcEmptyArenaPoolLifespan) * 1000)
             JS_FreeArenaPool(pool);
     }
 }
 
 void
 JSContext::purge()
 {
     FreeOldArenas(runtime, &regExpPool);
+    if (!activeCompilations) {
+        Foreground::delete_<ParseMapPool>(parseMapPool_);
+        parseMapPool_ = NULL;
+    }
 }
 
 #if defined(JS_TRACER) || defined(JS_METHODJIT)
 static bool
 ComputeIsJITBroken()
 {
 #ifndef ANDROID
     return false;
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -950,16 +950,21 @@ struct JSContext
     void resetCompartment();
 
     /* Wrap cx->exception for the current compartment. */
     void wrapPendingException();
 
     /* Temporary arena pool used while compiling and decompiling. */
     JSArenaPool         tempPool;
 
+  private:
+    /* Lazily initialized pool of maps used during parse/emit. */
+    js::ParseMapPool    *parseMapPool_;
+
+  public:
     /* Temporary arena pool used while evaluate regular expressions. */
     JSArenaPool         regExpPool;
 
     /* Top-level object and pointer to top stack frame's scope chain. */
     JSObject            *globalObject;
 
     /* State for object and array toSource conversion. */
     JSSharpObjectMap    sharpObjectMap;
@@ -979,16 +984,23 @@ struct JSContext
 
     /* Client opaque pointers. */
     void                *data;
     void                *data2;
 
     inline js::RegExpStatics *regExpStatics();
 
   public:
+    js::ParseMapPool &parseMapPool() {
+        JS_ASSERT(parseMapPool_);
+        return *parseMapPool_;
+    }
+
+    inline bool ensureParseMapPool();
+
     /*
      * The default script compilation version can be set iff there is no code running.
      * This typically occurs via the JSAPI right after a context is constructed.
      */
     bool canSetDefaultVersion() const {
         return !stack.hasfp() && !hasVersionOverride;
     }
 
@@ -1258,16 +1270,23 @@ struct JSContext
 
     void setPendingException(js::Value v);
 
     void clearPendingException() {
         this->throwing = false;
         this->exception.setUndefined();
     }
 
+    /*
+     * Count of currently active compilations.
+     * When there are compilations active for the context, the GC must not
+     * purge the ParseMapPool.
+     */
+    uintN activeCompilations;
+
 #ifdef DEBUG
     /*
      * Controls whether a quadratic-complexity assertion is performed during
      * stack iteration, defaults to true.
      */
     bool stackIterAssertionEnabled;
 #endif
 
@@ -2287,18 +2306,16 @@ js_GetScriptedCaller(JSContext *cx, js::
 extern jsbytecode*
 js_GetCurrentBytecodePC(JSContext* cx);
 
 extern bool
 js_CurrentPCIsInImacro(JSContext *cx);
 
 namespace js {
 
-class RegExpStatics;
-
 extern JS_FORCES_STACK JS_FRIEND_API(void)
 LeaveTrace(JSContext *cx);
 
 extern bool
 CanLeaveTrace(JSContext *cx);
 
 } /* namespace js */
 
--- a/js/src/jscntxtinlines.h
+++ b/js/src/jscntxtinlines.h
@@ -43,16 +43,18 @@
 
 #include "jscntxt.h"
 #include "jscompartment.h"
 #include "jsstaticcheck.h"
 #include "jsxml.h"
 #include "jsregexp.h"
 #include "jsgc.h"
 
+#include "frontend/ParseMaps.h"
+
 namespace js {
 
 static inline GlobalObject *
 GetGlobalForScopeChain(JSContext *cx)
 {
     /*
      * This is essentially GetScopeChain(cx)->getGlobal(), but without
      * falling off trace.
@@ -412,9 +414,18 @@ JSContext::regExpStatics()
 
 inline void
 JSContext::setPendingException(js::Value v) {
     this->throwing = true;
     this->exception = v;
     assertSameCompartment(this, v);
 }
 
+inline bool
+JSContext::ensureParseMapPool()
+{
+    if (parseMapPool_)
+        return true;
+    parseMapPool_ = js::OffTheBooks::new_<js::ParseMapPool>(this);
+    return parseMapPool_;
+}
+
 #endif /* jscntxtinlines_h___ */
--- a/js/src/jsdbgapi.cpp
+++ b/js/src/jsdbgapi.cpp
@@ -1958,50 +1958,49 @@ JS_GetFunctionTotalSize(JSContext *cx, J
 }
 
 #include "jsemit.h"
 
 JS_PUBLIC_API(size_t)
 JS_GetScriptTotalSize(JSContext *cx, JSScript *script)
 {
     size_t nbytes, pbytes;
-    jsatomid i;
     jssrcnote *sn, *notes;
     JSObjectArray *objarray;
     JSPrincipals *principals;
 
     nbytes = sizeof *script;
     if (script->u.object)
         nbytes += JS_GetObjectTotalSize(cx, script->u.object);
 
     nbytes += script->length * sizeof script->code[0];
     nbytes += script->atomMap.length * sizeof script->atomMap.vector[0];
-    for (i = 0; i < script->atomMap.length; i++)
+    for (size_t i = 0; i < script->atomMap.length; i++)
         nbytes += GetAtomTotalSize(cx, script->atomMap.vector[i]);
 
     if (script->filename)
         nbytes += strlen(script->filename) + 1;
 
     notes = script->notes();
     for (sn = notes; !SN_IS_TERMINATOR(sn); sn = SN_NEXT(sn))
         continue;
     nbytes += (sn - notes + 1) * sizeof *sn;
 
     if (JSScript::isValidOffset(script->objectsOffset)) {
         objarray = script->objects();
-        i = objarray->length;
+        size_t i = objarray->length;
         nbytes += sizeof *objarray + i * sizeof objarray->vector[0];
         do {
             nbytes += JS_GetObjectTotalSize(cx, objarray->vector[--i]);
         } while (i != 0);
     }
 
     if (JSScript::isValidOffset(script->regexpsOffset)) {
         objarray = script->regexps();
-        i = objarray->length;
+        size_t i = objarray->length;
         nbytes += sizeof *objarray + i * sizeof objarray->vector[0];
         do {
             nbytes += JS_GetObjectTotalSize(cx, objarray->vector[--i]);
         } while (i != 0);
     }
 
     if (JSScript::isValidOffset(script->trynotesOffset)) {
         nbytes += sizeof(JSTryNoteArray) +
--- a/js/src/jsemit.cpp
+++ b/js/src/jsemit.cpp
@@ -69,16 +69,18 @@
 #include "jsautooplen.h"        // generated headers last
 #include "jsstaticcheck.h"
 
 #include "jsatominlines.h"
 #include "jsobjinlines.h"
 #include "jsscopeinlines.h"
 #include "jsscriptinlines.h"
 
+#include "frontend/ParseMaps-inl.h"
+
 /* Allocation chunk counts, must be powers of two in general. */
 #define BYTECODE_CHUNK  256     /* code allocation increment */
 #define SRCNOTE_CHUNK   64      /* initial srcnote allocation increment */
 #define TRYNOTE_CHUNK   64      /* trynote allocation increment */
 
 /* Macros to compute byte sizes from typed element counts. */
 #define BYTECODE_SIZE(n)        ((n) * sizeof(jsbytecode))
 #define SRCNOTE_SIZE(n)         ((n) * sizeof(jssrcnote))
@@ -104,54 +106,61 @@ JSTreeContext::trace(JSTracer *trc)
 }
 
 JSCodeGenerator::JSCodeGenerator(Parser *parser,
                                  JSArenaPool *cpool, JSArenaPool *npool,
                                  uintN lineno)
   : JSTreeContext(parser),
     codePool(cpool), notePool(npool),
     codeMark(JS_ARENA_MARK(cpool)), noteMark(JS_ARENA_MARK(npool)),
+    atomIndices(parser->context),
     stackDepth(0), maxStackDepth(0),
     ntrynotes(0), lastTryNode(NULL),
     spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
     numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
     arrayCompDepth(0),
     emitLevel(0),
     constMap(parser->context),
     constList(parser->context),
+    upvarIndices(parser->context),
     globalUses(parser->context),
+    globalMap(parser->context),
     closedArgs(parser->context),
     closedVars(parser->context),
     traceIndex(0)
 {
     flags = TCF_COMPILING;
     memset(&prolog, 0, sizeof prolog);
     memset(&main, 0, sizeof main);
     current = &main;
     firstLine = prolog.currentLine = main.currentLine = lineno;
     prolog.noteMask = main.noteMask = SRCNOTE_CHUNK - 1;
     memset(&upvarMap, 0, sizeof upvarMap);
 }
 
-bool JSCodeGenerator::init()
+bool
+JSCodeGenerator::init(JSContext *cx, JSTreeContext::InitBehavior ib)
 {
-    return constMap.init();
+    roLexdeps.init();
+    return JSTreeContext::init(cx, ib) && constMap.init() && atomIndices.ensureMap(cx);
 }
 
 JSCodeGenerator::~JSCodeGenerator()
 {
     JS_ARENA_RELEASE(codePool, codeMark);
     JS_ARENA_RELEASE(notePool, noteMark);
 
+    JSContext *cx = parser->context;
+
     /* NB: non-null only after OOM. */
     if (spanDeps)
-        parser->context->free_(spanDeps);
+        cx->free_(spanDeps);
 
     if (upvarMap.vector)
-        parser->context->free_(upvarMap.vector);
+        cx->free_(upvarMap.vector);
 }
 
 static ptrdiff_t
 EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
 {
     jsbytecode *base, *limit, *next;
     ptrdiff_t offset, length;
     size_t incr, size;
@@ -1546,27 +1555,29 @@ EmitKnownBlockChain(JSContext *cx, JSCod
 }
 
 static JSBool
 EmitBlockChain(JSContext *cx, JSCodeGenerator *cg)
 {
     return EmitKnownBlockChain(cx, cg, cg->blockChainBox);
 }
 
+static const jsatomid INVALID_ATOMID = -1;
+
 static ptrdiff_t
 EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
-         ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
+         ptrdiff_t *lastp, jsatomid labelIndex = INVALID_ATOMID, JSSrcNoteType noteType = SRC_NULL)
 {
     intN index;
 
     if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
         return -1;
 
-    if (label)
-        index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
+    if (labelIndex != INVALID_ATOMID)
+        index = js_NewSrcNote2(cx, cg, noteType, ptrdiff_t(labelIndex));
     else if (noteType != SRC_NULL)
         index = js_NewSrcNote(cx, cg, noteType);
     else
         index = 0;
     if (index < 0)
         return -1;
 
     ptrdiff_t result = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
@@ -1818,30 +1829,28 @@ EmitIndexOp(JSContext *cx, JSOp op, uint
  * caller's lexical environment, and embedding a false return on error.
  */
 #define EMIT_INDEX_OP(op, index)                                              \
     JS_BEGIN_MACRO                                                            \
         if (!EmitIndexOp(cx, op, index, cg))                                  \
             return JS_FALSE;                                                  \
     JS_END_MACRO
 
-static JSBool
+static bool
 EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
 {
-    JSAtomListElement *ale;
-
     JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
-    if (op == JSOP_GETPROP &&
-        pn->pn_atom == cx->runtime->atomState.lengthAtom) {
+    if (op == JSOP_GETPROP && pn->pn_atom == cx->runtime->atomState.lengthAtom)
         return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
-    }
-    ale = cg->atomList.add(cg->parser, pn->pn_atom);
-    if (!ale)
-        return JS_FALSE;
-    return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
+
+    jsatomid index;
+    if (!cg->makeAtomIndex(pn->pn_atom, &index))
+        return false;
+
+    return EmitIndexOp(cx, op, index, cg);
 }
 
 static JSBool
 EmitObjectOp(JSContext *cx, JSObjectBox *objbox, JSOp op,
              JSCodeGenerator *cg)
 {
     JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
     return EmitIndexOp(cx, op, cg->objectList.index(objbox), cg);
@@ -2037,22 +2046,23 @@ BindKnownGlobal(JSContext *cx, JSCodeGen
     // Cookie is an outparam; make sure caller knew to clear it.
     JS_ASSERT(pn->pn_cookie.isFree());
 
     if (cg->mightAliasLocals())
         return true;
 
     GlobalScope *globalScope = cg->compiler()->globalScope;
 
-    uint32 index;
+    jsatomid index;
     if (dn->pn_cookie.isFree()) {
         // The definition wasn't bound, so find its atom's index in the
         // mapping of defined globals.
-        JSAtomListElement *ale = globalScope->names.lookup(atom);
-        index = ALE_INDEX(ale);
+        AtomIndexPtr p = globalScope->names.lookup(atom);
+        JS_ASSERT(!!p);
+        index = p.value();
     } else {
         JSCodeGenerator *globalcg = globalScope->cg;
 
         // If the definition is bound, and we're in the same cg, we can re-use
         // its cookie.
         if (globalcg == cg) {
             pn->pn_cookie = dn->pn_cookie;
             pn->pn_dflags |= PND_BOUND;
@@ -2114,18 +2124,16 @@ BindGlobal(JSContext *cx, JSCodeGenerato
  */
 static JSBool
 BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
 {
     JSDefinition *dn;
     JSOp op;
     JSAtom *atom;
     JSDefinition::Kind dn_kind;
-    JSAtomListElement *ale;
-    uintN index;
 
     JS_ASSERT(pn->pn_type == TOK_NAME);
 
     /* Idempotency tests come first, since we may be called more than once. */
     if (pn->pn_dflags & PND_BOUND)
         return JS_TRUE;
 
     /* No cookie initialized for these two, they're pre-bound by definition. */
@@ -2231,18 +2239,18 @@ BindNameToSlot(JSContext *cx, JSCodeGene
 
             JS_ASSERT(caller->isScriptFrame());
 
             /*
              * If this is an eval in the global scope, then unbound variables
              * must be globals, so try to use GNAME ops.
              */
             if (caller->isGlobalFrame() && TryConvertToGname(cg, pn, &op)) {
-                ale = cg->atomList.add(cg->parser, atom);
-                if (!ale)
+                jsatomid _;
+                if (!cg->makeAtomIndex(atom, &_))
                     return JS_FALSE;
 
                 pn->pn_op = op;
                 pn->pn_dflags |= PND_BOUND;
                 return JS_TRUE;
             }
 
             /*
@@ -2251,33 +2259,33 @@ BindNameToSlot(JSContext *cx, JSCodeGene
              */
             return JS_TRUE;
         }
 
         /* Optimize accesses to undeclared globals. */
         if (!cg->mightAliasLocals() && !TryConvertToGname(cg, pn, &op))
             return JS_TRUE;
 
-        ale = cg->atomList.add(cg->parser, atom);
-        if (!ale)
+        jsatomid _;
+        if (!cg->makeAtomIndex(atom, &_))
             return JS_FALSE;
 
         pn->pn_op = op;
         pn->pn_dflags |= PND_BOUND;
 
         return JS_TRUE;
     }
 
     uint16 level = cookie.level();
     JS_ASSERT(cg->staticLevel >= level);
 
     const uintN skip = cg->staticLevel - level;
     if (skip != 0) {
         JS_ASSERT(cg->inFunction());
-        JS_ASSERT_IF(cookie.slot() != UpvarCookie::CALLEE_SLOT, cg->lexdeps.lookup(atom));
+        JS_ASSERT_IF(cookie.slot() != UpvarCookie::CALLEE_SLOT, cg->roLexdeps->lookup(atom));
         JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
         JS_ASSERT(cg->fun()->u.i.skipmin <= skip);
 
         /*
          * If op is a mutating opcode, this upvar's lookup skips too many levels,
          * or the function is heavyweight, we fall back on JSOP_*NAME*.
          */
         if (op != JSOP_NAME)
@@ -2285,31 +2293,33 @@ BindNameToSlot(JSContext *cx, JSCodeGene
         if (level >= UpvarCookie::UPVAR_LEVEL_LIMIT)
             return JS_TRUE;
         if (cg->flags & TCF_FUN_HEAVYWEIGHT)
             return JS_TRUE;
 
         if (!cg->fun()->isFlatClosure())
             return JS_TRUE;
 
-        ale = cg->upvarList.lookup(atom);
-        if (ale) {
-            index = ALE_INDEX(ale);
+        if (!cg->upvarIndices.ensureMap(cx))
+            return JS_FALSE;
+
+        AtomIndexAddPtr p = cg->upvarIndices->lookupForAdd(atom);
+        jsatomid index;
+        if (p) {
+            index = p.value();
         } else {
             if (!cg->bindings.addUpvar(cx, atom))
                 return JS_FALSE;
 
-            ale = cg->upvarList.add(cg->parser, atom);
-            if (!ale)
-                return JS_FALSE;
-            index = ALE_INDEX(ale);
-            JS_ASSERT(index == cg->upvarList.count - 1);
+            index = cg->upvarIndices->count();
+            if (!cg->upvarIndices->add(p, atom, index))
+                return JS_FALSE;
 
             UpvarCookie *vector = cg->upvarMap.vector;
-            uint32 length = cg->lexdeps.count;
+            uint32 length = cg->roLexdeps->count();
             if (!vector || cg->upvarMap.length != length) {
                 vector = (UpvarCookie *) cx->realloc_(vector, length * sizeof *vector);
                 if (!vector) {
                     JS_ReportOutOfMemory(cx);
                     return JS_FALSE;
                 }
                 cg->upvarMap.vector = vector;
                 cg->upvarMap.length = length;
@@ -2437,45 +2447,45 @@ BindNameToSlot(JSContext *cx, JSCodeGene
     pn->pn_cookie.set(0, cookie.slot());
     pn->pn_dflags |= PND_BOUND;
     return JS_TRUE;
 }
 
 bool
 JSCodeGenerator::addGlobalUse(JSAtom *atom, uint32 slot, UpvarCookie *cookie)
 {
-    JSAtomListElement *ale = globalMap.lookup(atom);
-    if (ale) {
-        cookie->set(0, uint16(ALE_INDEX(ale)));
+    if (!globalMap.ensureMap(context()))
+        return false;
+
+    AtomIndexAddPtr p = globalMap->lookupForAdd(atom);
+    if (p) {
+        jsatomid index = p.value();
+        cookie->set(0, index);
         return true;
     }
 
     /* Don't bother encoding indexes >= uint16 */
     if (globalUses.length() >= UINT16_LIMIT) {
         cookie->makeFree();
         return true;
     }
 
     /* Find or add an existing atom table entry. */
-    ale = atomList.add(parser, atom);
-    if (!ale)
+    jsatomid allAtomIndex;
+    if (!makeAtomIndex(atom, &allAtomIndex))
         return false;
 
-    cookie->set(0, globalUses.length());
-
-    GlobalSlotArray::Entry entry = { ALE_INDEX(ale), slot };
+    jsatomid globalUseIndex = globalUses.length();
+    cookie->set(0, globalUseIndex);
+
+    GlobalSlotArray::Entry entry = { allAtomIndex, slot };
     if (!globalUses.append(entry))
         return false;
 
-    ale = globalMap.add(parser, atom);
-    if (!ale)
-        return false;
-
-    ALE_SET_INDEX(ale, cookie->asInteger());
-    return true;
+    return globalMap->add(p, atom, globalUseIndex);
 }
 
 /*
  * If pn contains a useful expression, return true with *answer set to true.
  * If pn contains a useless expression, return true with *answer set to false.
  * Return false on error.
  *
  * The caller should initialize *answer to false and invoke this function on
@@ -2755,32 +2765,32 @@ EmitXMLName(JSContext *cx, JSParseNode *
                        CG_OFFSET(cg) - pn2->pn_offset) < 0) {
         return JS_FALSE;
     }
 
     return js_Emit1(cx, cg, op) >= 0;
 }
 #endif
 
-static JSBool
+static bool
 EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
 {
     /*
      * Special case for obj.__proto__ to deoptimize away from fast paths in the
      * interpreter and trace recorder, which skip dense array instances by
      * going up to Array.prototype before looking up the property name.
      */
-    JSAtomListElement *ale = cg->atomList.add(cg->parser, pn->pn_atom);
-    if (!ale)
-        return JS_FALSE;
-    if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
-        return JS_FALSE;
+    jsatomid index;
+    if (!cg->makeAtomIndex(pn->pn_atom, &index))
+        return false;
+    if (!EmitIndexOp(cx, JSOP_QNAMEPART, index, cg))
+        return false;
     if (js_Emit1(cx, cg, op) < 0)
-        return JS_FALSE;
-    return JS_TRUE;
+        return false;
+    return true;
 }
 
 static JSBool
 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
            JSBool callContext)
 {
     JSParseNode *pn2, *pndot, *pnup, *pndown;
     ptrdiff_t top;
@@ -2823,23 +2833,19 @@ EmitPropOp(JSContext *cx, JSParseNode *p
             } else {
                 switch (pn2->pn_op) {
                   case JSOP_GETARG:
                     op = JSOP_GETARGPROP;
                     goto do_indexconst;
                   case JSOP_GETLOCAL:
                     op = JSOP_GETLOCALPROP;
                   do_indexconst: {
-                        JSAtomListElement *ale;
                         jsatomid atomIndex;
-
-                        ale = cg->atomList.add(cg->parser, pn->pn_atom);
-                        if (!ale)
+                        if (!cg->makeAtomIndex(pn->pn_atom, &atomIndex))
                             return JS_FALSE;
-                        atomIndex = ALE_INDEX(ale);
                         return EmitSlotIndexOp(cx, op, pn2->pn_cookie.asInteger(), atomIndex, cg);
                     }
 
                   default:;
                 }
             }
         }
     }
@@ -3090,17 +3096,16 @@ EmitSwitch(JSContext *cx, JSCodeGenerato
 {
     JSOp switchOp;
     JSBool ok, hasDefault, constPropagated;
     ptrdiff_t top, off, defaultOffset;
     JSParseNode *pn2, *pn3, *pn4;
     uint32 caseCount, tableLength;
     JSParseNode **table;
     int32_t i, low, high;
-    JSAtomListElement *ale;
     intN noteIndex;
     size_t switchSize, tableSize;
     jsbytecode *pc, *savepc;
 #if JS_HAS_BLOCK_SCOPE
     JSObjectBox *box;
 #endif
 
     /* Try for most optimal, fall back if not dense ints, and per ECMAv2. */
@@ -3508,41 +3513,37 @@ EmitSwitch(JSContext *cx, JSCodeGenerato
             if (switchOp == JSOP_TABLESWITCH) {
                 for (i = 0; i < (jsint)tableLength; i++) {
                     pn3 = table[i];
                     if (pn3 &&
                         (pn4 = pn3->pn_left) != NULL &&
                         pn4->pn_type == TOK_NAME) {
                         /* Note a propagated constant with the const's name. */
                         JS_ASSERT(!pn4->maybeExpr());
-                        ale = cg->atomList.add(cg->parser, pn4->pn_atom);
-                        if (!ale)
+                        jsatomid index;
+                        if (!cg->makeAtomIndex(pn4->pn_atom, &index))
                             goto bad;
                         CG_NEXT(cg) = pc;
-                        if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
-                                           ALE_INDEX(ale)) < 0) {
+                        if (js_NewSrcNote2(cx, cg, SRC_LABEL, ptrdiff_t(index)) < 0)
                             goto bad;
-                        }
                     }
                     pc += JUMP_OFFSET_LEN;
                 }
             } else {
                 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
                     pn4 = pn3->pn_left;
                     if (pn4 && pn4->pn_type == TOK_NAME) {
                         /* Note a propagated constant with the const's name. */
                         JS_ASSERT(!pn4->maybeExpr());
-                        ale = cg->atomList.add(cg->parser, pn4->pn_atom);
-                        if (!ale)
+                        jsatomid index;
+                        if (!cg->makeAtomIndex(pn4->pn_atom, &index))
                             goto bad;
                         CG_NEXT(cg) = pc;
-                        if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
-                                           ALE_INDEX(ale)) < 0) {
+                        if (js_NewSrcNote2(cx, cg, SRC_LABEL, ptrdiff_t(index)) < 0)
                             goto bad;
-                        }
                     }
                     pc += INDEX_LEN + JUMP_OFFSET_LEN;
                 }
             }
             CG_NEXT(cg) = savepc;
         }
     }
 
@@ -3718,55 +3719,52 @@ js_EmitFunctionScript(JSContext *cx, JSC
 /* A function, so that we avoid macro-bloating all the other callsites. */
 static JSBool
 UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, uintN line)
 {
     UPDATE_LINE_NUMBER_NOTES(cx, cg, line);
     return JS_TRUE;
 }
 
-static JSBool
+static bool
 MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                  JSParseNode *pn, jsatomid *result)
 {
     jsatomid atomIndex;
-    JSAtomListElement *ale;
 
     if (!pn->pn_cookie.isFree()) {
-        atomIndex = (jsatomid) pn->pn_cookie.slot();
+        atomIndex = pn->pn_cookie.slot();
     } else {
-        ale = cg->atomList.add(cg->parser, pn->pn_atom);
-        if (!ale)
-            return JS_FALSE;
-        atomIndex = ALE_INDEX(ale);
+        if (!cg->makeAtomIndex(pn->pn_atom, &atomIndex))
+            return false;
     }
 
     if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM &&
         (!cg->inFunction() || (cg->flags & TCF_FUN_HEAVYWEIGHT)) &&
         !(pn->pn_dflags & PND_GVAR))
     {
         CG_SWITCH_TO_PROLOG(cg);
         if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.begin.lineno))
-            return JS_FALSE;
+            return false;
         EMIT_INDEX_OP(prologOp, atomIndex);
         CG_SWITCH_TO_MAIN(cg);
     }
 
     if (cg->inFunction() &&
         JOF_OPTYPE(pn->pn_op) == JOF_LOCAL &&
         pn->pn_cookie.slot() < cg->bindings.countVars() &&
         cg->shouldNoteClosedName(pn))
     {
         if (!cg->closedVars.append(pn->pn_cookie.slot()))
-            return JS_FALSE;
+            return false;
     }
 
     if (result)
         *result = atomIndex;
-    return JS_TRUE;
+    return true;
 }
 
 #if JS_HAS_DESTRUCTURING
 
 typedef JSBool
 (*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                             JSParseNode *pn);
 
@@ -4545,17 +4543,16 @@ public:
 JSBool
 js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
 {
     JSBool ok, useful, wantval;
     JSStmtInfo *stmt, stmtInfo;
     ptrdiff_t top, off, tmp, beq, jmp, tmp2, tmp3;
     JSParseNode *pn2, *pn3;
     JSAtom *atom;
-    JSAtomListElement *ale;
     jsatomid atomIndex;
     uintN index;
     ptrdiff_t noteIndex, noteIndex2;
     JSSrcNoteType noteType;
     jsbytecode *pc;
     JSOp op;
     TokenKind type;
     uint32 argc;
@@ -4612,17 +4609,17 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
             js_ReportOutOfMemory(cx);
             return JS_FALSE;
         }
         JSCodeGenerator *cg2 =
             new (cg2space) JSCodeGenerator(cg->parser,
                                            cg->codePool, cg->notePool,
                                            pn->pn_pos.begin.lineno);
 
-        if (!cg2->init())
+        if (!cg2->init(cx))
             return JS_FALSE;
 
         cg2->flags = pn->pn_funbox->tcflags | TCF_COMPILING | TCF_IN_FUNCTION |
                      (cg->flags & TCF_FUN_MIGHT_ALIAS_LOCALS);
         cg2->bindings.transfer(cx, &pn->pn_funbox->bindings);
 #if JS_HAS_SHARP_VARS
         if (cg2->flags & TCF_HAS_SHARPS) {
             cg2->sharpSlotBase = cg2->bindings.sharpSlotBase(cx);
@@ -4731,20 +4728,21 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
                     return JS_FALSE;
             }
         }
         ok = js_EmitTree(cx, cg, pnlast);
         break;
       }
 
       case TOK_UPVARS:
-        JS_ASSERT(cg->lexdeps.count == 0);
-        JS_ASSERT(pn->pn_names.count != 0);
-        cg->lexdeps = pn->pn_names;
+        JS_ASSERT(pn->pn_names->count() != 0);
+        cg->roLexdeps = pn->pn_names;
         ok = js_EmitTree(cx, cg, pn->pn_tree);
+        cg->roLexdeps.clearMap();
+        pn->pn_names.releaseMap(cx);
         break;
 
       case TOK_IF:
         /* Initialize so we can detect else-if chains and avoid recursion. */
         stmtInfo.type = STMT_IF;
         beq = jmp = -1;
         noteIndex = -1;
 
@@ -4791,17 +4789,17 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
             stmtInfo.type = STMT_ELSE;
 
             /*
              * Emit a JSOP_BACKPATCH op to jump from the end of our then part
              * around the else part.  The js_PopStatementCG call at the bottom
              * of this switch case will fix up the backpatch chain linked from
              * stmtInfo.breaks.
              */
-            jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks, NULL, SRC_NULL);
+            jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks);
             if (jmp < 0)
                 return JS_FALSE;
 
             /* Ensure the branch-if-false comes here, then emit the else. */
             CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
             if (pn3->pn_type == TOK_IF) {
                 pn = pn3;
                 goto if_again;
@@ -5092,17 +5090,17 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
                     cookie = pn3->pn_cookie;
                 }
                 if (pn3->isConst()) {
                     ReportCompileErrorNumber(cx, CG_TS(cg), pn3, JSREPORT_ERROR,
                                              JSMSG_BAD_FOR_LEFTSIDE);
                     return JS_FALSE;
                 }
                 if (!cookie.isFree()) {
-                    atomIndex = (jsatomid) cookie.asInteger();
+                    atomIndex = cookie.asInteger();
                     EMIT_UINT16_IMM_OP(op, atomIndex);
                 } else {
                     if (!EmitAtomOp(cx, pn3, op, cg))
                         return JS_FALSE;
                 }
                 break;
               }
 
@@ -5348,63 +5346,68 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
         if (pn2->pn_type == TOK_IN) {
             if (!NewTryNote(cx, cg, JSTRY_ITER, cg->stackDepth, top, CG_OFFSET(cg)) ||
                 js_Emit1(cx, cg, JSOP_ENDITER) < 0) {
                 return JS_FALSE;
             }
         }
         break;
 
-      case TOK_BREAK:
+      case TOK_BREAK: {
         stmt = cg->topStmt;
         atom = pn->pn_atom;
+
+        jsatomid labelIndex;
         if (atom) {
-            ale = cg->atomList.add(cg->parser, atom);
-            if (!ale)
-                return JS_FALSE;
+            if (!cg->makeAtomIndex(atom, &labelIndex))
+                return JS_FALSE;
+
             while (stmt->type != STMT_LABEL || stmt->label != atom)
                 stmt = stmt->down;
             noteType = SRC_BREAK2LABEL;
         } else {
-            ale = NULL;
+            labelIndex = INVALID_ATOMID;
             while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
                 stmt = stmt->down;
             noteType = (stmt->type == STMT_SWITCH) ? SRC_NULL : SRC_BREAK;
         }
 
-        if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
+        if (EmitGoto(cx, cg, stmt, &stmt->breaks, labelIndex, noteType) < 0)
             return JS_FALSE;
         break;
-
-      case TOK_CONTINUE:
+      }
+
+      case TOK_CONTINUE: {
         stmt = cg->topStmt;
         atom = pn->pn_atom;
+
+        jsatomid labelIndex;
         if (atom) {
             /* Find the loop statement enclosed by the matching label. */
             JSStmtInfo *loop = NULL;
-            ale = cg->atomList.add(cg->parser, atom);
-            if (!ale)
+            if (!cg->makeAtomIndex(atom, &labelIndex))
                 return JS_FALSE;
             while (stmt->type != STMT_LABEL || stmt->label != atom) {
                 if (STMT_IS_LOOP(stmt))
                     loop = stmt;
                 stmt = stmt->down;
             }
             stmt = loop;
             noteType = SRC_CONT2LABEL;
         } else {
-            ale = NULL;
+            labelIndex = INVALID_ATOMID;
             while (!STMT_IS_LOOP(stmt))
                 stmt = stmt->down;
             noteType = SRC_CONTINUE;
         }
 
-        if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
+        if (EmitGoto(cx, cg, stmt, &stmt->continues, labelIndex, noteType) < 0)
             return JS_FALSE;
         break;
+      }
 
       case TOK_WITH:
         if (!js_EmitTree(cx, cg, pn->pn_left))
             return JS_FALSE;
         js_PushStatement(cg, &stmtInfo, STMT_WITH, CG_OFFSET(cg));
         if (js_Emit1(cx, cg, JSOP_ENTERWITH) < 0)
             return JS_FALSE;
 
@@ -5989,27 +5992,28 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
                 }
             }
         }
         break;
 
       case TOK_COLON:
         /* Emit an annotated nop so we know to decompile a label. */
         atom = pn->pn_atom;
-        ale = cg->atomList.add(cg->parser, atom);
-        if (!ale)
+
+        jsatomid index;
+        if (!cg->makeAtomIndex(atom, &index))
             return JS_FALSE;
+
         pn2 = pn->expr();
         noteType = (pn2->pn_type == TOK_LC ||
                     (pn2->pn_type == TOK_LEXICALSCOPE &&
                      pn2->expr()->pn_type == TOK_LC))
                    ? SRC_LABELBRACE
                    : SRC_LABEL;
-        noteIndex = js_NewSrcNote2(cx, cg, noteType,
-                                   (ptrdiff_t) ALE_INDEX(ale));
+        noteIndex = js_NewSrcNote2(cx, cg, noteType, ptrdiff_t(index));
         if (noteIndex < 0 ||
             js_Emit1(cx, cg, JSOP_NOP) < 0) {
             return JS_FALSE;
         }
 
         /* Emit code for the labeled statement. */
         js_PushStatement(cg, &stmtInfo, STMT_LABEL, CG_OFFSET(cg));
         stmtInfo.label = atom;
@@ -6061,35 +6065,31 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
          */
         pn2 = pn->pn_left;
         atomIndex = (jsatomid) -1;              /* quell GCC overwarning */
         switch (PN_TYPE(pn2)) {
           case TOK_NAME:
             if (!BindNameToSlot(cx, cg, pn2))
                 return JS_FALSE;
             if (!pn2->pn_cookie.isFree()) {
-                atomIndex = (jsatomid) pn2->pn_cookie.asInteger();
+                atomIndex = pn2->pn_cookie.asInteger();
             } else {
-                ale = cg->atomList.add(cg->parser, pn2->pn_atom);
-                if (!ale)
+                if (!cg->makeAtomIndex(pn2->pn_atom, &atomIndex))
                     return JS_FALSE;
-                atomIndex = ALE_INDEX(ale);
                 if (!pn2->isConst()) {
                     JSOp op = PN_OP(pn2) == JSOP_SETGNAME ? JSOP_BINDGNAME : JSOP_BINDNAME;
                     EMIT_INDEX_OP(op, atomIndex);
                 }
             }
             break;
           case TOK_DOT:
             if (!js_EmitTree(cx, cg, pn2->expr()))
                 return JS_FALSE;
-            ale = cg->atomList.add(cg->parser, pn2->pn_atom);
-            if (!ale)
-                return JS_FALSE;
-            atomIndex = ALE_INDEX(ale);
+            if (!cg->makeAtomIndex(pn2->pn_atom, &atomIndex))
+                return JS_FALSE;
             break;
           case TOK_LB:
             JS_ASSERT(pn2->pn_arity == PN_BINARY);
             if (!js_EmitTree(cx, cg, pn2->pn_left))
                 return JS_FALSE;
             if (!js_EmitTree(cx, cg, pn2->pn_right))
                 return JS_FALSE;
             break;
@@ -6442,17 +6442,17 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
             pn2->pn_op = op;
             if (!BindNameToSlot(cx, cg, pn2))
                 return JS_FALSE;
             op = PN_OP(pn2);
             if (op == JSOP_CALLEE) {
                 if (js_Emit1(cx, cg, op) < 0)
                     return JS_FALSE;
             } else if (!pn2->pn_cookie.isFree()) {
-                atomIndex = (jsatomid) pn2->pn_cookie.asInteger();
+                atomIndex = pn2->pn_cookie.asInteger();
                 EMIT_UINT16_IMM_OP(op, atomIndex);
             } else {
                 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
                 if (!EmitAtomOp(cx, pn2, op, cg))
                     return JS_FALSE;
                 break;
             }
             if (pn2->isConst()) {
@@ -6957,18 +6957,18 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
                 obj = NULL;
                 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
                     return JS_FALSE;
                 if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
                     return JS_FALSE;
             } else {
                 JS_ASSERT(pn3->pn_type == TOK_NAME ||
                           pn3->pn_type == TOK_STRING);
-                ale = cg->atomList.add(cg->parser, pn3->pn_atom);
-                if (!ale)
+                jsatomid index;
+                if (!cg->makeAtomIndex(pn3->pn_atom, &index))
                     return JS_FALSE;
 
                 /* Check whether we can optimize to JSOP_INITMETHOD. */
                 JSParseNode *init = pn2->pn_right;
                 bool lambda = PN_OP(init) == JSOP_LAMBDA;
                 if (lambda)
                     ++methodInits;
                 if (op == JSOP_INITPROP && lambda && init->pn_funbox->joinable()) {
@@ -6993,17 +6993,17 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
                                               UndefinedValue(), NULL, NULL,
                                               JSPROP_ENUMERATE, 0, 0)) {
                         return false;
                     }
                     if (obj->inDictionaryMode())
                         obj = NULL;
                 }
 
-                EMIT_INDEX_OP(op, ALE_INDEX(ale));
+                EMIT_INDEX_OP(op, index);
             }
         }
 
         if (cg->funbox && cg->funbox->shouldUnbrand(methodInits, slowMethodInits)) {
             obj = NULL;
             if (js_Emit1(cx, cg, JSOP_UNBRAND) < 0)
                 return JS_FALSE;
         }
@@ -7125,20 +7125,20 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
             if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
                 return JS_FALSE;
         }
 
         if (pn->pn_xflags & PNX_XMLROOT) {
             if (pn->pn_count == 0) {
                 JS_ASSERT(pn->pn_type == TOK_XMLLIST);
                 atom = cx->runtime->atomState.emptyAtom;
-                ale = cg->atomList.add(cg->parser, atom);
-                if (!ale)
+                jsatomid index;
+                if (!cg->makeAtomIndex(atom, &index))
                     return JS_FALSE;
-                EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
+                EMIT_INDEX_OP(JSOP_STRING, index);
             }
             if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
                 return JS_FALSE;
         }
 #ifdef DEBUG
         else
             JS_ASSERT(pn->pn_count != 0);
 #endif
@@ -7148,23 +7148,24 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
       case TOK_XMLSTAGO:
       case TOK_XMLETAGO:
       {
         uint32 i;
 
         if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
             return JS_FALSE;
 
-        ale = cg->atomList.add(cg->parser,
-                               (pn->pn_type == TOK_XMLETAGO)
-                               ? cx->runtime->atomState.etagoAtom
-                               : cx->runtime->atomState.stagoAtom);
-        if (!ale)
-            return JS_FALSE;
-        EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
+        {
+            jsatomid index;
+            JSAtom *tmp = (pn->pn_type == TOK_XMLETAGO) ? cx->runtime->atomState.etagoAtom
+                                                        : cx->runtime->atomState.stagoAtom;
+            if (!cg->makeAtomIndex(tmp, &index))
+                return JS_FALSE;
+            EMIT_INDEX_OP(JSOP_STRING, index);
+        }
 
         JS_ASSERT(pn->pn_count != 0);
         pn2 = pn->pn_head;
         if (pn2->pn_type == TOK_LC && js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0)
             return JS_FALSE;
         if (!js_EmitTree(cx, cg, pn2))
             return JS_FALSE;
         if (js_Emit1(cx, cg, JSOP_ADD) < 0)
@@ -7182,23 +7183,24 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
                     return JS_FALSE;
             }
             if (js_Emit1(cx, cg,
                          (i & 1) ? JSOP_ADDATTRVAL : JSOP_ADDATTRNAME) < 0) {
                 return JS_FALSE;
             }
         }
 
-        ale = cg->atomList.add(cg->parser,
-                               (pn->pn_type == TOK_XMLPTAGC)
-                               ? cx->runtime->atomState.ptagcAtom
-                               : cx->runtime->atomState.tagcAtom);
-        if (!ale)
-            return JS_FALSE;
-        EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
+        {
+            jsatomid index;
+            JSAtom *tmp = (pn->pn_type == TOK_XMLPTAGC) ? cx->runtime->atomState.ptagcAtom
+                                                        : cx->runtime->atomState.tagcAtom;
+            if (!cg->makeAtomIndex(tmp, &index))
+                return JS_FALSE;
+            EMIT_INDEX_OP(JSOP_STRING, index);
+        }
         if (js_Emit1(cx, cg, JSOP_ADD) < 0)
             return JS_FALSE;
 
         if ((pn->pn_xflags & PNX_XMLROOT) && js_Emit1(cx, cg, PN_OP(pn)) < 0)
             return JS_FALSE;
         break;
       }
 
@@ -7218,25 +7220,26 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
         } else {
             JS_ASSERT(pn->pn_arity == PN_NULLARY);
             ok = (pn->pn_op == JSOP_OBJECT)
                  ? EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg)
                  : EmitAtomOp(cx, pn, PN_OP(pn), cg);
         }
         break;
 
-      case TOK_XMLPI:
-        ale = cg->atomList.add(cg->parser, pn->pn_atom2);
-        if (!ale)
-            return JS_FALSE;
-        if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
+      case TOK_XMLPI: {
+        jsatomid index;
+        if (!cg->makeAtomIndex(pn->pn_atom2, &index))
+            return JS_FALSE;
+        if (!EmitIndexOp(cx, JSOP_QNAMEPART, index, cg))
             return JS_FALSE;
         if (!EmitAtomOp(cx, pn, JSOP_XMLPI, cg))
             return JS_FALSE;
         break;
+      }
 #endif /* JS_HAS_XML_SUPPORT */
 
       default:
         JS_ASSERT(0);
     }
 
     /* cg->emitLevel == 1 means we're last on the stack, so finish up. */
     if (ok && cg->emitLevel == 1) {
--- a/js/src/jsemit.h
+++ b/js/src/jsemit.h
@@ -46,16 +46,20 @@
 #include "jstypes.h"
 #include "jsatom.h"
 #include "jsopcode.h"
 #include "jsparse.h"
 #include "jsscript.h"
 #include "jsprvtd.h"
 #include "jspubtd.h"
 
+#include "frontend/ParseMaps.h"
+
+#include "jsatominlines.h"
+
 JS_BEGIN_EXTERN_C
 
 /*
  * NB: If you add enumerators for scope statements, add them between STMT_WITH
  * and STMT_CATCH, or you will break the STMT_TYPE_IS_SCOPE macro. If you add
  * non-looping statement enumerators, add them before STMT_DO_LOOP or you will
  * break the STMT_TYPE_IS_LOOP macro.
  *
@@ -301,17 +305,17 @@ struct JSTreeContext {              /* t
                                        at non-zero depth in current paren tree */
     JSStmtInfo      *topStmt;       /* top of statement info stack */
     JSStmtInfo      *topScopeStmt;  /* top lexical scope statement */
     JSObjectBox     *blockChainBox; /* compile time block scope chain (NB: one
                                        deeper than the topScopeStmt/downScope
                                        chain when in head of let block/expr) */
     JSParseNode     *blockNode;     /* parse node for a block with let declarations
                                        (block with its own lexical scope)  */
-    JSAtomList      decls;          /* function, const, and var declarations */
+    js::AtomDecls   decls;          /* function, const, and var declarations */
     js::Parser      *parser;        /* ptr to common parsing and lexing data */
     JSParseNode     *yieldNode;     /* parse node for a yield expression that might
                                        be an error if we turn out to be inside a
                                        generator expression */
     JSParseNode     *argumentsNode; /* parse node for an arguments variable that
                                        might be an error if we turn out to be
                                        inside a generator expression */
 
@@ -335,17 +339,17 @@ struct JSTreeContext {              /* t
         JS_ASSERT(!inFunction());
         return scopeChain_;
     }
     void setScopeChain(JSObject *scopeChain) {
         JS_ASSERT(!inFunction());
         scopeChain_ = scopeChain;
     }
 
-    JSAtomList      lexdeps;        /* unresolved lexical name dependencies */
+    js::OwnedAtomDefnMapPtr lexdeps;/* unresolved lexical name dependencies */
     JSTreeContext   *parent;        /* enclosing function or global context */
     uintN           staticLevel;    /* static compilation unit nesting level */
 
     JSFunctionBox   *funbox;        /* null or box for function we're compiling
                                        if (flags & TCF_IN_FUNCTION) and not in
                                        Compiler::compileFunctionBody */
     JSFunctionBox   *functionList;
 
@@ -353,35 +357,50 @@ struct JSTreeContext {              /* t
 
     js::Bindings    bindings;       /* bindings in this code, including
                                        arguments if we're compiling a function */
 
     void trace(JSTracer *trc);
 
     JSTreeContext(js::Parser *prs)
       : flags(0), bodyid(0), blockidGen(0), parenDepth(0), yieldCount(0), argumentsCount(0),
-        topStmt(NULL), topScopeStmt(NULL),
-        blockChainBox(NULL), blockNode(NULL), parser(prs),
-        yieldNode(NULL), argumentsNode(NULL),
-        scopeChain_(NULL), parent(prs->tc), staticLevel(0), funbox(NULL), functionList(NULL),
-        innermostWith(NULL), bindings(prs->context, prs->emptyCallShape),
-        sharpSlotBase(-1)
+        topStmt(NULL), topScopeStmt(NULL), blockChainBox(NULL), blockNode(NULL),
+        decls(prs->context), parser(prs), yieldNode(NULL), argumentsNode(NULL), scopeChain_(NULL),
+        lexdeps(prs->context), parent(prs->tc), staticLevel(0), funbox(NULL), functionList(NULL),
+        innermostWith(NULL), bindings(prs->context, prs->emptyCallShape), sharpSlotBase(-1)
     {
         prs->tc = this;
     }
 
     /*
      * For functions the tree context is constructed and destructed a second
      * time during code generation. To avoid a redundant stats update in such
      * cases, we store uint16(-1) in maxScopeDepth.
      */
     ~JSTreeContext() {
         parser->tc = this->parent;
     }
 
+    /*
+     * JSCodeGenerator derives from JSTreeContext; however, only the top-level
+     * JSCodeGenerators are actually used as full-fledged tree contexts (to
+     * hold decls and lexdeps). We can avoid allocation overhead by making
+     * this distinction explicit.
+     */
+    enum InitBehavior {
+        USED_AS_TREE_CONTEXT,
+        USED_AS_CODE_GENERATOR
+    };
+
+    bool init(JSContext *cx, InitBehavior ib = USED_AS_TREE_CONTEXT) {
+        if (ib == USED_AS_CODE_GENERATOR)
+            return true;
+        return decls.init() && lexdeps.ensureMap(cx);
+    }
+
     uintN blockid() { return topStmt ? topStmt->blockid : bodyid; }
 
     JSObject *blockChain() {
         return blockChainBox ? blockChainBox->object : NULL;
     }
 
     /*
      * True if we are at the topmost level of a entire script or function body.
@@ -588,17 +607,18 @@ struct JSCodeGenerator : public JSTreeCo
         jsbytecode  *next;          /* pointer to next free bytecode */
         jssrcnote   *notes;         /* source notes, see below */
         uintN       noteCount;      /* number of source notes so far */
         uintN       noteMask;       /* growth increment for notes */
         ptrdiff_t   lastNoteOffset; /* code offset for last source note */
         uintN       currentLine;    /* line number for tree-based srcnote gen */
     } prolog, main, *current;
 
-    JSAtomList      atomList;       /* literals indexed for mapping */
+    js::OwnedAtomIndexMapPtr atomIndices; /* literals indexed for mapping */
+    js::AtomDefnMapPtr roLexdeps;
     uintN           firstLine;      /* first line, for js_NewScriptFromCG */
 
     intN            stackDepth;     /* current stack depth in script frame */
     uintN           maxStackDepth;  /* maximum stack depth so far */
 
     uintN           ntrynotes;      /* number of allocated so far try notes */
     JSTryNode       *lastTryNode;   /* the last allocated try node */
 
@@ -618,40 +638,44 @@ struct JSCodeGenerator : public JSTreeCo
     ConstMap        constMap;       /* compile time constants */
 
     JSGCConstList   constList;      /* constants to be included with the script */
 
     JSCGObjectList  objectList;     /* list of emitted objects */
     JSCGObjectList  regexpList;     /* list of emitted regexp that will be
                                        cloned during execution */
 
-    JSAtomList      upvarList;      /* map of atoms to upvar indexes */
+    js::OwnedAtomIndexMapPtr upvarIndices; /* map of atoms to upvar indexes */
     JSUpvarArray    upvarMap;       /* indexed upvar pairs (JS_realloc'ed) */
 
     typedef js::Vector<js::GlobalSlotArray::Entry, 16> GlobalUseVector;
 
     GlobalUseVector globalUses;     /* per-script global uses */
-    JSAtomList      globalMap;      /* per-script map of global name to globalUses vector */
+    js::OwnedAtomIndexMapPtr globalMap; /* per-script map of global name to globalUses vector */
 
     /* Vectors of pn_cookie slot values. */
     typedef js::Vector<uint32, 8> SlotVector;
     SlotVector      closedArgs;
     SlotVector      closedVars;
 
     uint16          traceIndex;     /* index for the next JSOP_TRACE instruction */
 
     /*
      * Initialize cg to allocate bytecode space from codePool, source note
      * space from notePool, and all other arena-allocated temporaries from
      * parser->context->tempPool.
      */
     JSCodeGenerator(js::Parser *parser,
                     JSArenaPool *codePool, JSArenaPool *notePool,
                     uintN lineno);
-    bool init();
+    bool init(JSContext *cx, JSTreeContext::InitBehavior ib = USED_AS_CODE_GENERATOR);
+
+    JSContext *context() {
+        return parser->context;
+    }
 
     /*
      * Release cg->codePool, cg->notePool, and parser->context->tempPool to
      * marks set by JSCodeGenerator's ctor. Note that cgs are magic: they own
      * the arena pool "tops-of-stack" space above their codeMark, noteMark, and
      * tempMark points.  This means you cannot alloc from tempPool and save the
      * pointer beyond the next JSCodeGenerator destructor call.
      */
@@ -668,31 +692,51 @@ struct JSCodeGenerator : public JSTreeCo
      * we use the slot to index into cg->globalScope->defs, and perform a
      * fixup of the script at the very end of compilation.
      *
      * If the global use can be cached, |cookie| will be set to |slot|.
      * Otherwise, |cookie| is set to the free cookie value.
      */
     bool addGlobalUse(JSAtom *atom, uint32 slot, js::UpvarCookie *cookie);
 
+    bool hasUpvarIndices() const {
+        return upvarIndices.hasMap() && !upvarIndices->empty();
+    }
+
     bool hasSharps() const {
         bool rv = !!(flags & TCF_HAS_SHARPS);
         JS_ASSERT((sharpSlotBase >= 0) == rv);
         return rv;
     }
 
     uintN sharpSlots() const {
         return hasSharps() ? SHARP_NSLOTS : 0;
     }
 
     bool compilingForEval() const { return !!(flags & TCF_COMPILE_FOR_EVAL); }
     JSVersion version() const { return parser->versionWithFlags(); }
 
     bool shouldNoteClosedName(JSParseNode *pn);
 
+    JS_ALWAYS_INLINE
+    bool makeAtomIndex(JSAtom *atom, jsatomid *indexp) {
+        js::AtomIndexAddPtr p = atomIndices->lookupForAdd(atom);
+        if (p) {
+            *indexp = p.value();
+            return true;
+        }
+
+        jsatomid index = atomIndices->count();
+        if (!atomIndices->add(p, atom, index))
+            return false;
+
+        *indexp = index;
+        return true;
+    }
+
     bool checkSingletonContext() {
         if (!compileAndGo() || inFunction())
             return false;
         for (JSStmtInfo *stmt = topStmt; stmt; stmt = stmt->down) {
             if (STMT_IS_LOOP(stmt))
                 return false;
         }
         flags |= TCF_HAS_SINGLETONS;
--- a/js/src/jshashtable.h
+++ b/js/src/jshashtable.h
@@ -50,63 +50,75 @@ namespace js {
 
 /* Integral types for all hash functions. */
 typedef uint32 HashNumber;
 
 /*****************************************************************************/
 
 namespace detail {
 
+template <class T, class HashPolicy, class AllocPolicy>
+class HashTable;
+
+template <class T>
+class HashTableEntry {
+    HashNumber keyHash;
+
+    typedef typename tl::StripConst<T>::result NonConstT;
+
+    static const HashNumber sFreeKey = 0;
+    static const HashNumber sRemovedKey = 1;
+    static const HashNumber sCollisionBit = 1;
+
+    template <class, class, class> friend class HashTable;
+
+    static bool isLiveHash(HashNumber hash)
+    {
+        return hash > sRemovedKey;
+    }
+
+  public:
+    HashTableEntry() : keyHash(0), t() {}
+    void operator=(const HashTableEntry &rhs) { keyHash = rhs.keyHash; t = rhs.t; }
+
+    NonConstT t;
+
+    bool isFree() const           { return keyHash == sFreeKey; }
+    void setFree()                { keyHash = sFreeKey; t = T(); }
+    bool isRemoved() const        { return keyHash == sRemovedKey; }
+    void setRemoved()             { keyHash = sRemovedKey; t = T(); }
+    bool isLive() const           { return isLiveHash(keyHash); }
+    void setLive(HashNumber hn)   { JS_ASSERT(isLiveHash(hn)); keyHash = hn; }
+
+    void setCollision()           { JS_ASSERT(isLive()); keyHash |= sCollisionBit; }
+    void setCollision(HashNumber collisionBit) {
+        JS_ASSERT(isLive()); keyHash |= collisionBit;
+    }
+    void unsetCollision()         { JS_ASSERT(isLive()); keyHash &= ~sCollisionBit; }
+    bool hasCollision() const     { JS_ASSERT(isLive()); return keyHash & sCollisionBit; }
+    bool matchHash(HashNumber hn) { return (keyHash & ~sCollisionBit) == hn; }
+    HashNumber getKeyHash() const { JS_ASSERT(!hasCollision()); return keyHash; }
+};
+
 /*
  * js::detail::HashTable is an implementation detail of the js::HashMap and
  * js::HashSet templates. For js::Hash{Map,Set} API documentation and examples,
  * skip to the end of the detail namespace.
  */
 
 /* Reusable implementation of HashMap and HashSet. */
 template <class T, class HashPolicy, class AllocPolicy>
 class HashTable : private AllocPolicy
 {
     typedef typename tl::StripConst<T>::result NonConstT;
     typedef typename HashPolicy::KeyType Key;
     typedef typename HashPolicy::Lookup Lookup;
 
-    /*
-     * T::operator= is a private operation for HashMap::Entry. HashMap::Entry
-     * makes HashTable a friend, but MSVC does not allow HashMap::Entry to make
-     * HashTable::Entry a friend. So do assignment here:
-     */
-    static void assignT(NonConstT &dst, const T &src) { dst = src; }
-
   public:
-    class Entry {
-        HashNumber keyHash;
-
-      public:
-        Entry() : keyHash(0), t() {}
-        void operator=(const Entry &rhs) { keyHash = rhs.keyHash; assignT(t, rhs.t); }
-
-        NonConstT t;
-
-        bool isFree() const           { return keyHash == sFreeKey; }
-        void setFree()                { keyHash = sFreeKey; assignT(t, T()); }
-        bool isRemoved() const        { return keyHash == sRemovedKey; }
-        void setRemoved()             { keyHash = sRemovedKey; assignT(t, T()); }
-        bool isLive() const           { return isLiveHash(keyHash); }
-        void setLive(HashNumber hn)   { JS_ASSERT(isLiveHash(hn)); keyHash = hn; }
-
-        void setCollision()           { JS_ASSERT(isLive()); keyHash |= sCollisionBit; }
-        void setCollision(HashNumber collisionBit) {
-            JS_ASSERT(isLive()); keyHash |= collisionBit;
-        }
-        void unsetCollision()         { JS_ASSERT(isLive()); keyHash &= ~sCollisionBit; }
-        bool hasCollision() const     { JS_ASSERT(isLive()); return keyHash & sCollisionBit; }
-        bool matchHash(HashNumber hn) { return (keyHash & ~sCollisionBit) == hn; }
-        HashNumber getKeyHash() const { JS_ASSERT(!hasCollision()); return keyHash; }
-    };
+    typedef HashTableEntry<T> Entry;
 
     /*
      * A nullable pointer to a hash table element. A Ptr |p| can be tested
      * either explicitly |if (p.found()) p->...| or using boolean conversion
      * |if (p) p->...|. Ptr objects must not be used after any mutating hash
      * table operations unless |generation()| is tested.
      */
     class Ptr
@@ -169,16 +181,18 @@ class HashTable : private AllocPolicy
         Range(Entry *c, Entry *e) : cur(c), end(e) {
             while (cur != end && !cur->isLive())
                 ++cur;
         }
 
         Entry *cur, *end;
 
       public:
+        Range() : cur(NULL), end(NULL) {}
+
         bool empty() const {
             return cur == end;
         }
 
         T &front() const {
             JS_ASSERT(!empty());
             return cur->t;
         }
@@ -282,23 +296,23 @@ class HashTable : private AllocPolicy
     static const unsigned sMinSizeLog2  = 4;
     static const unsigned sMinSize      = 1 << sMinSizeLog2;
     static const unsigned sSizeLimit    = JS_BIT(24);
     static const unsigned sHashBits     = tl::BitSize<HashNumber>::result;
     static const uint8    sMinAlphaFrac = 64;  /* (0x100 * .25) taken from jsdhash.h */
     static const uint8    sMaxAlphaFrac = 192; /* (0x100 * .75) taken from jsdhash.h */
     static const uint8    sInvMaxAlpha  = 171; /* (ceil(0x100 / .75) >> 1) */
     static const HashNumber sGoldenRatio  = 0x9E3779B9U;       /* taken from jsdhash.h */
-    static const HashNumber sCollisionBit = 1;
-    static const HashNumber sFreeKey = 0;
-    static const HashNumber sRemovedKey = 1;
+    static const HashNumber sFreeKey = Entry::sFreeKey;
+    static const HashNumber sRemovedKey = Entry::sRemovedKey;
+    static const HashNumber sCollisionBit = Entry::sCollisionBit;
 
     static bool isLiveHash(HashNumber hash)
     {
-        return hash > sRemovedKey;
+        return Entry::isLiveHash(hash);
     }
 
     static HashNumber prepareHash(const Lookup& l)
     {
         HashNumber keyHash = HashPolicy::hash(l);
 
         /* Improve keyHash distribution. */
         keyHash *= sGoldenRatio;
@@ -568,18 +582,22 @@ class HashTable : private AllocPolicy
             METER(stats.shrinks++);
             (void) changeTableSize(-1);
         }
     }
 
   public:
     void clear()
     {
-        for (Entry *e = table, *end = table + tableCapacity; e != end; ++e)
-            *e = Entry();
+        if (tl::IsPodType<Entry>::result) {
+            memset(table, 0, sizeof(*table) * tableCapacity);
+        } else {
+            for (Entry *e = table, *end = table + tableCapacity; e != end; ++e)
+                *e = Entry();
+        }
         removedCount = 0;
         entryCount = 0;
 #ifdef DEBUG
         mutationCount++;
 #endif
     }
 
     void finish()
@@ -793,16 +811,49 @@ struct PointerHasher
  * Specialized hashing policy for pointer types. It assumes that the type is
  * at least word-aligned. For types with smaller size use PointerHasher.
  */
 template <class T>
 struct DefaultHasher<T *>: PointerHasher<T *, tl::FloorLog2<sizeof(void *)>::result> { };
 
 /* Looking for a hasher for jsid?  Try the DefaultHasher<jsid> in jsatom.h. */
 
+template <class Key, class Value>
+class HashMapEntry
+{
+    template <class, class, class> friend class detail::HashTable;
+    template <class> friend class detail::HashTableEntry;
+    void operator=(const HashMapEntry &rhs) {
+        const_cast<Key &>(key) = rhs.key;
+        value = rhs.value;
+    }
+
+  public:
+    HashMapEntry() : key(), value() {}
+    HashMapEntry(const Key &k, const Value &v) : key(k), value(v) {}
+
+    const Key key;
+    Value value;
+};
+
+namespace tl {
+
+template <class T>
+struct IsPodType<detail::HashTableEntry<T> > {
+    static const bool result = IsPodType<T>::result;
+};
+
+template <class K, class V>
+struct IsPodType<HashMapEntry<K, V> >
+{
+    static const bool result = IsPodType<K>::result && IsPodType<V>::result;
+};
+
+} /* namespace tl */
+
 /*
  * JS-friendly, STL-like container providing a hash-based map from keys to
  * values. In particular, HashMap calls constructors and destructors of all
  * objects added so non-PODs may be used safely.
  *
  * Key/Value requirements:
  *  - default constructible, copyable, destructible, assignable
  * HashPolicy requirements:
@@ -815,31 +866,17 @@ struct DefaultHasher<T *>: PointerHasher
  * N.B: Due to the lack of exception handling, the user must call |init()|.
  */
 template <class Key, class Value, class HashPolicy, class AllocPolicy>
 class HashMap
 {
   public:
     typedef typename HashPolicy::Lookup Lookup;
 
-    class Entry
-    {
-        template <class, class, class> friend class detail::HashTable;
-        void operator=(const Entry &rhs) {
-            const_cast<Key &>(key) = rhs.key;
-            value = rhs.value;
-        }
-
-      public:
-        Entry() : key(), value() {}
-        Entry(const Key &k, const Value &v) : key(k), value(v) {}
-
-        const Key key;
-        Value value;
-    };
+    typedef HashMapEntry<Key, Value> Entry;
 
   private:
     /* Implement HashMap using HashTable. Lift |Key| operations to |Entry|. */
     struct MapHashPolicy : HashPolicy
     {
         typedef Key KeyType;
         static const Key &getKey(Entry &e) { return e.key; }
     };
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -1411,17 +1411,17 @@ js_CloneBlockObject(JSContext *cx, JSObj
 extern JS_REQUIRES_STACK JSBool
 js_PutBlockObject(JSContext *cx, JSBool normalUnwind);
 
 JSBool
 js_XDRBlockObject(JSXDRState *xdr, JSObject **objp);
 
 struct JSSharpObjectMap {
     jsrefcount  depth;
-    jsatomid    sharpgen;
+    uint32      sharpgen;
     JSHashTable *table;
 };
 
 #define SHARP_BIT       ((jsatomid) 1)
 #define BUSY_BIT        ((jsatomid) 2)
 #define SHARP_ID_SHIFT  2
 #define IS_SHARP(he)    (uintptr_t((he)->value) & SHARP_BIT)
 #define MAKE_SHARP(he)  ((he)->value = (void *) (uintptr_t((he)->value)|SHARP_BIT))
--- a/js/src/jsparse.cpp
+++ b/js/src/jsparse.cpp
@@ -90,16 +90,18 @@
 #include "jsdhash.h"
 #endif
 
 #include "jsatominlines.h"
 #include "jsobjinlines.h"
 #include "jsregexpinlines.h"
 #include "jsscriptinlines.h"
 
+#include "frontend/ParseMaps-inl.h"
+
 // Grr, windows.h or something under it #defines CONST...
 #ifdef CONST
 #undef CONST
 #endif
 
 using namespace js;
 using namespace js::gc;
 
@@ -176,39 +178,41 @@ JSParseNode::clear()
     pn_used = pn_defn = false;
     pn_arity = PN_NULLARY;
     pn_parens = false;
 }
 
 Parser::Parser(JSContext *cx, JSPrincipals *prin, StackFrame *cfp, bool foldConstants)
   : js::AutoGCRooter(cx, PARSER),
     context(cx),
-    aleFreeList(NULL),
     tokenStream(cx),
     principals(NULL),
     callerFrame(cfp),
     callerVarObj(cfp ? &cfp->varObj() : NULL),
     nodeList(NULL),
     functionCount(0),
     traceListHead(NULL),
     tc(NULL),
     emptyCallShape(NULL),
     keepAtoms(cx->runtime),
     foldConstants(foldConstants)
 {
+    cx->activeCompilations++;
     js::PodArrayZero(tempFreeList);
     setPrincipals(prin);
     JS_ASSERT_IF(cfp, cfp->isScriptFrame());
 }
 
 bool
 Parser::init(const jschar *base, size_t length, const char *filename, uintN lineno,
              JSVersion version)
 {
     JSContext *cx = context;
+    if (!cx->ensureParseMapPool())
+        return false;
     emptyCallShape = EmptyShape::getEmptyCallShape(cx);
     if (!emptyCallShape)
         return false;
     tempPoolMark = JS_ARENA_MARK(&cx->tempPool);
     if (!tokenStream.init(base, length, filename, lineno, version)) {
         JS_ARENA_RELEASE(&cx->tempPool, tempPoolMark);
         return false;
     }
@@ -217,16 +221,17 @@ Parser::init(const jschar *base, size_t 
 
 Parser::~Parser()
 {
     JSContext *cx = context;
 
     if (principals)
         JSPRINCIPALS_DROP(cx, principals);
     JS_ARENA_RELEASE(&cx->tempPool, tempPoolMark);
+    cx->activeCompilations--;
 }
 
 void
 Parser::setPrincipals(JSPrincipals *prin)
 {
     JS_ASSERT(!principals);
     if (prin)
         JSPRINCIPALS_HOLD(context, prin);
@@ -362,24 +367,27 @@ Parser::trace(JSTracer *trc)
 /* Add |node| to |parser|'s free node list. */
 static inline void
 AddNodeToFreeList(JSParseNode *pn, js::Parser *parser)
 {
     /* Catch back-to-back dup recycles. */
     JS_ASSERT(pn != parser->nodeList);
 
     /* 
-     * It's too hard to clear these nodes from the JSAtomLists, etc. that
-     * hold references to them, so we never free them. It's our caller's
-     * job to recognize and process these, since their children do need to
-     * be dealt with.
+     * It's too hard to clear these nodes from the AtomDefnMaps, etc. that
+     * hold references to them, so we never free them. It's our caller's job to
+     * recognize and process these, since their children do need to be dealt
+     * with.
      */
     JS_ASSERT(!pn->pn_used);
     JS_ASSERT(!pn->pn_defn);
 
+    if (pn->pn_arity == PN_NAMESET && pn->pn_names.hasMap())
+        pn->pn_names.releaseMap(parser->context);
+
 #ifdef DEBUG
     /* Poison the node, to catch attempts to use it without initializing it. */
     memset(pn, 0xab, sizeof(*pn));
 #endif
 
     pn->pn_next = parser->nodeList;
     parser->nodeList = pn;
 }
@@ -537,17 +545,17 @@ PushNodeChildren(JSParseNode *pn, NodeSt
          */
         pn->pn_funbox = NULL;
         stack->pushUnlessNull(pn->pn_body);
         pn->pn_body = NULL;
         return false;
 
       case PN_NAME:
         /*
-         * Because used/defn nodes appear in JSAtomLists and elsewhere, we
+         * Because used/defn nodes appear in AtomDefnMaps and elsewhere, we
          * don't recycle them. (We'll recover their storage when we free
          * the temporary arena.) However, we do recycle the nodes around
          * them, so clean up the pointers to avoid dangling references. The
          * top-level decls table carries references to them that later
          * iterations through the compileScript loop may find, so they need
          * to be neat.
          *
          * pn_expr and pn_lexdef share storage; the latter isn't an owning
@@ -834,16 +842,18 @@ Parser::parse(JSObject *chain)
      * Protect atoms from being collected by a GC activation, which might
      * - nest on this thread due to out of memory (the so-called "last ditch"
      *   GC attempted within js_NewGCThing), or
      * - run for any reason on another thread if this thread is suspended on
      *   an object lock before it finishes generating bytecode into a script
      *   protected from the GC by a root or a stack frame reference.
      */
     JSTreeContext globaltc(this);
+    if (!globaltc.init(context))
+        return NULL;
     globaltc.setScopeChain(chain);
     if (!GenerateBlockId(&globaltc, globaltc.bodyid))
         return NULL;
 
     JSParseNode *pn = statements();
     if (pn) {
         if (!tokenStream.matchToken(TOK_EOF)) {
             reportErrorNumber(NULL, JSREPORT_ERROR, JSMSG_SYNTAX_ERROR);
@@ -873,19 +883,18 @@ SetStaticLevel(JSTreeContext *tc, uintN 
     tc->staticLevel = staticLevel;
     return true;
 }
 
 /*
  * Compile a top-level script.
  */
 Compiler::Compiler(JSContext *cx, JSPrincipals *prin, StackFrame *cfp)
-  : parser(cx, prin, cfp)
-{
-}
+  : parser(cx, prin, cfp), globalScope(NULL)
+{}
 
 JSScript *
 Compiler::compileScript(JSContext *cx, JSObject *scopeChain, StackFrame *callerFrame,
                         JSPrincipals *principals, uint32 tcflags,
                         const jschar *chars, size_t length,
                         const char *filename, uintN lineno, JSVersion version,
                         JSString *source /* = NULL */,
                         uintN staticLevel /* = 0 */)
@@ -912,35 +921,34 @@ Compiler::compileScript(JSContext *cx, J
 
     JS_InitArenaPool(&codePool, "code", 1024, sizeof(jsbytecode));
     JS_InitArenaPool(&notePool, "note", 1024, sizeof(jssrcnote));
 
     Parser &parser = compiler.parser;
     TokenStream &tokenStream = parser.tokenStream;
 
     JSCodeGenerator cg(&parser, &codePool, &notePool, tokenStream.getLineno());
-    if (!cg.init())
+    if (!cg.init(cx, JSTreeContext::USED_AS_TREE_CONTEXT))
         return NULL;
 
     MUST_FLOW_THROUGH("out");
 
     // We can specialize a bit for the given scope chain if that scope chain is the global object.
     JSObject *globalObj = scopeChain && scopeChain == scopeChain->getGlobal()
                         ? scopeChain->getGlobal()
                         : NULL;
-    js::GlobalScope globalScope(cx, globalObj, &cg);
-    if (globalObj) {
-        JS_ASSERT(globalObj->isNative());
-        JS_ASSERT((globalObj->getClass()->flags & JSCLASS_GLOBAL_FLAGS) == JSCLASS_GLOBAL_FLAGS);
-    }
+
+    JS_ASSERT_IF(globalObj, globalObj->isNative());
+    JS_ASSERT_IF(globalObj, (globalObj->getClass()->flags & JSCLASS_GLOBAL_FLAGS) ==
+                            JSCLASS_GLOBAL_FLAGS);
 
     /* Null script early in case of error, to reduce our code footprint. */
     script = NULL;
 
-    globalScope.cg = &cg;
+    GlobalScope globalScope(cx, globalObj, &cg);
     cg.flags |= tcflags;
     cg.setScopeChain(scopeChain);
     compiler.globalScope = &globalScope;
     if (!SetStaticLevel(&cg, staticLevel))
         goto out;
 
     /* If this is a direct call to eval, inherit the caller's strictness.  */
     if (callerFrame &&
@@ -959,17 +967,18 @@ Compiler::compileScript(JSContext *cx, J
 
     if (tcflags & TCF_COMPILE_N_GO) {
         if (source) {
             /*
              * Save eval program source in script->atomMap.vector[0] for the
              * eval cache (see EvalCacheLookup in jsobj.cpp).
              */
             JSAtom *atom = js_AtomizeString(cx, source);
-            if (!atom || !cg.atomList.add(&parser, atom))
+            jsatomid _;
+            if (!atom || !cg.makeAtomIndex(atom, &_))
                 goto out;
         }
 
         if (callerFrame && callerFrame->isFunctionFrame()) {
             /*
              * An eval script in a caller frame needs to have its enclosing
              * function captured in case it refers to an upvar, and someone
              * wishes to decompile it while it's running.
@@ -1410,17 +1419,17 @@ CheckStrictBinding(JSContext *cx, JSTree
     }
 
     return true;
 }
 
 static bool
 ReportBadParameter(JSContext *cx, JSTreeContext *tc, JSAtom *name, uintN errorNumber)
 {
-    JSDefinition *dn = ALE_DEFN(tc->decls.lookup(name));
+    JSDefinition *dn = tc->decls.lookupFirst(name);
     JSAutoByteString bytes;
     return js_AtomToPrintableString(cx, name, &bytes) &&
            ReportStrictModeError(cx, TS(tc->parser), tc, dn, errorNumber, bytes.ptr());
 }
 
 /*
  * In strict mode code, all parameter names must be distinct, must not be
  * strict mode reserved keywords, and must not be 'eval' or 'arguments'.  We
@@ -1540,79 +1549,81 @@ Parser::functionBody()
             pn = NULL;
         }
     }
 
     tc->flags = oldflags | (tc->flags & TCF_FUN_FLAGS);
     return pn;
 }
 
-static JSAtomListElement *
-MakePlaceholder(JSParseNode *pn, JSTreeContext *tc)
-{
-    JSAtomListElement *ale = tc->lexdeps.add(tc->parser, pn->pn_atom);
-    if (!ale)
-        return NULL;
-
-    JSDefinition *dn = (JSDefinition *)NameNode::create(pn->pn_atom, tc);
+/*
+ * Creates a placeholder JSDefinition node for |atom| and adds it to the
+ * current lexdeps.
+ */
+static JSDefinition *
+MakePlaceholder(AtomDefnAddPtr &p, JSParseNode *pn, JSTreeContext *tc)
+{
+    JSAtom *atom = pn->pn_atom;
+    JSDefinition *dn = (JSDefinition *) NameNode::create(atom, tc);
     if (!dn)
         return NULL;
 
-    ALE_SET_DEFN(ale, dn);
+    if (!tc->lexdeps->add(p, atom, dn))
+        return NULL;
+
     dn->pn_type = TOK_NAME;
     dn->pn_op = JSOP_NOP;
     dn->pn_defn = true;
     dn->pn_dflags |= PND_PLACEHOLDER;
-    return ale;
+    return dn;
 }
 
 static bool
 Define(JSParseNode *pn, JSAtom *atom, JSTreeContext *tc, bool let = false)
 {
     JS_ASSERT(!pn->pn_used);
     JS_ASSERT_IF(pn->pn_defn, pn->isPlaceholder());
 
-    JSHashEntry **hep;
-    JSAtomListElement *ale = NULL;
-    JSAtomList *list = NULL;
+    bool foundLexdep = false;
+    JSDefinition *dn = NULL;
 
     if (let)
-        ale = (list = &tc->decls)->rawLookup(atom, hep);
-    if (!ale)
-        ale = (list = &tc->lexdeps)->rawLookup(atom, hep);
-
-    if (ale) {
-        JSDefinition *dn = ALE_DEFN(ale);
-        if (dn != pn) {
-            JSParseNode **pnup = &dn->dn_uses;
-            JSParseNode *pnu;
-            uintN start = let ? pn->pn_blockid : tc->bodyid;
-
-            while ((pnu = *pnup) != NULL && pnu->pn_blockid >= start) {
-                JS_ASSERT(pnu->pn_used);
-                pnu->pn_lexdef = (JSDefinition *) pn;
-                pn->pn_dflags |= pnu->pn_dflags & PND_USE2DEF_FLAGS;
-                pnup = &pnu->pn_link;
-            }
-
-            if (pnu != dn->dn_uses) {
-                *pnup = pn->dn_uses;
-                pn->dn_uses = dn->dn_uses;
-                dn->dn_uses = pnu;
-
-                if ((!pnu || pnu->pn_blockid < tc->bodyid) && list != &tc->decls)
-                    list->rawRemove(tc->parser, ale, hep);
-            }
-        }
-    }
-
-    ale = tc->decls.add(tc->parser, atom, let ? JSAtomList::SHADOW : JSAtomList::UNIQUE);
-    if (!ale)
+        dn = tc->decls.lookupFirst(atom);
+
+    if (!dn) {
+        dn = tc->lexdeps.lookupDefn(atom);
+        foundLexdep = !!dn;
+    }
+
+    if (dn && dn != pn) {
+        JSParseNode **pnup = &dn->dn_uses;
+        JSParseNode *pnu;
+        uintN start = let ? pn->pn_blockid : tc->bodyid;
+
+        while ((pnu = *pnup) != NULL && pnu->pn_blockid >= start) {
+            JS_ASSERT(pnu->pn_used);
+            pnu->pn_lexdef = (JSDefinition *) pn;
+            pn->pn_dflags |= pnu->pn_dflags & PND_USE2DEF_FLAGS;
+            pnup = &pnu->pn_link;
+        }
+
+        if (pnu != dn->dn_uses) {
+            *pnup = pn->dn_uses;
+            pn->dn_uses = dn->dn_uses;
+            dn->dn_uses = pnu;
+
+            if ((!pnu || pnu->pn_blockid < tc->bodyid) && foundLexdep)
+                tc->lexdeps->remove(atom);
+        }
+    }
+
+    JSDefinition *toAdd = (JSDefinition *) pn;
+    bool ok = let ? tc->decls.addShadow(atom, toAdd) : tc->decls.addUnique(atom, toAdd);
+    if (!ok)
         return false;
-    ALE_SET_DEFN(ale, pn);
     pn->pn_defn = true;
     pn->pn_dflags &= ~PND_PLACEHOLDER;
     if (!tc->parent)
         pn->pn_dflags |= PND_TOPLEVEL;
     return true;
 }
 
 static void
@@ -1782,17 +1793,17 @@ Compiler::compileFunctionBody(JSContext 
     JSArenaPool codePool, notePool;
     JS_InitArenaPool(&codePool, "code", 1024, sizeof(jsbytecode));
     JS_InitArenaPool(&notePool, "note", 1024, sizeof(jssrcnote));
 
     Parser &parser = compiler.parser;
     TokenStream &tokenStream = parser.tokenStream;
 
     JSCodeGenerator funcg(&parser, &codePool, &notePool, tokenStream.getLineno());
-    if (!funcg.init())
+    if (!funcg.init(cx, JSTreeContext::USED_AS_TREE_CONTEXT))
         return false;
 
     funcg.flags |= TCF_IN_FUNCTION;
     funcg.setFunction(fun);
     funcg.bindings.transfer(cx, bindings);
     fun->setArgCount(funcg.bindings.countArgs());
     if (!GenerateBlockId(&funcg, funcg.bodyid))
         return false;
@@ -1918,17 +1929,17 @@ BindDestructuringArg(JSContext *cx, Bind
 
     JS_ASSERT(tc->inFunction());
 
     /*
      * NB: Check tc->decls rather than tc->bindings, because destructuring
      *     bindings aren't added to tc->bindings until after all arguments have
      *     been parsed.
      */
-    if (tc->decls.lookup(atom)) {
+    if (tc->decls.lookupFirst(atom)) {
         ReportCompileErrorNumber(cx, TS(tc->parser), NULL, JSREPORT_ERROR,
                                  JSMSG_DESTRUCT_DUP_ARG);
         return JS_FALSE;
     }
 
     JSParseNode *pn = data->pn;
 
     /*
@@ -2077,24 +2088,22 @@ FindFunArgs(JSFunctionBox *funbox, int l
          * Compute in skipmin the least distance from fun's static level up to
          * an upvar, whether used directly by fun, or indirectly by a function
          * nested in fun.
          */
         uintN skipmin = UpvarCookie::FREE_LEVEL;
         JSParseNode *pn = fn->pn_body;
 
         if (pn->pn_type == TOK_UPVARS) {
-            JSAtomList upvars(pn->pn_names);
-            JS_ASSERT(upvars.count != 0);
-
-            JSAtomListIterator iter(&upvars);
-            JSAtomListElement *ale;
-
-            while ((ale = iter()) != NULL) {
-                JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
+            AtomDefnMapPtr &upvars = pn->pn_names;
+            JS_ASSERT(upvars->count() != 0);
+
+            for (AtomDefnRange r = upvars->all(); !r.empty(); r.popFront()) {
+                JSDefinition *defn = r.front().value();
+                JSDefinition *lexdep = defn->resolve();
 
                 if (!lexdep->isFreeVar()) {
                     uintN upvarLevel = lexdep->frameLevel();
 
                     if (int(upvarLevel) <= fnlevel)
                         fn->setFunArg();
 
                     uintN skip = (funbox->level + 1) - upvarLevel;
@@ -2158,24 +2167,22 @@ Parser::markFunArgs(JSFunctionBox *funbo
 
     FindFunArgs(funbox, -1, &queue);
     while ((funbox = queue.pull()) != NULL) {
         JSParseNode *fn = funbox->node;
         JS_ASSERT(fn->isFunArg());
 
         JSParseNode *pn = fn->pn_body;
         if (pn->pn_type == TOK_UPVARS) {
-            JSAtomList upvars(pn->pn_names);
-            JS_ASSERT(upvars.count != 0);
-
-            JSAtomListIterator iter(&upvars);
-            JSAtomListElement *ale;
-
-            while ((ale = iter()) != NULL) {
-                JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
+            AtomDefnMapPtr upvars = pn->pn_names;
+            JS_ASSERT(!upvars->empty());
+
+            for (AtomDefnRange r = upvars->all(); !r.empty(); r.popFront()) {
+                JSDefinition *defn = r.front().value();
+                JSDefinition *lexdep = defn->resolve();
 
                 if (!lexdep->isFreeVar() &&
                     !lexdep->isFunArg() &&
                     (lexdep->kind() == JSDefinition::FUNCTION ||
                      PN_OP(lexdep) == JSOP_CALLEE)) {
                     /*
                      * Mark this formerly-Algol-like function as an escaping
                      * function (i.e., as a funarg), because it is used from
@@ -2484,54 +2491,54 @@ Parser::setFunctionKinds(JSFunctionBox *
              * closures, and this can be detected via mutation. Open question:
              * do popular implementations create unique function objects for
              * null closures?
              *
              * FIXME: bug 476950.
              */
             FUN_SET_KIND(fun, JSFUN_NULL_CLOSURE);
         } else {
-            JSAtomList upvars(pn->pn_names);
-            JS_ASSERT(upvars.count != 0);
-
-            JSAtomListIterator iter(&upvars);
-            JSAtomListElement *ale;
+            AtomDefnMapPtr upvars = pn->pn_names;
+            JS_ASSERT(!upvars->empty());
 
             if (!fn->isFunArg()) {
                 /*
                  * This function is Algol-like, it never escapes.
                  *
                  * Check that at least one outer lexical binding was assigned
                  * to (global variables don't count). This is conservative: we
                  * could limit assignments to those in the current function,
                  * but that's too much work. As with flat closures (handled
                  * below), we optimize for the case where outer bindings are
                  * not reassigned anywhere.
                  */
-                while ((ale = iter()) != NULL) {
-                    JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
+                AtomDefnRange r = upvars->all();
+                for (; !r.empty(); r.popFront()) {
+                    JSDefinition *defn = r.front().value();
+                    JSDefinition *lexdep = defn->resolve();
 
                     if (!lexdep->isFreeVar()) {
                         JS_ASSERT(lexdep->frameLevel() <= funbox->level);
                         break;
                     }
                 }
 
-                if (!ale)
+                if (r.empty())
                     FUN_SET_KIND(fun, JSFUN_NULL_CLOSURE);
             } else {
                 uintN nupvars = 0, nflattened = 0;
 
                 /*
                  * For each lexical dependency from this closure to an outer
                  * binding, analyze whether it is safe to copy the binding's
                  * value into a flat closure slot when the closure is formed.
                  */
-                while ((ale = iter()) != NULL) {
-                    JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
+                for (AtomDefnRange r = upvars->all(); !r.empty(); r.popFront()) {
+                    JSDefinition *defn = r.front().value();
+                    JSDefinition *lexdep = defn->resolve();
 
                     if (!lexdep->isFreeVar()) {
                         ++nupvars;
                         if (CanFlattenUpvar(lexdep, funbox, *tcflags)) {
                             ++nflattened;
                             continue;
                         }
 
@@ -2583,24 +2590,22 @@ Parser::setFunctionKinds(JSFunctionBox *
              * One or more upvars cannot be safely snapshot into a flat
              * closure's non-reserved slot (see JSOP_GETFCSLOT), so we loop
              * again over all upvars, and for each non-free upvar, ensure that
              * its containing function has been flagged as heavyweight.
              *
              * The emitter must see TCF_FUN_HEAVYWEIGHT accurately before
              * generating any code for a tree of nested functions.
              */
-            JSAtomList upvars(pn->pn_names);
-            JS_ASSERT(upvars.count != 0);
-
-            JSAtomListIterator iter(&upvars);
-            JSAtomListElement *ale;
-
-            while ((ale = iter()) != NULL) {
-                JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
+            AtomDefnMapPtr upvars = pn->pn_names;
+            JS_ASSERT(!upvars->empty());
+
+            for (AtomDefnRange r = upvars->all(); !r.empty(); r.popFront()) {
+                JSDefinition *defn = r.front().value();
+                JSDefinition *lexdep = defn->resolve();
                 if (!lexdep->isFreeVar())
                     FlagHeavyweights(lexdep, funbox, tcflags);
             }
         }
 
         if (funbox->joinable())
             fun->setJoinable();
 
@@ -2701,24 +2706,22 @@ LeaveFunction(JSParseNode *fn, JSTreeCon
 
     /*
      * Propagate unresolved lexical names up to tc->lexdeps, and save a copy
      * of funtc->lexdeps in a TOK_UPVARS node wrapping the function's formal
      * params and body. We do this only if there are lexical dependencies not
      * satisfied by the function's declarations, to avoid penalizing functions
      * that use only their arguments and other local bindings.
      */
-    if (funtc->lexdeps.count != 0) {
-        JSAtomListIterator iter(&funtc->lexdeps);
-        JSAtomListElement *ale;
+    if (funtc->lexdeps->count()) {
         int foundCallee = 0;
 
-        while ((ale = iter()) != NULL) {
-            JSAtom *atom = ALE_ATOM(ale);
-            JSDefinition *dn = ALE_DEFN(ale);
+        for (AtomDefnRange r = funtc->lexdeps->all(); !r.empty(); r.popFront()) {
+            JSAtom *atom = r.front().key();
+            JSDefinition *dn = r.front().value();
             JS_ASSERT(dn->isPlaceholder());
 
             if (atom == funAtom && kind == Expression) {
                 dn->pn_op = JSOP_CALLEE;
                 dn->pn_cookie.set(funtc->staticLevel, UpvarCookie::CALLEE_SLOT);
                 dn->pn_dflags |= PND_BOUND;
 
                 /*
@@ -2742,57 +2745,59 @@ LeaveFunction(JSParseNode *fn, JSTreeCon
                 for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
                     if (pnu->isAssigned() && pnu->pn_blockid >= funtc->bodyid) {
                         funbox->tcflags |= TCF_FUN_SETS_OUTER_NAME;
                         break;
                     }
                 }
             }
 
-            JSAtomListElement *outer_ale = tc->decls.lookup(atom);
+            JSDefinition *outer_dn = tc->decls.lookupFirst(atom);
 
             /*
              * Make sure to deoptimize lexical dependencies that are polluted
              * by eval or with, to safely bind globals (see bug 561923).
              */
             if (funtc->callsEval() ||
-                (outer_ale && tc->innermostWith &&
-                 ALE_DEFN(outer_ale)->pn_pos < tc->innermostWith->pn_pos)) {
+                (outer_dn && tc->innermostWith &&
+                 outer_dn->pn_pos < tc->innermostWith->pn_pos)) {
                 DeoptimizeUsesWithin(dn, fn->pn_pos);
             }
 
-            if (!outer_ale)
-                outer_ale = tc->lexdeps.lookup(atom);
-            if (!outer_ale) {
-                /* 
-                 * Create a new placeholder for our outer lexdep. We could simply re-use
-                 * the inner placeholder, but that introduces subtleties in the case where
-                 * we find a later definition that captures an existing lexdep. For
-                 * example:
-                 *
-                 *   function f() { function g() { x; } let x; }
-                 *
-                 * Here, g's TOK_UPVARS node lists the placeholder for x, which must be
-                 * captured by the 'let' declaration later, since 'let's are hoisted.
-                 * Taking g's placeholder as our own would work fine. But consider:
-                 *
-                 *   function f() { x; { function g() { x; } let x; } }
-                 *
-                 * Here, the 'let' must not capture all the uses of f's lexdep entry for
-                 * x, but it must capture the x node referred to from g's TOK_UPVARS node.
-                 * Always turning inherited lexdeps into uses of a new outer definition
-                 * allows us to handle both these cases in a natural way.
-                 */
-                outer_ale = MakePlaceholder(dn, tc);
-                if (!outer_ale)
-                    return false;
-            }
-
-
-            JSDefinition *outer_dn = ALE_DEFN(outer_ale);
+            if (!outer_dn) {
+                AtomDefnAddPtr p = tc->lexdeps->lookupForAdd(atom);
+                if (p) {
+                    outer_dn = p.value();
+                } else {
+                    /*
+                     * Create a new placeholder for our outer lexdep. We could
+                     * simply re-use the inner placeholder, but that introduces
+                     * subtleties in the case where we find a later definition
+                     * that captures an existing lexdep. For example:
+                     *
+                     *   function f() { function g() { x; } let x; }
+                     *
+                     * Here, g's TOK_UPVARS node lists the placeholder for x,
+                     * which must be captured by the 'let' declaration later,
+                     * since 'let's are hoisted.  Taking g's placeholder as our
+                     * own would work fine. But consider:
+                     *
+                     *   function f() { x; { function g() { x; } let x; } }
+                     *
+                     * Here, the 'let' must not capture all the uses of f's
+                     * lexdep entry for x, but it must capture the x node
+                     * referred to from g's TOK_UPVARS node.  Always turning
+                     * inherited lexdeps into uses of a new outer definition
+                     * allows us to handle both these cases in a natural way.
+                     */
+                    outer_dn = MakePlaceholder(p, dn, tc);
+                    if (!outer_dn)
+                        return false;
+                }
+            }
 
             /*
              * Insert dn's uses list at the front of outer_dn's list.
              *
              * Without loss of generality or correctness, we allow a dn to
              * be in inner and outer lexdeps, since the purpose of lexdeps
              * is one-pass coordination of name use and definition across
              * functions, and if different dn's are used we'll merge lists
@@ -2824,47 +2829,49 @@ LeaveFunction(JSParseNode *fn, JSTreeCon
                 dn->pn_used = true;
                 dn->pn_lexdef = outer_dn;
             }
 
             /* Mark the outer dn as escaping. */
             outer_dn->pn_dflags |= PND_CLOSED;
         }
 
-        if (funtc->lexdeps.count - foundCallee != 0) {
+        if (funtc->lexdeps->count() - foundCallee != 0) {
             JSParseNode *body = fn->pn_body;
 
             fn->pn_body = NameSetNode::create(tc);
             if (!fn->pn_body)
                 return false;
 
             fn->pn_body->pn_type = TOK_UPVARS;
             fn->pn_body->pn_pos = body->pn_pos;
             if (foundCallee)
-                funtc->lexdeps.remove(tc->parser, funAtom);
+                funtc->lexdeps->remove(funAtom);
+            /* Transfer ownership of the lexdep map to the parse node. */
             fn->pn_body->pn_names = funtc->lexdeps;
+            funtc->lexdeps.clearMap();
             fn->pn_body->pn_tree = body;
-        }
-
-        funtc->lexdeps.clear();
+        } else {
+            funtc->lexdeps.releaseMap(funtc->parser->context);
+        }
+
     }
 
     /*
      * Check whether any parameters have been assigned within this function.
      * In strict mode parameters do not alias arguments[i], and to make the
      * arguments object reflect initial parameter values prior to any mutation
      * we create it eagerly whenever parameters are (or might, in the case of
      * calls to eval) be assigned.
      */
     if (funtc->inStrictMode() && funbox->object->getFunctionPrivate()->nargs > 0) {
-        JSAtomListIterator iter(&funtc->decls);
-        JSAtomListElement *ale;
-
-        while ((ale = iter()) != NULL) {
-            JSDefinition *dn = ALE_DEFN(ale);
+        AtomDeclsIter iter(&funtc->decls);
+        JSDefinition *dn;
+
+        while ((dn = iter()) != NULL) {
             if (dn->kind() == JSDefinition::ARG && dn->isAssigned()) {
                 funbox->tcflags |= TCF_FUN_MUTATES_PARAMETER;
                 break;
             }
         }
     }
 
     funbox->bindings.transfer(funtc->parser->context, &funtc->bindings);
@@ -2975,17 +2982,17 @@ Parser::functionArguments(JSTreeContext 
                  * (strict mode code), but we do those tests in one place
                  * below, after having parsed the body in case it begins with a
                  * "use strict"; directive.
                  *
                  * NB: Check funtc.decls rather than funtc.bindings, because
                  *     destructuring bindings aren't added to funtc.bindings
                  *     until after all arguments have been parsed.
                  */
-                if (funtc.decls.lookup(atom)) {
+                if (funtc.decls.lookupFirst(atom)) {
                     duplicatedArg = atom;
                     if (destructuringArg)
                         goto report_dup_and_destructuring;
                 }
 #endif
 
                 uint16 slot;
                 if (!funtc.bindings.addArgument(context, atom, &slot))
@@ -2998,17 +3005,17 @@ Parser::functionArguments(JSTreeContext 
               default:
                 reportErrorNumber(NULL, JSREPORT_ERROR, JSMSG_MISSING_FORMAL);
                 /* FALL THROUGH */
               case TOK_ERROR:
                 return false;
 
 #if JS_HAS_DESTRUCTURING
               report_dup_and_destructuring:
-                JSDefinition *dn = ALE_DEFN(funtc.decls.lookup(duplicatedArg));
+                JSDefinition *dn = funtc.decls.lookupFirst(duplicatedArg);
                 reportErrorNumber(dn, JSREPORT_ERROR, JSMSG_DESTRUCT_DUP_ARG);
                 return false;
 #endif
             }
         } while (tokenStream.matchToken(TOK_COMMA));
 
         if (tokenStream.getToken() != TOK_RP) {
             reportErrorNumber(NULL, JSREPORT_ERROR, JSMSG_PAREN_AFTER_FORMAL);
@@ -3044,18 +3051,17 @@ Parser::functionDef(JSAtom *funAtom, Fun
     bool bodyLevel = tc->atBodyLevel();
     pn->pn_dflags = (kind == Expression || !bodyLevel) ? PND_FUNARG : 0;
 
     /*
      * Record names for function statements in tc->decls so we know when to
      * avoid optimizing variable references that might name a function.
      */
     if (kind == Statement) {
-        if (JSAtomListElement *ale = tc->decls.lookup(funAtom)) {
-            JSDefinition *dn = ALE_DEFN(ale);
+        if (JSDefinition *dn = tc->decls.lookupFirst(funAtom)) {
             JSDefinition::Kind dn_kind = dn->kind();
 
             JS_ASSERT(!dn->pn_used);
             JS_ASSERT(dn->pn_defn);
 
             if (context->hasStrictOption() || dn_kind == JSDefinition::CONST) {
                 JSAutoByteString name;
                 if (!js_AtomToPrintableString(context, funAtom, &name) ||
@@ -3066,50 +3072,46 @@ Parser::functionDef(JSAtom *funAtom, Fun
                                        JSMSG_REDECLARED_VAR,
                                        JSDefinition::kindString(dn_kind),
                                        name.ptr())) {
                     return NULL;
                 }
             }
 
             if (bodyLevel) {
-                ALE_SET_DEFN(ale, pn);
+                tc->decls.update(funAtom, (JSDefinition *) pn);
                 pn->pn_defn = true;
                 pn->dn_uses = dn;               /* dn->dn_uses is now pn_link */
 
                 if (!MakeDefIntoUse(dn, pn, funAtom, tc))
                     return NULL;
             }
         } else if (bodyLevel) {
             /*
              * If this function was used before it was defined, claim the
              * pre-created definition node for this function that primaryExpr
              * put in tc->lexdeps on first forward reference, and recycle pn.
              */
-            JSHashEntry **hep;
-
-            ale = tc->lexdeps.rawLookup(funAtom, hep);
-            if (ale) {
-                JSDefinition *fn = ALE_DEFN(ale);
-
+
+            if (JSDefinition *fn = tc->lexdeps.lookupDefn(funAtom)) {
                 JS_ASSERT(fn->pn_defn);
                 fn->pn_type = TOK_FUNCTION;
                 fn->pn_arity = PN_FUNC;
                 fn->pn_pos.begin = pn->pn_pos.begin;
 
                 /*
                  * Set fn->pn_pos.end too, in case of error before we parse the
                  * closing brace.  See bug 640075.
                  */
                 fn->pn_pos.end = pn->pn_pos.end;
 
                 fn->pn_body = NULL;
                 fn->pn_cookie.makeFree();
 
-                tc->lexdeps.rawRemove(tc->parser, ale, hep);
+                tc->lexdeps->remove(funAtom);
                 RecycleTree(pn, tc);
                 pn = fn;
             }
 
             if (!Define(pn, funAtom, tc))
                 return NULL;
         }
 
@@ -3147,16 +3149,18 @@ Parser::functionDef(JSAtom *funAtom, Fun
             }
         }
     }
 
     JSTreeContext *outertc = tc;
 
     /* Initialize early for possible flags mutation via destructuringExpr. */
     JSTreeContext funtc(tc->parser);
+    if (!funtc.init(context))
+        return NULL;
 
     JSFunctionBox *funbox = EnterFunction(pn, &funtc, funAtom, kind);
     if (!funbox)
         return NULL;
 
     JSFunction *fun = funbox->function();
 
     /* Now parse formal argument list and compute fun->nargs. */
@@ -3171,21 +3175,19 @@ Parser::functionDef(JSAtom *funAtom, Fun
      * If there were destructuring formal parameters, bind the destructured-to
      * local variables now that we've parsed all the regular and destructuring
      * formal parameters. Because js::Bindings::add must be called first for
      * all ARGUMENTs, then all VARIABLEs and CONSTANTs, and finally all UPVARs,
      * we can't bind vars induced by formal parameter destructuring until after
      * Parser::functionArguments has returned.
      */
     if (prelude) {
-        JSAtomListIterator iter(&funtc.decls);
-
-        while (JSAtomListElement *ale = iter()) {
-            JSParseNode *apn = ALE_DEFN(ale);
-
+        AtomDeclsIter iter(&funtc.decls);
+
+        while (JSDefinition *apn = iter()) {
             /* Filter based on pn_op -- see BindDestructuringArg, above. */
             if (apn->pn_op != JSOP_SETLOCAL)
                 continue;
 
             uint16 index = funtc.bindings.countVars();
             if (!BindLocalVariable(context, &funtc, apn->pn_atom, VARIABLE, true))
                 return NULL;
             apn->pn_cookie.set(funtc.staticLevel, index);
@@ -3573,39 +3575,36 @@ MatchLabel(JSContext *cx, TokenStream *t
     return JS_TRUE;
 }
 
 static JSBool
 BindLet(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
 {
     JSParseNode *pn;
     JSObject *blockObj;
-    JSAtomListElement *ale;
     jsint n;
 
     /*
      * Body-level 'let' is the same as 'var' currently -- this may change in a
      * successor standard to ES5 that specifies 'let'.
      */
     JS_ASSERT(!tc->atBodyLevel());
 
     pn = data->pn;
     if (!CheckStrictBinding(cx, tc, atom, pn))
         return false;
 
     blockObj = tc->blockChain();
-    ale = tc->decls.lookup(atom);
-    if (ale && ALE_DEFN(ale)->pn_blockid == tc->blockid()) {
+    JSDefinition *dn = tc->decls.lookupFirst(atom);
+    if (dn && dn->pn_blockid == tc->blockid()) {
         JSAutoByteString name;
         if (js_AtomToPrintableString(cx, atom, &name)) {
             ReportCompileErrorNumber(cx, TS(tc->parser), pn,
                                      JSREPORT_ERROR, JSMSG_REDECLARED_VAR,
-                                     (ale && ALE_DEFN(ale)->isConst())
-                                     ? js_const_str
-                                     : js_variable_str,
+                                     dn->isConst() ? js_const_str : js_variable_str,
                                      name.ptr());
         }
         return false;
     }
 
     n = OBJ_BLOCK_COUNT(cx, blockObj);
     if (n == JS_BIT(16)) {
         ReportCompileErrorNumber(cx, TS(tc->parser), pn,
@@ -3660,17 +3659,17 @@ PopStatement(JSTreeContext *tc)
         JS_ASSERT(!obj->isClonedBlock());
 
         for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
             JSAtom *atom = JSID_TO_ATOM(r.front().propid);
 
             /* Beware the empty destructuring dummy. */
             if (atom == tc->parser->context->runtime->atomState.emptyAtom)
                 continue;
-            tc->decls.remove(tc->parser, atom);
+            tc->decls.remove(atom);
         }
     }
     js_PopStatement(tc);
 }
 
 static inline bool
 OuterLet(JSTreeContext *tc, JSStmtInfo *stmt, JSAtom *atom)
 {
@@ -3703,18 +3702,18 @@ static bool
 DefineGlobal(JSParseNode *pn, JSCodeGenerator *cg, JSAtom *atom)
 {
     GlobalScope *globalScope = cg->compiler()->globalScope;
     JSObject *globalObj = globalScope->globalObj;
 
     if (!cg->compileAndGo() || !globalObj || cg->compilingForEval())
         return true;
 
-    JSAtomListElement *ale = globalScope->names.lookup(atom);
-    if (!ale) {
+    AtomIndexAddPtr p = globalScope->names.lookupForAdd(atom);
+    if (!p) {
         JSContext *cx = cg->parser->context;
 
         JSObject *holder;
         JSProperty *prop;
         if (!globalObj->lookupProperty(cx, ATOM_TO_JSID(atom), &holder, &prop))
             return false;
 
         JSFunctionBox *funbox = (pn->pn_type == TOK_FUNCTION) ? pn->pn_funbox : NULL;
@@ -3740,50 +3739,49 @@ DefineGlobal(JSParseNode *pn, JSCodeGene
             def = GlobalScope::GlobalDef(shape->slot);
         } else {
             def = GlobalScope::GlobalDef(atom, funbox);
         }
 
         if (!globalScope->defs.append(def))
             return false;
 
-        ale = globalScope->names.add(cg->parser, atom);
-        if (!ale)
+        jsatomid index = globalScope->names.count();
+        if (!globalScope->names.add(p, atom, index))
             return false;
 
-        JS_ASSERT(ALE_INDEX(ale) == globalScope->defs.length() - 1);
+        JS_ASSERT(index == globalScope->defs.length() - 1);
     } else {
         /*
          * Functions can be redeclared, and the last one takes effect. Check
          * for this and make sure to rewrite the definition.
          *
          * Note: This could overwrite an existing variable declaration, for
          * example:
          *   var c = []
          *   function c() { }
          *
          * This rewrite is allowed because the function will be statically
          * hoisted to the top of the script, and the |c = []| will just
          * overwrite it at runtime.
          */
         if (pn->pn_type == TOK_FUNCTION) {
             JS_ASSERT(pn->pn_arity = PN_FUNC);
-            uint32 index = ALE_INDEX(ale);
+            jsatomid index = p.value();
             globalScope->defs[index].funbox = pn->pn_funbox;
         }
     }
 
     pn->pn_dflags |= PND_GVAR;
 
     return true;
 }
 
 static bool
-BindTopLevelVar(JSContext *cx, BindData *data, JSAtomListElement *ale, JSParseNode *pn,
-                JSAtom *varname, JSTreeContext *tc)
+BindTopLevelVar(JSContext *cx, BindData *data, JSParseNode *pn, JSAtom *varname, JSTreeContext *tc)
 {
     JS_ASSERT(pn->pn_op == JSOP_NAME);
     JS_ASSERT(!tc->inFunction());
 
     /* There's no need to optimize bindings if we're not compiling code. */
     if (!tc->compiling())
         return true;
 
@@ -3826,17 +3824,17 @@ BindTopLevelVar(JSContext *cx, BindData 
      * If this is a global variable, we're compile-and-go, and a global object
      * is present, try to bake in either an already available slot or a
      * predicted slot that will be defined after compiling is completed.
      */
     return DefineGlobal(pn, tc->asCodeGenerator(), pn->pn_atom);
 }
 
 static bool
-BindFunctionLocal(JSContext *cx, BindData *data, JSAtomListElement *ale, JSParseNode *pn,
+BindFunctionLocal(JSContext *cx, BindData *data, MultiDeclRange &mdl, JSParseNode *pn,
                   JSAtom *name, JSTreeContext *tc)
 {
     JS_ASSERT(tc->inFunction());
 
     if (name == cx->runtime->atomState.argumentsAtom) {
         pn->pn_op = JSOP_ARGUMENTS;
         pn->pn_dflags |= PND_BOUND;
         return true;
@@ -3859,17 +3857,17 @@ BindFunctionLocal(JSContext *cx, BindDat
         pn->pn_op = JSOP_GETLOCAL;
         pn->pn_cookie.set(tc->staticLevel, index);
         pn->pn_dflags |= PND_BOUND;
         return true;
     }
 
     if (kind == ARGUMENT) {
         JS_ASSERT(tc->inFunction());
-        JS_ASSERT(ale && ALE_DEFN(ale)->kind() == JSDefinition::ARG);
+        JS_ASSERT(!mdl.empty() && mdl.front()->kind() == JSDefinition::ARG);
     } else {
         JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
     }
 
     return true;
 }
 
 static JSBool
@@ -3887,21 +3885,21 @@ BindVarOrConst(JSContext *cx, BindData *
 
     if (stmt && stmt->type == STMT_WITH) {
         data->fresh = false;
         pn->pn_dflags |= PND_DEOPTIMIZED;
         tc->noteMightAliasLocals();
         return true;
     }
 
-    JSAtomListElement *ale = tc->decls.lookup(atom);
+    MultiDeclRange mdl = tc->decls.lookupMulti(atom);
     JSOp op = data->op;
 
-    if (stmt || ale) {
-        JSDefinition *dn = ale ? ALE_DEFN(ale) : NULL;
+    if (stmt || !mdl.empty()) {
+        JSDefinition *dn = mdl.empty() ? NULL : mdl.front();
         JSDefinition::Kind dn_kind = dn ? dn->kind() : JSDefinition::VAR;
 
         if (dn_kind == JSDefinition::ARG) {
             JSAutoByteString name;
             if (!js_AtomToPrintableString(cx, atom, &name))
                 return JS_FALSE;
 
             if (op == JSOP_DEFCONST) {
@@ -3934,32 +3932,32 @@ BindVarOrConst(JSContext *cx, BindData *
                                               JSDefinition::kindString(dn_kind),
                                               name.ptr())) {
                     return JS_FALSE;
                 }
             }
         }
     }
 
-    if (!ale) {
+    if (mdl.empty()) {
         if (!Define(pn, atom, tc))
             return JS_FALSE;
     } else {
         /*
          * A var declaration never recreates an existing binding, it restates
          * it and possibly reinitializes its value. Beware that if pn becomes a
-         * use of ALE_DEFN(ale), and if we have an initializer for this var or
+         * use of |mdl.defn()|, and if we have an initializer for this var or
          * const (typically a const would ;-), then pn must be rewritten into a
          * TOK_ASSIGN node. See Variables, further below.
          *
          * A case such as let (x = 1) { var x = 2; print(x); } is even harder.
          * There the x definition is hoisted but the x = 2 assignment mutates
          * the block-local binding of x.
          */
-        JSDefinition *dn = ALE_DEFN(ale);
+        JSDefinition *dn = mdl.front();
 
         data->fresh = false;
 
         if (!pn->pn_used) {
             /* Make pnu be a fresh name node that uses dn. */
             JSParseNode *pnu = pn;
 
             if (pn->pn_defn) {
@@ -3967,71 +3965,64 @@ BindVarOrConst(JSContext *cx, BindData *
                 if (!pnu)
                     return JS_FALSE;
             }
 
             LinkUseToDef(pnu, dn, tc);
             pnu->pn_op = JSOP_NAME;
         }
 
+        /* Find the first non-let binding of this atom. */
         while (dn->kind() == JSDefinition::LET) {
-            do {
-                ale = ALE_NEXT(ale);
-            } while (ale && ALE_ATOM(ale) != atom);
-            if (!ale)
+            mdl.popFront();
+            if (mdl.empty())
                 break;
-            dn = ALE_DEFN(ale);
-        }
-
-        if (ale) {
+            dn = mdl.front();
+        }
+
+        if (!mdl.empty()) {
             JS_ASSERT_IF(data->op == JSOP_DEFCONST,
                          dn->kind() == JSDefinition::CONST);
             return JS_TRUE;
         }
 
         /*
          * A var or const that is shadowed by one or more let bindings of the
          * same name, but that has not been declared until this point, must be
          * hoisted above the let bindings.
          */
         if (!pn->pn_defn) {
-            JSHashEntry **hep;
-
-            ale = tc->lexdeps.rawLookup(atom, hep);
-            if (ale) {
-                pn = ALE_DEFN(ale);
-                tc->lexdeps.rawRemove(tc->parser, ale, hep);
+            if (JSDefinition *lexdep = tc->lexdeps.lookupDefn(atom)) {
+                pn = lexdep; tc->lexdeps->remove(atom);
             } else {
                 JSParseNode *pn2 = NameNode::create(atom, tc);
                 if (!pn2)
                     return JS_FALSE;
 
                 /* The token stream may be past the location for pn. */
                 pn2->pn_type = TOK_NAME;
                 pn2->pn_pos = pn->pn_pos;
                 pn = pn2;
             }
             pn->pn_op = JSOP_NAME;
         }
 
-        ale = tc->decls.add(tc->parser, atom, JSAtomList::HOIST);
-        if (!ale)
+        if (!tc->decls.addHoist(atom, (JSDefinition *) pn))
             return JS_FALSE;
-        ALE_SET_DEFN(ale, pn);
         pn->pn_defn = true;
         pn->pn_dflags &= ~PND_PLACEHOLDER;
     }
 
     if (data->op == JSOP_DEFCONST)
         pn->pn_dflags |= PND_CONST;
 
     if (tc->inFunction())
-        return BindFunctionLocal(cx, data, ale, pn, atom, tc);
-
-    return BindTopLevelVar(cx, data, ale, pn, atom, tc);
+        return BindFunctionLocal(cx, data, mdl, pn, atom, tc);
+
+    return BindTopLevelVar(cx, data, pn, atom, tc);
 }
 
 static bool
 MakeSetCall(JSContext *cx, JSParseNode *pn, JSTreeContext *tc, uintN msg)
 {
     JS_ASSERT(pn->pn_arity == PN_LIST);
     JS_ASSERT(pn->pn_op == JSOP_CALL || pn->pn_op == JSOP_EVAL ||
               pn->pn_op == JSOP_FUNCALL || pn->pn_op == JSOP_FUNAPPLY);
@@ -4739,28 +4730,25 @@ PushBlocklikeStatement(JSStmtInfo *stmt,
 {
     js_PushStatement(tc, stmt, type, -1);
     return GenerateBlockId(tc, stmt->blockid);
 }
 
 static JSParseNode *
 NewBindingNode(JSAtom *atom, JSTreeContext *tc, bool let = false)
 {
-    JSParseNode *pn = NULL;
-
-    JSAtomListElement *ale = tc->decls.lookup(atom);
-    if (ale) {
-        pn = ALE_DEFN(ale);
+    JSParseNode *pn;
+    AtomDefnPtr removal;
+
+    if ((pn = tc->decls.lookupFirst(atom))) {
         JS_ASSERT(!pn->isPlaceholder());
     } else {
-        ale = tc->lexdeps.lookup(atom);
-        if (ale) {
-            pn = ALE_DEFN(ale);
-            JS_ASSERT(pn->isPlaceholder());
-        }
+        removal = tc->lexdeps->lookup(atom);
+        pn = removal ? removal.value() : NULL;
+        JS_ASSERT_IF(pn, pn->isPlaceholder());
     }
 
     if (pn) {
         JS_ASSERT(pn->pn_defn);
 
         /*
          * A let binding at top level becomes a var before we get here, so if
          * pn and tc have the same blockid then that id must not be the bodyid.
@@ -4769,17 +4757,17 @@ NewBindingNode(JSAtom *atom, JSTreeConte
          */
         JS_ASSERT_IF(let && pn->pn_blockid == tc->blockid(),
                      pn->pn_blockid != tc->bodyid);
 
         if (pn->isPlaceholder() && pn->pn_blockid >= (let ? tc->blockid() : tc->bodyid)) {
             if (let)
                 pn->pn_blockid = tc->blockid();
 
-            tc->lexdeps.remove(tc->parser, atom);
+            tc->lexdeps->remove(removal);
             return pn;
         }
     }
 
     /* Make a new node for this declarator name (or destructuring pattern). */
     pn = NameNode::create(atom, tc);
     if (!pn)
         return NULL;
@@ -5441,19 +5429,19 @@ Parser::withStatement()
     pn->pn_right = pn2;
     tc->flags |= TCF_FUN_HEAVYWEIGHT;
     tc->innermostWith = oldWith;
 
     /*
      * Make sure to deoptimize lexical dependencies inside the |with|
      * to safely optimize binding globals (see bug 561923).
      */
-    JSAtomListIterator iter(&tc->lexdeps);
-    while (JSAtomListElement *ale = iter()) {
-        JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
+    for (AtomDefnRange r = tc->lexdeps->all(); !r.empty(); r.popFront()) {
+        JSDefinition *defn = r.front().value();
+        JSDefinition *lexdep = defn->resolve();
         DeoptimizeUsesWithin(lexdep, pn->pn_pos);
     }
 
     return pn;
 }
 
 #if JS_HAS_BLOCK_SCOPE
 JSParseNode *
@@ -6879,25 +6867,21 @@ CompExprTransplanter::transplant(JSParse
             }
 
             JSAtom *atom = pn->pn_atom;
 #ifdef DEBUG
             JSStmtInfo *stmt = js_LexicalLookup(tc, atom, NULL);
             JS_ASSERT(!stmt || stmt != tc->topStmt);
 #endif
             if (genexp && PN_OP(dn) != JSOP_CALLEE) {
-                JS_ASSERT(!tc->decls.lookup(atom));
+                JS_ASSERT(!tc->decls.lookupFirst(atom));
 
                 if (dn->pn_pos < root->pn_pos || dn->isPlaceholder()) {
-                    JSAtomListElement *ale = tc->lexdeps.add(tc->parser, atom);
-                    if (!ale)
-                        return false;
-
                     if (dn->pn_pos >= root->pn_pos) {
-                        tc->parent->lexdeps.remove(tc->parser, atom);
+                        tc->parent->lexdeps->remove(atom);
                     } else {
                         JSDefinition *dn2 = (JSDefinition *)NameNode::create(atom, tc);
                         if (!dn2)
                             return false;
 
                         dn2->pn_type = TOK_NAME;
                         dn2->pn_op = JSOP_NOP;
                         dn2->pn_defn = true;
@@ -6912,18 +6896,18 @@ CompExprTransplanter::transplant(JSParse
                             pnup = &pnu->pn_link;
                         }
                         dn2->dn_uses = dn->dn_uses;
                         dn->dn_uses = *pnup;
                         *pnup = NULL;
 
                         dn = dn2;
                     }
-
-                    ALE_SET_DEFN(ale, dn);
+                    if (!tc->lexdeps->put(atom, dn))
+                        return false;
                 }
             }
         }
 
         if (pn->pn_pos >= root->pn_pos)
             AdjustBlockId(pn, adjust, tc);
         break;
 
@@ -7176,16 +7160,18 @@ Parser::generatorExpr(JSParseNode *kid)
     genfn->pn_type = TOK_FUNCTION;
     genfn->pn_op = JSOP_LAMBDA;
     JS_ASSERT(!genfn->pn_body);
     genfn->pn_dflags = PND_FUNARG;
 
     {
         JSTreeContext *outertc = tc;
         JSTreeContext gentc(tc->parser);
+        if (!gentc.init(context))
+            return NULL;
 
         JSFunctionBox *funbox = EnterFunction(genfn, &gentc);
         if (!funbox)
             return NULL;
 
         /*
          * We have to dance around a bit to propagate sharp variables from
          * outertc to gentc before setting TCF_HAS_SHARPS implicitly by
@@ -8160,16 +8146,18 @@ JSParseNode *
 Parser::parseXMLText(JSObject *chain, bool allowList)
 {
     /*
      * Push a compiler frame if we have no frames, or if the top frame is a
      * lightweight function activation, or if its scope chain doesn't match
      * the one passed to us.
      */
     JSTreeContext xmltc(this);
+    if (!xmltc.init(context))
+        return NULL;
     xmltc.setScopeChain(chain);
 
     /* Set XML-only mode to turn off special treatment of {expr} in XML. */
     tokenStream.setXMLOnlyMode();
     TokenKind tt = tokenStream.getToken(TSF_OPERAND);
 
     JSParseNode *pn;
     if (tt != TOK_XMLSTAGO) {
@@ -8193,19 +8181,19 @@ Parser::parseXMLText(JSObject *chain, bo
  * and let blocks and expressions (not let declarations).
  *
  * Unlike let declarations ("let as the new var"), which is a kind of letrec
  * due to hoisting, let in a for loop head, let block, or let expression acts
  * like Scheme's let: initializers are evaluated without the new let bindings
  * being in scope.
  *
  * Name binding analysis is eager with fixups, rather than multi-pass, and let
- * bindings push on the front of the tc->decls JSAtomList (either the singular
- * list or on a hash chain -- see JSAtomList::AddHow) in order to shadow outer
- * scope bindings of the same name.
+ * bindings push on the front of the tc->decls AtomDecls (either the singular
+ * list or on a hash chain -- see JSAtomMultiList::add*) in order to shadow
+ * outer scope bindings of the same name.
  *
  * This simplifies binding lookup code at the price of a linear search here,
  * but only if code uses let (var predominates), and even then this function's
  * loop iterates more than once only in crazy cases.
  */
 static inline bool
 BlockIdInScope(uintN blockid, JSTreeContext *tc)
 {
@@ -8399,17 +8387,18 @@ Parser::primaryExpr(TokenKind tt, JSBool
       {
         JSBool afterComma;
         JSParseNode *pnval;
 
         /*
          * A map from property names we've seen thus far to a mask of property
          * assignment types, stored and retrieved with ALE_SET_INDEX/ALE_INDEX.
          */
-        JSAutoAtomList seen(tc->parser);
+        AtomIndexMap seen(context);
+
         enum AssignmentType {
             GET     = 0x1,
             SET     = 0x2,
             VALUE   = 0x4 | GET | SET
         };
 
         pn = ListNode::create(tc);
         if (!pn)
@@ -8529,18 +8518,20 @@ Parser::primaryExpr(TokenKind tt, JSBool
                 assignType = GET;
             } else if (op == JSOP_SETTER) {
                 assignType = SET;
             } else {
                 JS_NOT_REACHED("bad opcode in object initializer");
                 assignType = VALUE; /* try to error early */
             }
 
-            if (JSAtomListElement *ale = seen.lookup(atom)) {
-                AssignmentType oldAssignType = AssignmentType(ALE_INDEX(ale));
+            AtomIndexAddPtr p = seen.lookupForAdd(atom);
+            if (p) {
+                jsatomid index = p.value();
+                AssignmentType oldAssignType = AssignmentType(index);
                 if ((oldAssignType & assignType) &&
                     (oldAssignType != VALUE || assignType != VALUE || tc->needStrictChecks()))
                 {
                     JSAutoByteString name;
                     if (!js_AtomToPrintableString(context, atom, &name))
                         return NULL;
 
                     uintN flags = (oldAssignType == VALUE &&
@@ -8549,22 +8540,20 @@ Parser::primaryExpr(TokenKind tt, JSBool
                                   ? JSREPORT_WARNING
                                   : JSREPORT_ERROR;
                     if (!ReportCompileErrorNumber(context, &tokenStream, NULL, flags,
                                                   JSMSG_DUPLICATE_PROPERTY, name.ptr()))
                     {
                         return NULL;
                     }
                 }
-                ALE_SET_INDEX(ale, assignType | oldAssignType);
+                p.value() = assignType | oldAssignType;
             } else {
-                ale = seen.add(tc->parser, atom);
-                if (!ale)
+                if (!seen.add(p, atom, assignType))
                     return NULL;
-                ALE_SET_INDEX(ale, assignType);
             }
 
             tt = tokenStream.getToken();
             if (tt == TOK_RC)
                 goto end_obj_init;
             if (tt != TOK_COMMA) {
                 reportErrorNumber(NULL, JSREPORT_ERROR, JSMSG_CURLY_AFTER_LIST);
                 return NULL;
@@ -8690,76 +8679,73 @@ Parser::primaryExpr(TokenKind tt, JSBool
              * still use instead of arguments (more hate).
              */
             tc->noteArgumentsUse(pn);
 
             /*
              * Bind early to JSOP_ARGUMENTS to relieve later code from having
              * to do this work (new rule for the emitter to count on).
              */
-            if (!afterDot && !(tc->flags & TCF_DECL_DESTRUCTURING) && !tc->inStatement(STMT_WITH)) {
+            if (!afterDot && !(tc->flags & TCF_DECL_DESTRUCTURING)
+                && !tc->inStatement(STMT_WITH)) {
                 pn->pn_op = JSOP_ARGUMENTS;
                 pn->pn_dflags |= PND_BOUND;
             }
         } else if ((!afterDot
 #if JS_HAS_XML_SUPPORT
                     || tokenStream.peekToken() == TOK_DBLCOLON
 #endif
                    ) && !(tc->flags & TCF_DECL_DESTRUCTURING)) {
             /* In case this is a generator expression outside of any function. */
             if (!tc->inFunction() &&
                 pn->pn_atom == context->runtime->atomState.argumentsAtom) {
                 tc->countArgumentsUse(pn);
             }
 
             JSStmtInfo *stmt = js_LexicalLookup(tc, pn->pn_atom, NULL);
 
+            MultiDeclRange mdl = tc->decls.lookupMulti(pn->pn_atom);
             JSDefinition *dn;
 
-            JSAtomListElement *ale = tc->decls.lookup(pn->pn_atom);
-            if (ale) {
-                dn = ALE_DEFN(ale);
+            if (!mdl.empty()) {
+                dn = mdl.front();
 #if JS_HAS_BLOCK_SCOPE
                 /*
                  * Skip out-of-scope let bindings along an ALE list or hash
                  * chain. These can happen due to |let (x = x) x| block and
                  * expression bindings, where the x on the right of = comes
                  * from an outer scope. See bug 496532.
                  */
                 while (dn->isLet() && !BlockIdInScope(dn->pn_blockid, tc)) {
-                    do {
-                        ale = ALE_NEXT(ale);
-                    } while (ale && ALE_ATOM(ale) != pn->pn_atom);
-                    if (!ale)
+                    mdl.popFront();
+                    if (mdl.empty())
                         break;
-                    dn = ALE_DEFN(ale);
+                    dn = mdl.front();
                 }
 #endif
             }
 
-            if (ale) {
-                dn = ALE_DEFN(ale);
+            if (!mdl.empty()) {
+                dn = mdl.front();
             } else {
-                ale = tc->lexdeps.lookup(pn->pn_atom);
-                if (ale) {
-                    dn = ALE_DEFN(ale);
+                AtomDefnAddPtr p = tc->lexdeps->lookupForAdd(pn->pn_atom);
+                if (p) {
+                    dn = p.value();
                 } else {
                     /*
                      * No definition before this use in any lexical scope.
-                     * Add a mapping in tc->lexdeps from pn->pn_atom to a
-                     * new node for the forward-referenced definition. This
-                     * placeholder definition node will be adopted when we
-                     * parse the real defining declaration form, or left as
-                     * a free variable definition if we never see the real
-                     * definition.
+                     * Create a placeholder definition node to either:
+                     * - Be adopted when we parse the real defining
+                     *   declaration, or
+                     * - Be left as a free variable definition if we never
+                     *   see the real definition.
                      */
-                    ale = MakePlaceholder(pn, tc);
-                    if (!ale)
+                    dn = MakePlaceholder(p, pn, tc);
+                    if (!dn)
                         return NULL;
-                    dn = ALE_DEFN(ale);
 
                     /*
                      * In case this is a forward reference to a function,
                      * we pessimistically set PND_FUNARG if the next token
                      * is not a left parenthesis.
                      *
                      * If the definition eventually parsed into dn is not a
                      * function, this flag won't hurt, and if we do parse a
--- a/js/src/jsparse.h
+++ b/js/src/jsparse.h
@@ -44,16 +44,18 @@
  * JS parser definitions.
  */
 #include "jsversion.h"
 #include "jsprvtd.h"
 #include "jspubtd.h"
 #include "jsatom.h"
 #include "jsscan.h"
 
+#include "frontend/ParseMaps.h"
+
 JS_BEGIN_EXTERN_C
 
 /*
  * Parsing builds a tree of nodes that directs code generation.  This tree is
  * not a concrete syntax tree in all respects (for example, || and && are left
  * associative, but (A && B && C) translates into the right-associated tree
  * <A && <B && C>> so that code generation can emit a left-associative branch
  * around <B && C> when A is false).  Nodes are labeled by token type, with a
@@ -295,26 +297,26 @@ JS_BEGIN_EXTERN_C
 typedef enum JSParseNodeArity {
     PN_NULLARY,                         /* 0 kids, only pn_atom/pn_dval/etc. */
     PN_UNARY,                           /* one kid, plus a couple of scalars */
     PN_BINARY,                          /* two kids, plus a couple of scalars */
     PN_TERNARY,                         /* three kids */
     PN_FUNC,                            /* function definition node */
     PN_LIST,                            /* generic singly linked list */
     PN_NAME,                            /* name use or definition node */
-    PN_NAMESET                          /* JSAtomList + JSParseNode ptr */
+    PN_NAMESET                          /* js::AtomDefnMapPtr + JSParseNode ptr */
 } JSParseNodeArity;
 
 struct JSDefinition;
 
 namespace js {
 
 struct GlobalScope {
     GlobalScope(JSContext *cx, JSObject *globalObj, JSCodeGenerator *cg)
-      : globalObj(globalObj), cg(cg), defs(cx)
+      : globalObj(globalObj), cg(cg), defs(cx), names(cx)
     { }
 
     struct GlobalDef {
         JSAtom        *atom;        // If non-NULL, specifies the property name to add.
         JSFunctionBox *funbox;      // If non-NULL, function value for the property.
                                     // This value is only set/used if atom is non-NULL.
         uint32        knownSlot;    // If atom is NULL, this is the known shape slot.
 
@@ -334,17 +336,17 @@ struct GlobalScope {
      * This is the table of global names encountered during parsing. Each
      * global name appears in the list only once, and the |names| table
      * maps back into |defs| for fast lookup.
      *
      * A definition may either specify an existing global property, or a new
      * one that must be added after compilation succeeds.
      */
     Vector<GlobalDef, 16> defs;
-    JSAtomList      names;
+    AtomIndexMap      names;
 };
 
 } /* namespace js */
 
 struct JSParseNode {
     uint32              pn_type:16,     /* TOK_* type, see jsscan.h */
                         pn_op:8,        /* see JSOp enum and jsopcode.tbl */
                         pn_arity:5,     /* see JSParseNodeArity enum */
@@ -401,18 +403,18 @@ struct JSParseNode {
             js::UpvarCookie cookie;     /* upvar cookie with absolute frame
                                            level (not relative skip), possibly
                                            in current frame */
             uint32      dflags:12,      /* definition/use flags, see below */
                         blockid:20;     /* block number, for subset dominance
                                            computation */
         } name;
         struct {                        /* lexical dependencies + sub-tree */
-            JSAtomSet   names;          /* set of names with JSDefinitions */
-            JSParseNode *tree;          /* sub-tree containing name uses */
+            js::AtomDefnMapPtr  defnMap;
+            JSParseNode         *tree;  /* sub-tree containing name uses */
         } nameset;
         struct {                        /* PN_NULLARY variant for E4X */
             JSAtom      *atom;          /* first atom in pair */
             JSAtom      *atom2;         /* second atom in pair or null */
         } apair;
         jsdouble        dval;           /* aligned numeric literal value */
     } pn_u;
 
@@ -436,29 +438,30 @@ struct JSParseNode {
 #define pn_kid          pn_u.unary.kid
 #define pn_num          pn_u.unary.num
 #define pn_hidden       pn_u.unary.hidden
 #define pn_prologue     pn_u.unary.hidden
 #define pn_atom         pn_u.name.atom
 #define pn_objbox       pn_u.name.objbox
 #define pn_expr         pn_u.name.expr
 #define pn_lexdef       pn_u.name.lexdef
-#define pn_names        pn_u.nameset.names
+#define pn_names        pn_u.nameset.defnMap
 #define pn_tree         pn_u.nameset.tree
 #define pn_dval         pn_u.dval
 #define pn_atom2        pn_u.apair.atom2
 
 protected:
-    void inline init(js::TokenKind type, JSOp op, JSParseNodeArity arity) {
+    void init(js::TokenKind type, JSOp op, JSParseNodeArity arity) {
         pn_type = type;
         pn_op = op;
         pn_arity = arity;
         pn_parens = false;
         JS_ASSERT(!pn_used);
         JS_ASSERT(!pn_defn);
+        pn_names.init();
         pn_next = pn_link = NULL;
     }
 
     static JSParseNode *create(JSParseNodeArity arity, JSTreeContext *tc);
 
 public:
     static JSParseNode *newBinaryOrAppend(js::TokenKind tt, JSOp op, JSParseNode *left,
                                           JSParseNode *right, JSTreeContext *tc);
@@ -747,16 +750,20 @@ struct LexicalScopeNode : public JSParse
 
 /*
  * JSDefinition is a degenerate subtype of the PN_FUNC and PN_NAME variants of
  * JSParseNode, allocated only for function, var, const, and let declarations
  * that define truly lexical bindings. This means that a child of a TOK_VAR
  * list may be a JSDefinition instead of a JSParseNode. The pn_defn bit is set
  * for all JSDefinitions, clear otherwise.
  *
+ * In an upvars list, defn->resolve() is the outermost definition the
+ * name may reference. If a with block or a function that calls eval encloses
+ * the use, the name may end up referring to something else at runtime.
+ *
  * Note that not all var declarations are definitions: JS allows multiple var
  * declarations in a function or script, but only the first creates the hoisted
  * binding. JS programmers do redeclare variables for good refactoring reasons,
  * for example:
  *
  *   function foo() {
  *       ...
  *       for (var i ...) ...;
@@ -856,20 +863,21 @@ struct LexicalScopeNode : public JSParse
  * those lambdas that post-dominate their upvars inevitable only assignments or
  * initializations as flat closures (after Chez Scheme's display closures).
  */
 #define dn_uses         pn_link
 
 struct JSDefinition : public JSParseNode
 {
     /*
-     * We store definition pointers in PN_NAMESET JSAtomLists in the AST, but
-     * due to redefinition these nodes may become uses of other definitions.
-     * This is unusual, so we simply chase the pn_lexdef link to find the final
-     * definition node. See methods called from Parser::analyzeFunctions.
+     * We store definition pointers in PN_NAMESET js::AtomDefnMapPtrs in the
+     * AST, but due to redefinition these nodes may become uses of other
+     * definitions. This is unusual, so we simply chase the pn_lexdef link to
+     * find the final definition node. See methods called from
+     * Parser::analyzeFunctions.
      *
      * FIXME: MakeAssignment mutates for want of a parent link...
      */
     JSDefinition *resolve() {
         JSParseNode *pn = this;
         while (!pn->pn_defn) {
             if (pn->pn_type == js::TOK_ASSIGN) {
                 pn = pn->pn_left;
@@ -1050,17 +1058,16 @@ typedef struct BindData BindData;
 
 namespace js {
 
 enum FunctionSyntaxKind { Expression, Statement };
 
 struct Parser : private js::AutoGCRooter
 {
     JSContext           *const context; /* FIXME Bug 551291: use AutoGCRooter::context? */
-    JSAtomListElement   *aleFreeList;
     void                *tempFreeList[NUM_TEMP_FREELISTS];
     TokenStream         tokenStream;
     void                *tempPoolMark;  /* initial JSContext.tempPool mark */
     JSPrincipals        *principals;    /* principals associated with source */
     StackFrame          *const callerFrame;  /* scripted caller frame for eval and dbgapi */
     JSObject            *const callerVarObj; /* callerFrame's varObj */
     JSParseNode         *nodeList;      /* list of recyclable parse-node structs */
     uint32              functionCount;  /* number of functions in current unit */
@@ -1240,27 +1247,27 @@ Parser::reportErrorNumber(JSParseNode *p
     va_start(args, errorNumber);
     bool result = tokenStream.reportCompileErrorNumberVA(pn, flags, errorNumber, args);
     va_end(args);
     return result;
 }
 
 struct Compiler
 {
-    Parser parser;
+    Parser      parser;
     GlobalScope *globalScope;
 
     Compiler(JSContext *cx, JSPrincipals *prin = NULL, StackFrame *cfp = NULL);
 
-    /*
-     * Initialize a compiler. Parameters are passed on to init parser.
-     */
-    inline bool
-    init(const jschar *base, size_t length, const char *filename, uintN lineno, JSVersion version)
-    {
+    JSContext *context() {
+        return parser.context;
+    }
+
+    bool init(const jschar *base, size_t length, const char *filename, uintN lineno,
+              JSVersion version) {
         return parser.init(base, length, filename, lineno, version);
     }
 
     static bool
     compileFunctionBody(JSContext *cx, JSFunction *fun, JSPrincipals *principals,
                         js::Bindings *bindings, const jschar *chars, size_t length,
                         const char *filename, uintN lineno, JSVersion version);
 
--- a/js/src/jsprvtd.h
+++ b/js/src/jsprvtd.h
@@ -66,19 +66,19 @@ JS_BEGIN_EXTERN_C
 #define JS_BITS_PER_UINT32_LOG2 5
 #define JS_BITS_PER_UINT32      32
 
 /* The alignment required of objects stored in GC arenas. */
 static const uintN JS_GCTHING_ALIGN = 8;
 static const uintN JS_GCTHING_ZEROBITS = 3;
 
 /* Scalar typedefs. */
-typedef uint8  jsbytecode;
-typedef uint8  jssrcnote;
-typedef uint32 jsatomid;
+typedef uint8       jsbytecode;
+typedef uint8       jssrcnote;
+typedef uintptr_t   jsatomid;
 
 /* Struct typedefs. */
 typedef struct JSArgumentFormatMap  JSArgumentFormatMap;
 typedef struct JSCodeGenerator      JSCodeGenerator;
 typedef struct JSGCThing            JSGCThing;
 typedef struct JSGenerator          JSGenerator;
 typedef struct JSNativeEnumerator   JSNativeEnumerator;
 typedef struct JSFunctionBox        JSFunctionBox;
@@ -87,23 +87,20 @@ typedef struct JSParseNode          JSPa
 typedef struct JSProperty           JSProperty;
 typedef struct JSScript             JSScript;
 typedef struct JSSharpObjectMap     JSSharpObjectMap;
 typedef struct JSThread             JSThread;
 typedef struct JSTreeContext        JSTreeContext;
 typedef struct JSTryNote            JSTryNote;
 
 /* Friend "Advanced API" typedefs. */
-typedef struct JSAtomList           JSAtomList;
-typedef struct JSAtomListElement    JSAtomListElement;
 typedef struct JSAtomMap            JSAtomMap;
 typedef struct JSAtomState          JSAtomState;
 typedef struct JSCodeSpec           JSCodeSpec;
 typedef struct JSPrinter            JSPrinter;
-typedef struct JSRegExpStatics      JSRegExpStatics;
 typedef struct JSStackHeader        JSStackHeader;
 typedef struct JSSubString          JSSubString;
 typedef struct JSNativeTraceInfo    JSNativeTraceInfo;
 typedef struct JSSpecializedNative  JSSpecializedNative;
 typedef struct JSXML                JSXML;
 typedef struct JSXMLArray           JSXMLArray;
 typedef struct JSXMLArrayCursor     JSXMLArrayCursor;
 
@@ -121,16 +118,17 @@ extern "C++" {
 class JSDependentString;
 class JSExtensibleString;
 class JSExternalString;
 class JSLinearString;
 class JSFixedString;
 class JSStaticAtom;
 class JSRope;
 class JSAtom;
+struct JSDefinition;
 
 namespace js {
 
 struct ArgumentsData;
 
 class RegExp;
 class RegExpStatics;
 class AutoStringRooter;
@@ -174,23 +172,35 @@ template <class Key,
           class AllocPolicy = TempAllocPolicy>
 class HashMap;
 
 template <class T,
           class HashPolicy = DefaultHasher<T>,
           class AllocPolicy = TempAllocPolicy>
 class HashSet;
 
+template <typename K,
+          typename V,
+          size_t InlineElems>
+class InlineMap;
+
 class PropertyCache;
 struct PropertyCacheEntry;
 
 struct Shape;
 struct EmptyShape;
 class Bindings;
 
+class MultiDeclRange;
+class ParseMapPool;
+class DefnOrHeader;
+typedef js::InlineMap<JSAtom *, JSDefinition *, 24> AtomDefnMap;
+typedef js::InlineMap<JSAtom *, jsatomid, 24> AtomIndexMap;
+typedef js::InlineMap<JSAtom *, DefnOrHeader, 24> AtomDOHMap;
+
 } /* namespace js */
 
 } /* export "C++" */
 
 #else
 
 typedef struct JSAtom JSAtom;
 
--- a/js/src/jsscan.h
+++ b/js/src/jsscan.h
@@ -135,17 +135,17 @@ enum TokenKind {
     TOK_ARRAYCOMP = 78,                 /* array comprehension initialiser */
     TOK_ARRAYPUSH = 79,                 /* array push within comprehension */
     TOK_LEXICALSCOPE = 80,              /* block scope AST node label */
     TOK_LET = 81,                       /* let keyword */
     TOK_SEQ = 82,                       /* synthetic sequence of statements,
                                            not a block */
     TOK_FORHEAD = 83,                   /* head of for(;;)-style loop */
     TOK_ARGSBODY = 84,                  /* formal args in list + body at end */
-    TOK_UPVARS = 85,                    /* lexical dependencies as JSAtomList
+    TOK_UPVARS = 85,                    /* lexical dependencies as js::AtomDefnMap
                                            of definitions paired with a parse
                                            tree full of uses of those names */
     TOK_RESERVED,                       /* reserved keywords */
     TOK_STRICT_RESERVED,                /* reserved keywords in strict mode */
     TOK_LIMIT                           /* domain size */
 };
 
 static inline bool
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -1235,31 +1235,32 @@ JSScript *
 JSScript::NewScriptFromCG(JSContext *cx, JSCodeGenerator *cg)
 {
     uint32 mainLength, prologLength, nsrcnotes, nfixed;
     JSScript *script;
     const char *filename;
     JSFunction *fun;
 
     /* The counts of indexed things must be checked during code generation. */
-    JS_ASSERT(cg->atomList.count <= INDEX_LIMIT);
+    JS_ASSERT(cg->atomIndices->count() <= INDEX_LIMIT);
     JS_ASSERT(cg->objectList.length <= INDEX_LIMIT);
     JS_ASSERT(cg->regexpList.length <= INDEX_LIMIT);
 
     mainLength = CG_OFFSET(cg);
     prologLength = CG_PROLOG_OFFSET(cg);
 
     CG_COUNT_FINAL_SRCNOTES(cg, nsrcnotes);
     uint16 nClosedArgs = uint16(cg->closedArgs.length());
     JS_ASSERT(nClosedArgs == cg->closedArgs.length());
     uint16 nClosedVars = uint16(cg->closedVars.length());
     JS_ASSERT(nClosedVars == cg->closedVars.length());
+    size_t upvarIndexCount = cg->upvarIndices.hasMap() ? cg->upvarIndices->count() : 0;
     script = NewScript(cx, prologLength + mainLength, nsrcnotes,
-                       cg->atomList.count, cg->objectList.length,
-                       cg->upvarList.count, cg->regexpList.length,
+                       cg->atomIndices->count(), cg->objectList.length,
+                       upvarIndexCount, cg->regexpList.length,
                        cg->ntrynotes, cg->constList.length(),
                        cg->globalUses.length(), nClosedArgs, nClosedVars, cg->version());
     if (!script)
         return NULL;
 
     cg->bindings.makeImmutable();
     AutoShapeRooter shapeRoot(cx, cg->bindings.lastShape());
 
@@ -1267,17 +1268,17 @@ JSScript::NewScriptFromCG(JSContext *cx,
     script->main += prologLength;
     memcpy(script->code, CG_PROLOG_BASE(cg), prologLength * sizeof(jsbytecode));
     memcpy(script->main, CG_BASE(cg), mainLength * sizeof(jsbytecode));
     nfixed = cg->inFunction()
              ? cg->bindings.countVars()
              : cg->sharpSlots();
     JS_ASSERT(nfixed < SLOTNO_LIMIT);
     script->nfixed = (uint16) nfixed;
-    js_InitAtomMap(cx, &script->atomMap, &cg->atomList);
+    js_InitAtomMap(cx, &script->atomMap, cg->atomIndices.getMap());
 
     filename = cg->parser->tokenStream.getFilename();
     if (filename) {
         script->filename = SaveScriptFilename(cx, filename);
         if (!script->filename)
             goto bad;
     }
     script->lineno = cg->firstLine;
@@ -1311,21 +1312,21 @@ JSScript::NewScriptFromCG(JSContext *cx,
         script->compileAndGo = true;
     if (cg->callsEval())
         script->usesEval = true;
     if (cg->flags & TCF_FUN_USES_ARGUMENTS)
         script->usesArguments = true;
     if (cg->flags & TCF_HAS_SINGLETONS)
         script->hasSingletons = true;
 
-    if (cg->upvarList.count != 0) {
-        JS_ASSERT(cg->upvarList.count <= cg->upvarMap.length);
+    if (cg->hasUpvarIndices()) {
+        JS_ASSERT(cg->upvarIndices->count() <= cg->upvarMap.length);
         memcpy(script->upvars()->vector, cg->upvarMap.vector,
-               cg->upvarList.count * sizeof(uint32));
-        cg->upvarList.clear();
+               cg->upvarIndices->count() * sizeof(uint32));
+        cg->upvarIndices->clear();
         cx->free_(cg->upvarMap.vector);
         cg->upvarMap.vector = NULL;
     }
 
     if (cg->globalUses.length()) {
         memcpy(script->globals()->vector, &cg->globalUses[0],
                cg->globalUses.length() * sizeof(GlobalSlotArray::Entry));
     }
--- a/js/src/jstl.h
+++ b/js/src/jstl.h
@@ -160,16 +160,17 @@ template <> struct IsPodType<unsigned ch
 template <> struct IsPodType<short>           { static const bool result = true; };
 template <> struct IsPodType<unsigned short>  { static const bool result = true; };
 template <> struct IsPodType<int>             { static const bool result = true; };
 template <> struct IsPodType<unsigned int>    { static const bool result = true; };
 template <> struct IsPodType<long>            { static const bool result = true; };
 template <> struct IsPodType<unsigned long>   { static const bool result = true; };
 template <> struct IsPodType<float>           { static const bool result = true; };
 template <> struct IsPodType<double>          { static const bool result = true; };
+template <typename T> struct IsPodType<T *>   { static const bool result = true; };
 
 /* Return the size/end of an array without using macros. */
 template <class T, size_t N> inline T *ArraySize(T (&)[N]) { return N; }
 template <class T, size_t N> inline T *ArrayEnd(T (&arr)[N]) { return arr + N; }
 
 template <bool cond, typename T, T v1, T v2> struct If        { static const T result = v1; };
 template <typename T, T v1, T v2> struct If<false, T, v1, v2> { static const T result = v2; };
 
--- a/js/src/jsvector.h
+++ b/js/src/jsvector.h
@@ -374,16 +374,19 @@ class Vector : private AllocPolicy
 
     /* Leave new elements as uninitialized memory. */
     bool growByUninitialized(size_t incr);
     bool resizeUninitialized(size_t newLength);
 
     /* Shorthand for shrinkBy(length()). */
     void clear();
 
+    /* Clears and releases any heap-allocated storage. */
+    void clearAndFree();
+
     /* Potentially fallible append operations. */
     bool append(const T &t);
     bool appendN(const T &t, size_t n);
     template <class U> bool append(const U *begin, const U *end);
     template <class U> bool append(const U *begin, size_t length);
     template <class U, size_t O, class BP> bool append(const Vector<U,O,BP> &other);
 
     /*
@@ -650,16 +653,33 @@ inline void
 Vector<T,N,AP>::clear()
 {
     REENTRANCY_GUARD_ET_AL;
     Impl::destroy(beginNoCheck(), endNoCheck());
     mLength = 0;
 }
 
 template <class T, size_t N, class AP>
+inline void
+Vector<T,N,AP>::clearAndFree()
+{
+    clear();
+
+    if (usingInlineStorage())
+        return;
+
+    this->free_(beginNoCheck());
+    mBegin = (T *)storage.addr();
+    mCapacity = sInlineCapacity;
+#ifdef DEBUG
+    mReserved = 0;
+#endif
+}
+
+template <class T, size_t N, class AP>
 JS_ALWAYS_INLINE bool
 Vector<T,N,AP>::append(const T &t)
 {
     REENTRANCY_GUARD_ET_AL;
     if (mLength == mCapacity && !growStorageBy(1))
         return false;
 
 #ifdef DEBUG
new file mode 100644
--- /dev/null
+++ b/js/src/mfbt/InlineMap.h
@@ -0,0 +1,408 @@
+/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=4 sw=4 et tw=99 ft=cpp:
+ *
+ * ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is SpiderMonkey JavaScript engine.
+ *
+ * The Initial Developer of the Original Code is
+ * Mozilla Corporation.
+ * Portions created by the Initial Developer are Copyright (C) 2011
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ *   Chris Leary <cdleary@mozilla.com>
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either the GNU General Public License Version 2 or later (the "GPL"), or
+ * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+#ifndef InlineMap_h__
+#define InlineMap_h__
+
+#include "jshashtable.h"
+
+namespace js {
+
+/*
+ * A type can only be used as an InlineMap key if zero is an invalid key value
+ * (and thus may be used as a tombstone value by InlineMap).
+ */
+/* Default: arbitrary types do not reserve zero, so they may not key an InlineMap. */
+template <typename T> struct ZeroIsReserved         { static const bool result = false; };
+/* Pointers: NULL is never a valid key, so it can serve as the tombstone value. */
+template <typename T> struct ZeroIsReserved<T *>    { static const bool result = true; };
+
+/*
+ * InlineMap stores its first few entries in a fixed-size in-object array
+ * (|inl|) and spills to a heap-allocated HashMap (|map|) only once that
+ * space is exhausted.  Zero/NULL keys are reserved as tombstones, which is
+ * why K must satisfy ZeroIsReserved (see checkStaticInvariants).
+ */
+template <typename K, typename V, size_t InlineElems>
+class InlineMap
+{
+  public:
+    typedef HashMap<K, V, DefaultHasher<K>, TempAllocPolicy> WordMap;
+
+    /* Key/value pair layout used by the inline (pre-spill) representation. */
+    struct InlineElem
+    {
+        K key;
+        V value;
+    };
+
+  private:
+    typedef typename WordMap::Ptr       WordMapPtr;
+    typedef typename WordMap::AddPtr    WordMapAddPtr;
+    typedef typename WordMap::Range     WordMapRange;
+
+    size_t          inlNext;    /* Next free slot in |inl|; InlineElems + 1 signals "using map". */
+    size_t          inlCount;   /* Number of live (non-tombstoned) inline entries. */
+    InlineElem      inl[InlineElems];
+    WordMap         map;
+
+    void checkStaticInvariants() {
+        JS_STATIC_ASSERT(ZeroIsReserved<K>::result);
+    }
+
+    bool usingMap() const {
+        return inlNext > InlineElems;
+    }
+
+    /*
+     * Copy the live inline entries into |map| and make the map the active
+     * representation.  Tombstoned (zero-key) slots are skipped.
+     */
+    bool switchToMap() {
+        JS_ASSERT(inlNext == InlineElems);
+
+        if (map.initialized()) {
+            map.clear();
+        } else {
+            if (!map.init(count()))
+                return false;
+            JS_ASSERT(map.initialized());
+        }
+
+        for (InlineElem *it = inl, *end = inl + inlNext; it != end; ++it) {
+            if (it->key && !map.putNew(it->key, it->value))
+                return false;
+        }
+
+        /* One past InlineElems: see usingMap(). */
+        inlNext = InlineElems + 1;
+        JS_ASSERT(map.count() == inlCount);
+        JS_ASSERT(usingMap());
+        return true;
+    }
+
+    JS_NEVER_INLINE
+    bool switchAndAdd(const K &key, const V &value) {
+        if (!switchToMap())
+            return false;
+
+        return map.putNew(key, value);
+    }
+
+  public:
+    explicit InlineMap(JSContext *cx)
+      : inlNext(0), inlCount(0), map(cx) {
+        checkStaticInvariants(); /* Force the template to instantiate the static invariants. */
+    }
+
+    /* Read-only view of a single entry, as produced by Range::front(). */
+    class Entry
+    {
+        friend class InlineMap;
+        const K &key_;
+        const V &value_;
+
+        Entry(const K &key, const V &value) : key_(key), value_(value) {}
+
+      public:
+        const K &key() { return key_; }
+        const V &value() { return value_; }
+    }; /* class Entry */
+
+    /*
+     * Result of lookup(): refers to either an inline element or a map entry,
+     * discriminated by isInlinePtr.
+     */
+    class Ptr
+    {
+        friend class InlineMap;
+
+        WordMapPtr  mapPtr;
+        InlineElem  *inlPtr;
+        bool        isInlinePtr;
+
+        /* Safe-bool idiom: an obscure pointer type convertible only to bool. */
+        typedef Ptr ******* ConvertibleToBool;
+
+        explicit Ptr(WordMapPtr p) : mapPtr(p), isInlinePtr(false) {}
+        explicit Ptr(InlineElem *ie) : inlPtr(ie), isInlinePtr(true) {}
+        void operator==(const Ptr &other);  /* Declared but never defined: comparison is forbidden. */
+
+      public:
+        /* Leaves Ptr uninitialized. */
+        Ptr() {
+#ifdef DEBUG
+            inlPtr = (InlineElem *) 0xbad;
+            isInlinePtr = true;
+#endif
+        }
+
+        /* Default copy constructor works for this structure. */
+
+        bool found() const {
+            return isInlinePtr ? bool(inlPtr) : mapPtr.found();
+        }
+
+        operator ConvertibleToBool() const {
+            return ConvertibleToBool(found());
+        }
+
+        K &key() {
+            JS_ASSERT(found());
+            return isInlinePtr ? inlPtr->key : mapPtr->key;
+        }
+
+        V &value() {
+            JS_ASSERT(found());
+            return isInlinePtr ? inlPtr->value : mapPtr->value;
+        }
+    }; /* class Ptr */
+
+    /* Result of lookupForAdd(); consumed by add() to insert missing entries. */
+    class AddPtr
+    {
+        friend class InlineMap;
+
+        WordMapAddPtr   mapAddPtr;
+        InlineElem      *inlAddPtr;
+        bool            isInlinePtr;
+        /* Indicates whether inlAddPtr is a found result or an add pointer. */
+        bool            inlPtrFound;
+
+        AddPtr(InlineElem *ptr, bool found)
+          : inlAddPtr(ptr), isInlinePtr(true), inlPtrFound(found)
+        {}
+
+        AddPtr(const WordMapAddPtr &p) : mapAddPtr(p), isInlinePtr(false) {}
+
+        void operator==(const AddPtr &other);  /* Declared but never defined: comparison is forbidden. */
+
+        typedef AddPtr ******* ConvertibleToBool;
+
+      public:
+        AddPtr() {}
+
+        bool found() const {
+            return isInlinePtr ? inlPtrFound : mapAddPtr.found();
+        }
+
+        operator ConvertibleToBool() const {
+            return found() ? ConvertibleToBool(1) : ConvertibleToBool(0);
+        }
+
+        V &value() {
+            JS_ASSERT(found());
+            if (isInlinePtr)
+                return inlAddPtr->value;
+            return mapAddPtr->value;
+        }
+    }; /* class AddPtr */
+
+    /* NOTE(review): count() could be const like empty(); left as-is. */
+    size_t count() {
+        return usingMap() ? map.count() : inlCount;
+    }
+
+    bool empty() const {
+        return usingMap() ? map.empty() : !inlCount;
+    }
+
+    /* Note: does not release the map's storage; a later spill reuses it via map.clear(). */
+    void clear() {
+        inlNext = 0;
+        inlCount = 0;
+    }
+
+    bool isMap() const {
+        return usingMap();
+    }
+
+    const WordMap &asMap() const {
+        JS_ASSERT(isMap());
+        return map;
+    }
+
+    const InlineElem *asInline() const {
+        JS_ASSERT(!isMap());
+        return inl;
+    }
+
+    const InlineElem *inlineEnd() const {
+        JS_ASSERT(!isMap());
+        return inl + inlNext;
+    }
+
+    JS_ALWAYS_INLINE
+    Ptr lookup(const K &key) {
+        if (usingMap())
+            return Ptr(map.lookup(key));
+
+        /* Inline representation: linear scan of the occupied prefix. */
+        for (InlineElem *it = inl, *end = inl + inlNext; it != end; ++it) {
+            if (it->key == key)
+                return Ptr(it);
+        }
+
+        return Ptr(NULL);
+    }
+
+    JS_ALWAYS_INLINE
+    AddPtr lookupForAdd(const K &key) {
+        if (usingMap())
+            return AddPtr(map.lookupForAdd(key));
+
+        for (InlineElem *it = inl, *end = inl + inlNext; it != end; ++it) {
+            if (it->key == key)
+                return AddPtr(it, true);
+        }
+
+        /*
+         * The add pointer that's returned here may indicate the limit entry of
+         * the linear space, in which case the |add| operation will initialize
+         * the map if necessary and add the entry there.
+         */
+        return AddPtr(inl + inlNext, false);
+    }
+
+    JS_ALWAYS_INLINE
+    bool add(AddPtr &p, const K &key, const V &value) {
+        JS_ASSERT(!p);
+
+        if (p.isInlinePtr) {
+            InlineElem *addPtr = p.inlAddPtr;
+            JS_ASSERT(addPtr == inl + inlNext);
+
+            /* Switching to map mode before we add this pointer. */
+            if (addPtr == inl + InlineElems)
+                return switchAndAdd(key, value);
+
+            JS_ASSERT(!p.found());
+            JS_ASSERT(uintptr_t(inl + inlNext) == uintptr_t(p.inlAddPtr));
+            p.inlAddPtr->key = key;
+            p.inlAddPtr->value = value;
+            ++inlCount;
+            ++inlNext;
+            return true;
+        }
+
+        return map.add(p.mapAddPtr, key, value);
+    }
+
+    JS_ALWAYS_INLINE
+    bool put(const K &key, const V &value) {
+        AddPtr p = lookupForAdd(key);
+        if (p) {
+            p.value() = value;  /* Key already present: overwrite in place. */
+            return true;
+        }
+        return add(p, key, value);
+    }
+
+    void remove(Ptr p) {
+        JS_ASSERT(p);
+        if (p.isInlinePtr) {
+            JS_ASSERT(inlCount > 0);
+            JS_ASSERT(p.inlPtr->key != NULL);
+            /* Tombstone the slot; lookup and iteration skip zero keys. */
+            p.inlPtr->key = NULL;
+            --inlCount;
+            return;
+        }
+        JS_ASSERT(map.initialized() && usingMap());
+        map.remove(p.mapPtr);
+    }
+
+    void remove(const K &key) {
+        if (Ptr p = lookup(key))
+            remove(p);
+    }
+
+    /* Iterates live entries in either representation, skipping tombstones. */
+    class Range
+    {
+        friend class InlineMap;
+
+        WordMapRange    mapRange;
+        InlineElem      *cur;
+        InlineElem      *end;
+        bool            isInline;
+
+        explicit Range(WordMapRange r) : isInline(false) {
+            mapRange = r;
+            JS_ASSERT(!isInlineRange());
+        }
+
+        Range(const InlineElem *begin, const InlineElem *end_)
+          : cur(const_cast<InlineElem *>(begin)),
+            end(const_cast<InlineElem *>(end_)),
+            isInline(true) {
+            advancePastNulls(cur);
+            JS_ASSERT(isInlineRange());
+        }
+
+        bool checkInlineRangeInvariants() const {
+            JS_ASSERT(uintptr_t(cur) <= uintptr_t(end));
+            JS_ASSERT_IF(cur != end, cur->key != NULL);
+            return true;
+        }
+
+        bool isInlineRange() const {
+            JS_ASSERT_IF(isInline, checkInlineRangeInvariants());
+            return isInline;
+        }
+
+        /* Skip over tombstoned (NULL-key) slots starting at |begin|. */
+        void advancePastNulls(InlineElem *begin) {
+            InlineElem *newCur = begin;
+            while (newCur < end && NULL == newCur->key)
+                ++newCur;
+            JS_ASSERT(uintptr_t(newCur) <= uintptr_t(end));
+            cur = newCur;
+        }
+
+        void bumpCurPtr() {
+            JS_ASSERT(isInlineRange());
+            advancePastNulls(cur + 1);
+        }
+
+        void operator==(const Range &other);  /* Declared but never defined: comparison is forbidden. */
+
+      public:
+        bool empty() const {
+            return isInlineRange() ? cur == end : mapRange.empty();
+        }
+
+        Entry front() {
+            JS_ASSERT(!empty());
+            if (isInlineRange())
+                return Entry(cur->key, cur->value);
+            return Entry(mapRange.front().key, mapRange.front().value);
+        }
+
+        void popFront() {
+            JS_ASSERT(!empty());
+            if (isInlineRange())
+                bumpCurPtr();
+            else
+                mapRange.popFront();
+        }
+    }; /* class Range */
+
+    Range all() const {
+        return usingMap() ? Range(map.all()) : Range(inl, inl + inlNext);
+    }
+}; /* class InlineMap */
+
+} /* namespace js */
+
+#endif
--- a/mfbt/Util.h
+++ b/mfbt/Util.h
@@ -183,17 +183,17 @@ struct DebugOnly
      * to avoid!
      */
     ~DebugOnly() {}
 };
 
 
 /*
  * This utility pales in comparison to Boost's aligned_storage. The utility
- * simply assumes that JSUint64 is enough alignment for anyone. This may need
+ * simply assumes that uint64 is enough alignment for anyone. This may need
  * to be extended one day...
  *
  * As an important side effect, pulling the storage into this template is
  * enough obfuscation to confuse gcc's strict-aliasing analysis into not giving
  * false negatives when we cast from the char buffer to whatever type we've
  * constructed using the bytes.
  */
 template <size_t nbytes>
@@ -287,16 +287,21 @@ class Maybe
         return &asT();
     }
 
     T &ref() {
         MOZ_ASSERT(constructed);
         return asT();
     }
 
+    /* Const access to the contained T; asserts a value has been constructed. */
+    const T &ref() const {
+        MOZ_ASSERT(constructed);
+        /* const_cast reuses the non-const asT(); the result is re-constified. */
+        return const_cast<Maybe *>(this)->asT();
+    }
+
     void destroy() {  /* Invoke the contained value's destructor and mark this Maybe empty. */
         ref().~T();
         constructed = false;
     }
 
     void destroyIfConstructed() {
         if (!empty())
             destroy();