Fix 'for each' array iteration to use Iterator() so it is correct.
author Andrew Sutherland <asutherland@asutherland.org>
Wed, 08 Oct 2008 23:57:32 -0700
changeset 969 588eb7390fb72088e8ccb24928308ea700bce890
parent 968 772a38efff65e2f1eaf42fe30256db226c134deb
child 970 2c5e2d260836f0789d3f10bb6a297694086305b4
push id unknown
push user unknown
push date unknown
Fix 'for each' array iteration to use Iterator() so it is correct. Alternatively, we could have incremented an index variable and subscripted the arrays ourselves. But that's for suckers.
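For reference, a minimal sketch of the pattern this patch applies throughout (not part of the changeset itself; it assumes the legacy SpiderMonkey "for each" and Iterator() extensions that Gloda relies on, and aItems/handle() are just stand-ins for illustration):

  // Old pattern being replaced: enumerates the values of aItems directly.
  // for each (let item in aItems)
  //   handle(item);

  // New pattern: Iterator(aItems) yields [index, value] pairs, so the loop
  // destructures each pair and the body keeps using the value as before.
  for each (let [iItem, item] in Iterator(aItems))
    handle(item);  // handle() is a hypothetical callback

  // The alternative the message dismisses: manual index bookkeeping.
  // for (let iItem = 0; iItem < aItems.length; iItem++)
  //   handle(aItems[iItem]);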
components/glautocomp.js
modules/collection.js
modules/databind.js
modules/datamodel.js
modules/datastore.js
modules/gloda.js
modules/index_ab.js
modules/indexer.js
modules/noun_freetag.js
modules/suffixtree.js
--- a/components/glautocomp.js
+++ b/components/glautocomp.js
@@ -70,17 +70,17 @@ function ResultRowMulti(aNounID, aCriter
   this.renderer = null;
 }
 ResultRowMulti.prototype = {
   multi: true,
   onItemsAdded: function(aItems) {
     LOG.debug("onItemsAdded");
     if (this.renderer) {
       LOG.debug("rendering...");
-      for each (let item in aItems) {
+      for each (let [iItem, item] in Iterator(aItems)) {
         LOG.debug(" ..." + item);
         this.renderer.renderItem(item);
       }
     }
   },
   onItemsModified: function(aItems) {
   },
   onItemsRemoved: function(aItems) {
@@ -199,19 +199,19 @@ function ContactIdentityCompleter() {
     this.identityCollection = identityQuery.getAllSync();
   }
   else {
     // create an empty explicit collection
     this.identityCollection = Gloda.explicitCollection(Gloda.NOUN_IDENTITY, []);
   }
 
   let contactNames = [(c.name.replace(" ", "").toLowerCase() || "x") for each
-                      (c in this.contactCollection.items)];
+                      ([ic, c] in Iterator(this.contactCollection.items))];
   let identityMails = [i.value.toLowerCase() for each
-                       (i in this.identityCollection.items)];
+                       ([ii, i] in Iterator(this.identityCollection.items))];
 
   this.suffixTree = new MultiSuffixTree(contactNames.concat(identityMails),
     this.contactCollection.items.concat(this.identityCollection.items));
 }
 ContactIdentityCompleter.prototype = {
   _popularitySorter: function(a, b){ return b.popularity - a.popularity; },
   complete: function ContactIdentityCompleter_complete(aResult, aString) {
     if (aString.length < 3)
@@ -229,20 +229,20 @@ ContactIdentityCompleter.prototype = {
       if (thing.NOUN_ID == Gloda.NOUN_CONTACT && !(thing.id in contactToThing))
         contactToThing[thing.id] = thing;
       else if (thing.NOUN_ID == Gloda.NOUN_IDENTITY)
         contactToThing[thing.contactID] = thing;
     }
     // and since we can now map from contacts down to identities, map contacts
     //  to the first identity for them that we find...
     matches = [val.NOUN_ID == Gloda.NOUN_IDENTITY ? val : val.identities[0]
-               for each (val in contactToThing)];
+               for each ([iVal, val] in Iterator(contactToThing))];
 
     let rows = [new ResultRowSingle(match, "text", aResult.searchString)
-                for each (match in matches)];
+                for each ([iMatch, match] in Iterator(matches))];
     aResult.addRows(rows);
 
     // - match against database contacts / identities
     let pending = {contactToThing: contactToThing, pendingCount: 2};
     
     let contactQuery = Gloda.newQuery(Gloda.NOUN_CONTACT);
     contactQuery.nameLike([contactQuery.WILD, aString, contactQuery.WILD]);
     pending.contactColl = contactQuery.getCollection(this);
@@ -294,17 +294,17 @@ ContactIdentityCompleter.prototype = {
           contactToThing[contact.id] = contact;
           possibleDudes.push(contact.identities[0]);
         }
       }
       
       // sort in order of descending popularity
       possibleDudes.sort(this._popularitySorter);
       let rows = [new ResultRowSingle(dude, "text", result.searchString)
-                  for each (dude in possibleDudes)];
+                  for each ([iDude, dude] in Iterator(possibleDudes))];
       result.addRows(rows);
       result.markCompleted(this);
       
       // the collections no longer care about the result, make it clear.
       delete pending.identityColl.data;
       delete pending.contactColl.data;
       // the result object no longer needs us or our data
       delete result._contactCompleterPending;
@@ -339,17 +339,17 @@ ContactTagCompleter.prototype = {
     
     if (aString.length < 2)
       return false; // no async mechanism that will add new rows
     
     LOG.debug("Completing on tags...");
     
     tags = this._suffixTree.findMatches(aString.toLowerCase());
     let rows = [];
-    for each (let tag in tags) {
+    for each (let [iTag, tag] in Iterator(tags)) {
       let query = Gloda.newQuery(Gloda.NOUN_CONTACT);
       LOG.debug("  checking for tag: " + tag.name);
       query.freeTags(tag);
       let resRow = new ResultRowMulti(Gloda.NOUN_CONTACT, "tag", tag.name,
                                       query);
       rows.push(resRow);
     }
     aResult.addRows(rows);
@@ -412,17 +412,17 @@ nsAutoCompleteGloda.prototype = {
       Components.interfaces.nsIAutoCompleteSearch]),
 
   startSearch: function(aString, aParam, aResult, aListener) {
     let result = new nsAutoCompleteGlodaResult(aListener, this, aString);
     // save this for hacky access to the search.  I somewhat suspect we simply
     //  should not be using the formal autocomplete mechanism at all.
     this.curResult = result;
     
-    for each (let completer in this.completers) {
+    for each (let [iCompleter, completer] in Iterator(this.completers)) {
       // they will return true if they have something pending.
       if (completer.complete(result, aString))
         result.markPending(completer);
     }
     
     aListener.onSearchResult(this, result);
   },
 
--- a/modules/collection.js
+++ b/modules/collection.js
@@ -117,17 +117,18 @@ var GlodaCollectionManager = {
         let item = cache._idMap[aID];
         return cache.hit(item);
       }
     }
     
     if (aDoCache === false)
       cache = null;
   
-    for each (let collection in this.getCollectionsForNounID(aNounID)) {
+    for each (let [iCollection, collection] in
+              Iterator(this.getCollectionsForNounID(aNounID))) {
       if (aID in collection._idMap) {
         let item = collection._idMap[aID];
         if (cache)
           cache.add([item]);
         return item;
       }
     }
     
@@ -149,17 +150,18 @@ var GlodaCollectionManager = {
         let item = cache._uniqueValueMap[aUniqueValue];
         return cache.hit(item);
       }
     }
     
     if (aDoCache === false)
       cache = null;
   
-    for each (let collection in this.getCollectionsForNounID(aNounID)) {
+    for each (let [iCollection, collection] in
+              Iterator(this.getCollectionsForNounID(aNounID))) {
       if (aUniqueValue in collection._uniqueValueMap) {
         let item = collection._uniqueValueMap[aUniqueValue];
         if (cache)
           cache.add([item]);
         return item;
       }
     }
     
@@ -224,17 +226,18 @@ var GlodaCollectionManager = {
       for (let iItem = 0; iItem < aItems.length; iItem++) {
         unresolvedIndexToItem[iItem] = aItems[iItem];
       }
       numUnresolved = aItems.length;
     }
   
     let needToCache = [];
     // next, let's fall back to our collections
-    for each (let collection in this.getCollectionsForNounID(aNounID)) {
+    for each (let [iCollection, collection] in
+              Iterator(this.getCollectionsForNounID(aNounID))) {
       for (let [iItem, item] in Iterator(unresolvedIndexToItem)) {
         if (item.id in collection._idMap) {
           let realItem = collection._idMap[item.id];
           // update the caller's array to now have the 'real' object
           aItems[iItem] = realItem;
           // flag that we need to cache this guy (we use an inclusive cache)
           needToCache.push(realItem);
           // we no longer need to resolve this item...
@@ -292,18 +295,19 @@ var GlodaCollectionManager = {
    *  to the collection if the item meets the query that defines the collection.
    */
   itemsAdded: function gloda_colm_itemsAdded(aNounID, aItems) {
     let cache = this._cachesByNoun[aNounID];
     if (cache) {
       cache.add(aItems);
     }
 
-    for each (let collection in this.getCollectionsForNounID(aNounID)) {
-      let addItems = [item for each (item in aItems)
+    for each (let [iCollection, collection] in
+              Iterator(this.getCollectionsForNounID(aNounID))) {
+      let addItems = [item for each ([i, item] in Iterator(aItems))
                       if (collection.query.test(item))];
       if (addItems.length)
         collection._onItemsAdded(addItems);
     }
   },
   /**
    * This should be called when items in the global database are modified.  For
    *  example, as a result of indexing.  This should generally only be called
@@ -311,19 +315,20 @@ var GlodaCollectionManager = {
    * We walk all existing collections for the given noun type.  For items
    *  currently included in each collection but should no longer be (per the
    *  collection's defining query) we generate onItemsRemoved events.  For items
    *  not currently included in the collection but should now be, we generate
    *  onItemsAdded events.  For items included that still match the query, we
    *  generate onItemsModified events.
    */
   itemsModified: function gloda_colm_itemsModified(aNounID, aItems) {
-    for each (collection in this.getCollectionsForNounID(aNounID)) {
+    for each (let [iCollection, collection] in
+              Iterator(this.getCollectionsForNounID(aNounID))) {
       let added = [], modified = [], removed = [];
-      for each (let item in aItems) {
+      for each (let [iItem, item] in Iterator(aItems)) {
         if (item.id in collection._idMap) {
           // currently in... but should it still be there?
           if (collection.query.test(item))
             modified.push(item); // yes, keep it
           else
             removed.push(item); // no, bin it
         }
         else if (collection.query.test(item)) // not in, should it be?
@@ -345,25 +350,26 @@ var GlodaCollectionManager = {
    *  with a brand new unique id and we would get an itemsAdded event.)
    * We walk all existing collections for the given noun type.  For items
    *  currently in the collection, we generate onItemsRemoved events.
    */
   itemsDeleted: function gloda_colm_itemsDeleted(aNounID, aItems) {
     // cache
     let cache = this._cachesByNoun[aNounID];
     if (cache) {
-      for each (let item in aItem) {
+      for each (let [iItem, item] in Iterator(aItems)) {
         if (item.id in cache._idMap)
           cache.delete(item);
       }
     }
 
     // collections
-    for each (let collection in this.getCollectionsForNounID(aNounID)) {
-      let removeItems = [item for each (item in aItems)
+    for each (let [iCollection, collection] in
+              Iterator(this.getCollectionsForNounID(aNounID))) {
+      let removeItems = [item for each ([i, item] in Iterator(aItems))
                          if (item.id in collection._idMap)];
       if (removeItems.length)
         collection._onItemsRemoved(removeItems);
     }
   },
 };
 
 /**
@@ -383,23 +389,23 @@ function GlodaCollection(aNounMeta, aIte
   this._nounMeta = aNounMeta;
   // should we also maintain a unique value mapping...
   if (this._nounMeta.usesUniqueValue)
     this._uniqueValueMap = {};
 
   this.items = aItems || [];
   this._idMap = {};
   if (this._uniqueValueMap) {
-    for each (let item in this.items) {
+    for each (let [iItem, item] in Iterator(this.items)) {
       this._idMap[item.id] = item;
       this._uniqueValueMap[item.uniqueValue] = item;
     }
   }
   else {
-    for each (let item in this.items) {
+    for each (let [iItem, item] in Iterator(this.items)) {
       this._idMap[item.id] = item;
     }
   }
   
   this.query = aQuery || null;
   this._listener = aListener || null;
 }
 
@@ -418,23 +424,23 @@ GlodaCollection.prototype = {
     if (this._uniqueValueMap)
       this._uniqueValueMap = {};
     this.items = [];
   },
 
   _onItemsAdded: function gloda_coll_onItemsAdded(aItems) {
     this.items.push.apply(this.items, aItems);
     if (this._uniqueValueMap) {
-      for each (let item in this.items) {
+      for each (let [iItem, item] in Iterator(this.items)) {
         this._idMap[item.id] = item;
         this._uniqueValueMap[item.uniqueValue] = item;
       }
     }
     else {
-      for each (let item in this.items) {
+      for each (let [iItem, item] in Iterator(this.items)) {
         this._idMap[item.id] = item;
       }
     }
     if (this._listener)
       this._listener.onItemsAdded(aItems, this);
   },
   
   _onItemsModified: function gloda_coll_onItemsModified(aItems) {
@@ -448,17 +454,17 @@ GlodaCollection.prototype = {
    *  part is that we need to remove the deleted items from our list of items.
    */
   _onItemsRemoved: function gloda_coll_onItemsRemoved(aItems) {
     // we want to avoid the O(n^2) deletion performance case, and deletion
     //  should be rare enough that the extra cost of building the deletion map
     //  should never be a real problem.
     let deleteMap = {};
     // build the delete map while also nuking from our id map/unique value map
-    for each (let item in aItems) {
+    for each (let [iItem, item] in Iterator(aItems)) {
       deleteMap[item.id] = true;
       delete this._idMap[item.id];
       if (this._uniqueValueMap)
         delete this._uniqueValueMap[item.uniqueValue];
     }
     let items = this.items;
     // in-place filter.  probably needless optimization.
     let iWrite=0;
@@ -498,17 +504,17 @@ function GlodaLRUCacheCollection(aNounMe
  * @class A LRU-discard cache.  We use a doubly linked-list for the eviction
  *  tracking.  Since we require that there is at most one LRU-discard cache per
  *  noun class, we simplify our lives by adding our own attributes to the
  *  cached objects.
  * @augments GlodaCollection
  */
 GlodaLRUCacheCollection.prototype = new GlodaCollection;
 GlodaLRUCacheCollection.prototype.add = function cache_add(aItems) {
-  for each (let item in aItems) {
+  for each (let [iItem, item] in Iterator(aItems)) {
     if (item.id in this._idMap) {
       // DEBUGME so, we're dealing with this, but it shouldn't happen.  need
       //  trace-debuggage.
       continue;
     }
     this._idMap[item.id] = item;
     if (this._uniqueValueMap)
       this._uniqueValueMap[item.uniqueValue] = item;
@@ -603,16 +609,16 @@ GlodaLRUCacheCollection.prototype.delete
  * If any of the cached items are dirty, commit them, and make them no longer
  *  dirty.
  */
 GlodaLRUCacheCollection.prototype.commitDirty = function cache_commitDirty() {
   // we can only do this if there is an update method available...
   if (!this._nounMeta.objUpdate)
     return;
 
-  for each (let item in this._idMap) {
+  for each (let [iItem, item] in Iterator(this._idMap)) {
     if (item.dirty) {
       LOG.debug("flushing dirty: " + item);
       this._nounMeta.objUpdate.call(this._nounMeta.datastore, item);
       delete item.dirty;
     }
   }
 }
--- a/modules/databind.js
+++ b/modules/databind.js
@@ -45,22 +45,22 @@ const Cu = Components.utils;
 Cu.import("resource://gloda/modules/log4moz.js");
 
 function GlodaDatabind(aTableDef, aDatastore) {
   this._tableDef = aTableDef;
   this._datastore = aDatastore;
   this._log = Log4Moz.Service.getLogger("gloda.databind." + aTableDef.name);
   
   let insertSql = "INSERT INTO " + this._tableDef._realName + " (" +
-                  [coldef[0] for each
-                   (coldef in this._tableDef.columns)].join(", ") +
-                  ") VALUES (" +
-                  [(":" + coldef[0]) for each
-                   (coldef in this._tableDef.columns)].join(", ") +
-                  ")";
+    [coldef[0] for each
+     ([i, coldef] in Iterator(this._tableDef.columns))].join(", ") +
+    ") VALUES (" +
+    [(":" + coldef[0]) for each
+     ([i, coldef] in Iterator(this._tableDef.columns))].join(", ") +
+    ")";
   
   this._insertStmt = aDatastore._createStatement(insertSql);
   
   this._stmtCache = {};
 }
 
 GlodaDatabind.prototype = {
   
@@ -102,15 +102,15 @@ GlodaDatabind.prototype = {
     if (stmt.step())
       row = stmt.row;
     
     return row;
   },
   
   insert: function(aValueDict) {
     let stmt = this._insertStmt;
-    for each (let coldef in this._tableDef.columns) {
-      this._log.debug("insert arg: " + coldef[0] + "=" + aValueDict[coldef[0]]);
-      stmt.params[coldef[0]] = aValueDict[coldef[0]];
+    for each (let [iColDef, colDef] in Iterator(this._tableDef.columns)) {
+      this._log.debug("insert arg: " + colDef[0] + "=" + aValueDict[colDef[0]]);
+      stmt.params[colDef[0]] = aValueDict[colDef[0]];
     }
     stmt.execute();
   }
 };
--- a/modules/datamodel.js
+++ b/modules/datamodel.js
@@ -172,17 +172,17 @@ let GlodaHasAttributesMixIn = {
 
   /**
    * For consistency of caching with the bound attributes, we try and access the
    *  attributes through their bound names if they are bound.
    */
   get_attributes: function() {
     let seenDefs = {};
     let attribs = [];
-    for each (let attrParamVal in this.rawAttributes) {
+    for each (let [iAPV, attrParamVal] in Iterator(this.rawAttributes)) {
       let attrDef = attrParamVal[0];
       if (!(attrDef in seenDefs)) {
         if (attrDef.isBound) {
           if (attrDef.singular) {
             attribs.push([attrDef, this[attrDef.boundName]]);
           }
           else {
             let values = this[attrDef.boundName];
@@ -223,32 +223,33 @@ let GlodaHasAttributesMixIn = {
     let hadAttributes = this._attributes !== null;
     this._attributes = aNewAttribs;
     // if this guy didn't already have attributes, we don't actually need to
     //  do any caching work.
     if (!hadAttributes)
       return;
 
     let seenDefs = {};
-    for each (let attrParamVal in this._attributes) {
+    for each (let [iAPV, attrParamVal] in Iterator(this._attributes)) {
       let attrDef = attrParamVal[0];
       if (!(attrDef in seenDefs)) {
         if (attrDef.isBound) {
           // get the getter from our _prototype_ (not us!)
           let getterFunc = this.__proto__.__lookupGetter__(attrDef.boundName);
           // force the getter to do his work (on us)
           getterFunc.call(this);
           seenDefs[attrDef] = true;
         }
       }
     }
   },
 
   getAttributeInstances: function gloda_attrix_getAttributeInstances(aAttr) {
-    return [attrParamVal for each (attrParamVal in this.rawAttributes) if
+    return [attrParamVal for each
+            ([iAPV, attrParamVal] in Iterator(this.rawAttributes)) if
             (attrParamVal[0] == aAttr)];
   },
 
   getSingleAttribute: function gloda_attrix_getSingleAttribute(aAttr) {
     let instances = this.getAttributeInstances(aAttr);
     if (instances.length > 0)
       return instances[0];
     else
--- a/modules/datastore.js
+++ b/modules/datastore.js
@@ -627,18 +627,18 @@ var GlodaDatastore = {
    */
   createTableIfNotExists: function gloda_ds_createTableIfNotExists(aTableDef) {
     aTableDef._realName = "plugin_" + aTableDef.name;
 
     // first, check if the table exists
     if (!this.syncConnection.tableExists(aTableDef._realName)) {
       try {
         this.syncConnection.createTable(aTableDef._realName,
-                                        [coldef.join(" ") for each
-                                     (coldef in aTableDef.columns)].join(", "));
+          [coldef.join(" ") for each
+           ([i, coldef] in Iterator(aTableDef.columns))].join(", "));
       }
       catch (ex) {
          this._log.error("Problem creating table " + aTableDef.name + " " +
            "because: " + ex + " at " + ex.fileName + ":" + ex.lineNumber);
          return null;
       }
 
       for (let indexName in aTableDef.indices) {
@@ -694,17 +694,18 @@ var GlodaDatastore = {
 
     if (!aWillFinalize)
       this._outstandingAsyncStatements.push(statement);
 
     return statement;
   },
 
   _cleanupAsyncStatements: function gloda_ds_cleanupAsyncStatements() {
-    [stmt.finalize() for each (stmt in this._outstandingAsyncStatements)];
+    [stmt.finalize() for each
+     ([i, stmt] in Iterator(this._outstandingAsyncStatements))];
   },
 
   _outstandingSyncStatements: [],
 
   _createSyncStatement: function gloda_ds_createSyncStatement(aSQLString,
                                                               aWillFinalize) {
     let statement = null;
     try {
@@ -718,17 +719,18 @@ var GlodaDatastore = {
 
     if (!aWillFinalize)
       this._outstandingSyncStatements.push(statement);
 
     return statement;
   },
 
   _cleanupSyncStatements: function gloda_ds_cleanupSyncStatements() {
-    [stmt.finalize() for each (stmt in this._outstandingSyncStatements)];
+    [stmt.finalize() for each
+     ([i, stmt] in Iterator(this._outstandingSyncStatements))];
   },
 
   /**
    * Perform a synchronous executeStep on the statement, handling any
    *  SQLITE_BUSY fallout that could conceivably happen from a collision on our
    *  read with the async writes.
    * Basically we keep trying until we succeed or run out of tries.
    * We believe this to be a reasonable course of action because we don't
@@ -1651,17 +1653,17 @@ var GlodaDatastore = {
       results.push([]);
       msgIDToIndex[msgID] = iID;
     }
 
     // Unfortunately, IN doesn't work with statement binding mechanisms, and
     //  a chain of ORed tests really can't be bound unless we create one per
     //  value of N (seems silly).
     let quotedIDs = ["'" + msgID.replace("'", "''", "g") + "'" for each
-                     (msgID in aMessageIDs)]
+                     ([i, msgID] in Iterator(aMessageIDs))]
     let sqlString = "SELECT * FROM messages WHERE headerMessageID IN (" +
                     quotedIDs + ")";
     let statement = this._createAsyncStatement(sqlString, true);
 
     statement.executeAsync(new MessagesByMessageIdCallback(statement,
       msgIDToIndex, results, aCallback, aCallbackThis, aCallbackArgs));
     statement.finalize();
   },
@@ -2026,17 +2028,18 @@ var GlodaDatastore = {
 
         // our implementation requires that everyone in attr_ors has the same
         //  attribute.
         let presumedAttr = attr_ors[0][0];
 
         // -- handle full-text specially here, it's different than the other
         //  cases...
         if (presumedAttr.special == kSpecialFulltext) {
-          let matchStr = [APV[2] for each (APV in attr_ors)].join(" OR ");
+          let matchStr = [APV[2] for each
+            ([iAPV, APV] in Iterator(attr_ors))].join(" OR ");
           matchStr.replace("'", "''");
 
           // for example, the match
           let ftSelect = "SELECT docid FROM " + nounMeta.tableName + "Text" +
             " WHERE " + presumedAttr.specialColumnName + " MATCH '" +
             matchStr + "'";
           selects.push(ftSelect);
 
@@ -2112,36 +2115,36 @@ var GlodaDatastore = {
                                   " AND " + APV[3]);
             }
             // - string case (LIKE)
             else {
               // this will result in a warning in debug builds.  as we move to
               //  supporting async operation, we should also move to binding all
               //  arguments for dynamic queries too.
               likePayload = '';
-              for each (let valuePart in APV[2]) {
+              for each (let [iValuePart, valuePart] in Iterator(APV[2])) {
                 if (typeof valuePart == "string")
                   likePayload += this._escapeLikeStatement.escapeStringForLIKE(
                     valuePart, "/");
                 else
                   likePayload += "%";
               }
               valueTests.push(valueColumnName + " LIKE ? ESCAPE '/'");
               boundArgs.push(likePayload);
             }
           }
         }
         let select = "SELECT " + idColumnName + " FROM " + tableName +
-                     " WHERE " +
-                     [("(" + avt[0] +
-                       (avt[1].length ? ((avt[0] ? " AND " : "") + "(" 
-                            + avt[1].join(" OR ") + ")") :
-                          "")
-                       + ")")
-                      for each (avt in attrValueTests)].join(" OR ");
+          " WHERE " +
+          [("(" + avt[0] +
+            (avt[1].length ? ((avt[0] ? " AND " : "") + "(" 
+                 + avt[1].join(" OR ") + ")") :
+               "")
+            + ")")
+           for each ([i, avt] in Iterator(attrValueTests))].join(" OR ");
         selects.push(select);
       }
 
       if (selects.length)
         whereClauses.push("id IN (" + selects.join(" INTERSECT ") + " )");
     }
 
     let sqlString = "SELECT * FROM " + nounMeta.tableName;
--- a/modules/gloda.js
+++ b/modules/gloda.js
@@ -673,17 +673,17 @@ var Gloda = {
   /**
    * Retrieve all of the actions (as defined using defineNounAction) for the
    *  given noun type (via noun ID) with the given action type (ex: filter).
    */
   getNounActions: function gloda_ns_getNounActions(aNounID, aActionType) {
     let nounMeta = this._nounIDToMeta[aNounID];
     if (!nounMeta)
       return [];
-    return [action for each (action in nounMeta.actions)
+    return [action for each ([i, action] in Iterator(nounMeta.actions))
             if (!aActionType || (action.actionType == aActionType))];
   },
 
   /** Attribute providers in the sequence to process them. */
   _attrProviderOrderByNoun: {},
   /** Maps attribute providers to the list of attributes they provide */
   _attrProviders: {},
   /**
--- a/modules/index_ab.js
+++ b/modules/index_ab.js
@@ -234,17 +234,17 @@ var GlodaABAttrs = {
     let tags = null;
     try {
       tags = aCard.getProperty("tags", null);
     } catch (ex) {
       this._log.error("Problem accessing property: " + ex);
     }
     if (tags) {
       this._log.debug("Found tags: " + tags);
-      for each (let tagName in tags.split(",")) {
+      for each (let [iTagName, tagName] in Iterator(tags.split(","))) {
         tagName = tagName.trim();
         // return attrib, param, value; we know the param to use because we know
         //  how FreeTagNoun works, but this is a candidate for refactoring.
         if (tagName) {
           FreeTagNoun.getFreeTag(tagName); // cause the tag to be known
           attribs.push([this._attrFreeTag, tagName, null]);
         }
       }
--- a/modules/indexer.js
+++ b/modules/indexer.js
@@ -313,17 +313,18 @@ var GlodaIndexer = {
   },
   
   _otherIndexers: [],
   registerIndexer: function gloda_index_registerIndexer(aIndexer) {
     this._log.info("Registering indexer: " + aIndexer.name);
     this._otherIndexers.push(aIndexer);
     
     try {
-      for each (let [workerCode, workerFunc] in aIndexer.workers) {
+      for each (let [iWorker, workerInfo] in Iterator(aIndexer.workers)) {
+        let [workerCode, workerFunc] = workerInfo;
         this._otherIndexerWorkers[workerCode] = [aIndexer, workerFunc];
       }
     }
     catch (ex) {
       this._log.warning("Helper indexer threw exception on worker enum.");
     }
     
     if (this._enabled) {
@@ -372,32 +373,32 @@ var GlodaIndexer = {
       let notificationService =
         Cc["@mozilla.org/messenger/msgnotificationservice;1"].
         getService(Ci.nsIMsgFolderNotificationService);
       notificationService.addListener(this._msgFolderListener,
                                       Ci.nsIMsgFolderNotificationService.all);
       
       this._enabled = true;
       
-      for each (let indexer in this._otherIndexers) {
+      for each (let [iIndexer, indexer] in Iterator(this._otherIndexers)) {
         try {
           indexer.enable();
         } catch (ex) {
           this._log.warning("Helper indexer threw exception on enable: " + ex);
         }
       }
       
       // if we have an accumulated desire to index things, kick it off again.
       if (this._indexingDesired) {
         this._indexingDesired = false; // it's edge-triggered for now
         this.indexing = true;
       }
     }
     else if (this._enabled && !aEnable) {
-      for each (let indexer in this._otherIndexers) {
+      for each (let [iIndexer, indexer] in Iterator(this._otherIndexers)) {
         try {
           indexer.disable();
         } catch (ex) {
           this._log.warning("Helper indexer threw exception on disable: " + ex);
         }
       }
 
       // remove observer; no more events to observe!
@@ -1118,17 +1119,17 @@ var GlodaIndexer = {
       // no need to set this.indexing to true, it must be true if we are here.
     }
     
     // if this is our first sweep, give the other indexers a chance to do their
     //  own initial sweep.  it's on them to schedule their own job if they have
     //  a lot to do, but if they only have a little to do, they can get away
     //  with it, as we yield a sync after each one.
     if (!this._initialSweepPerformed) {
-      for each (let indexer in this._otherIndexers) {
+      for each (let [iIndexer, indexer] in Iterator(this._otherIndexers)) {
         try {
           indexer.initialSweep();
         }
         catch (ex) {
           this._log.warning("Helper indexer threw exception on initial sweep:" +
                             ex);
         }
         yield this.kWorkSync;
@@ -1230,17 +1231,17 @@ var GlodaIndexer = {
     //  on the asynchronous thread.  (well, there is a potential race that
     //  would result in us clearing pendingDeletions erroneously, but the
     //  processedAny flag and our use of a while loop here make this
     //  sufficiently close to zero until we move to being async.)
     let messagesToDelete = this._datastore.getDeletedMessageBlock();
     let processedAny = false;
     while (messagesToDelete.length) {
       aJob.goal += messagesToDelete.length;
-      for each (let message in messagesToDelete) {
+      for each (let [iMessage, message] in Iterator(messagesToDelete)) {
         this._deleteMessage(message);
         aJob.offset++;
         yield this.kWorkSync;
       }
       
       processedAny = true;
       messagesToDelete = this._datastore.getDeletedMessageBlock(); 
     }
@@ -1322,17 +1323,17 @@ var GlodaIndexer = {
   /**
    * Queue a list of messages for indexing.
    *
    * @param aFoldersAndMessages List of [nsIMsgFolder, message key] tuples.
    */
   indexMessages: function gloda_index_indexMessages(aFoldersAndMessages) {
     let job = new IndexingJob("message", 1, null);
     job.items = [[GlodaDatastore._mapFolderURI(fm[0].URI), fm[1]] for each
-                 (fm in aFoldersAndMessages)];
+                 ([i, fm] in Iterator(aFoldersAndMessages))];
     this._indexQueue.push(job);
     this._indexingJobGoal++;
     this.indexing = true;
   },
   
   /* *********** Event Processing *********** */
   observe: function gloda_indexer_observe(aSubject, aTopic, aData) {
     // idle
@@ -1984,17 +1985,17 @@ var GlodaIndexer = {
       else if ((curMsg === null) && (candMsg.folderID === null)) {
         curMsg = candMsg;
       }
     }
     
     let attachmentNames = null;
     if (aMimeMsg) {
       let allAttachmentNames = [att.name for each
-                                (att in aMimeMsg.allAttachments)
+                                ([i, att] in Iterator(aMimeMsg.allAttachments))
                                 if (att.isRealAttachment)];
       // we need some kind of delimeter for the names.  we use a newline.
       if (allAttachmentNames)
         attachmentNames = allAttachmentNames.join("\n");
     } 
     
     let isNew;
     if (curMsg === null) {
--- a/modules/noun_freetag.js
+++ b/modules/noun_freetag.js
@@ -73,17 +73,17 @@ var FreeTagNoun = {
       this._listeners.splice(index, 1);
   },
   
   knownFreeTags: {},
   getFreeTag: function(aTagName) {
     let tag = this.knownFreeTags[aTagName];
     if (!tag) {
       tag = this.knownFreeTags[aTagName] = new FreeTag(aTagName);
-      for each (let listener in this._listeners)
+      for each (let [iListener, listener] in Iterator(this._listeners))
         listener.onFreeTagAdded(tag);
     }
     return tag;
   },
 
   toParamAndValue: function gloda_noun_tag_toParamAndValue(aTag) {
     return [aTag.name, null];
   },
--- a/modules/suffixtree.js
+++ b/modules/suffixtree.js
@@ -351,17 +351,17 @@ dump("  bailing! (bail was: " + bail + "
                                 Math.min(aState.end, this._str.length)); 
       dump(aIndent + aKey + ":" + snip + "(" +
            aState.start + ":" + aState.end + ")\n");
     }
     else
       dump(aIndent + aKey + ": (explicit:" + aState.start + ":" + aState.end +")\n");
     let nextIndent = aIndent + "  ";
     let keys = [c for (c in aState) if (c.length == 1)];
-    for each (let key in keys) {
+    for each (let [iKey, key] in Iterator(keys)) {
       this.dump(aState[key], nextIndent, key);
     }
   }
 };
 MultiSuffixTree.prototype = SuffixTree.prototype;
 
 function examplar() {
   let names = ["AndrewSmith", "AndrewJones", "MarkSmith", "BryanClark",