Bug 896100: Convert all uses of OldMove and MoveRef to true rvalue references and the modern Move and Forward. r=luke, r=waldo
authorJim Blandy <jimb@mozilla.com>
Tue, 19 Nov 2013 09:05:36 -0800
changeset 156772 bbf4e009ba00199c5ecfbcb5ee3c25e1cb465391
parent 156771 a2af190cd480187aa9cf47e9da82d935148d4570
child 156773 a18969434210ede5e1bb87d20be10812d792a804
push id270
push userpvanderbeken@mozilla.com
push dateThu, 06 Mar 2014 09:24:21 +0000
reviewersluke, waldo
bugs896100
milestone28.0a1
Bug 896100: Convert all uses of OldMove and MoveRef to true rvalue references and the modern Move and Forward. r=luke, r=waldo
js/public/HashTable.h
js/public/MemoryMetrics.h
js/public/Vector.h
js/src/builtin/MapObject.cpp
js/src/jit/AliasAnalysis.cpp
js/src/jit/AsmJS.cpp
js/src/jit/AsmJSModule.h
js/src/shell/jsheaptools.cpp
js/src/vm/MemoryMetrics.cpp
mfbt/Move.h
mfbt/Vector.h
xpcom/glue/nsHashKeys.h
--- a/js/public/HashTable.h
+++ b/js/public/HashTable.h
@@ -132,30 +132,31 @@ class HashMap
     //    assert(p->key == 3);
     //    char val = p->value;
     typedef typename Impl::AddPtr AddPtr;
     AddPtr lookupForAdd(const Lookup &l) const {
         return impl.lookupForAdd(l);
     }
 
     template<typename KeyInput, typename ValueInput>
-    bool add(AddPtr &p, const KeyInput &k, const ValueInput &v) {
-        Entry e(k, v);
-        return impl.add(p, mozilla::OldMove(e));
+    bool add(AddPtr &p, KeyInput &&k, ValueInput &&v) {
+        Entry e(mozilla::Forward<KeyInput>(k), mozilla::Forward<ValueInput>(v));
+        return impl.add(p, mozilla::Move(e));
     }
 
-    bool add(AddPtr &p, const Key &k) {
-        Entry e(k, Value());
-        return impl.add(p, mozilla::OldMove(e));
+    template<typename KeyInput>
+    bool add(AddPtr &p, KeyInput &&k) {
+        Entry e(mozilla::Forward<KeyInput>(k), Value());
+        return impl.add(p, mozilla::Move(e));
     }
 
     template<typename KeyInput, typename ValueInput>
-    bool relookupOrAdd(AddPtr &p, const KeyInput &k, const ValueInput &v) {
-        Entry e(k, v);
-        return impl.relookupOrAdd(p, k, mozilla::OldMove(e));
+    bool relookupOrAdd(AddPtr &p, KeyInput &&k, ValueInput &&v) {
+        Entry e(mozilla::Forward<KeyInput>(k), mozilla::Forward<ValueInput>(v));
+        return impl.relookupOrAdd(p, e.key, mozilla::Move(e));
     }
 
     // |all()| returns a Range containing |count()| elements. E.g.:
     //
     //   typedef HashMap<int,char> HM;
     //   HM h;
     //   for (HM::Range r = h.all(); !r.empty(); r.popFront())
     //     char c = r.front().value;
@@ -214,30 +215,30 @@ class HashMap
     /************************************************** Shorthand operations */
 
     bool has(const Lookup &l) const {
         return impl.lookup(l) != nullptr;
     }
 
     // Overwrite existing value with v. Return false on oom.
     template<typename KeyInput, typename ValueInput>
-    bool put(const KeyInput &k, const ValueInput &v) {
+    bool put(KeyInput &&k, ValueInput &&v) {
         AddPtr p = lookupForAdd(k);
         if (p) {
-            p->value = v;
+            p->value = mozilla::Forward<ValueInput>(v);
             return true;
         }
-        return add(p, k, v);
+        return add(p, mozilla::Forward<KeyInput>(k), mozilla::Forward<ValueInput>(v));
     }
 
     // Like put, but assert that the given key is not already present.
     template<typename KeyInput, typename ValueInput>
-    bool putNew(const KeyInput &k, const ValueInput &v) {
-        Entry e(k, v);
-        return impl.putNew(k, mozilla::OldMove(e));
+    bool putNew(KeyInput &&k, ValueInput &&v) {
+        Entry e(mozilla::Forward<KeyInput>(k), mozilla::Forward<ValueInput>(v));
+        return impl.putNew(e.key, mozilla::Move(e));
     }
 
     // Add (k,defaultValue) if |k| is not found. Return a false-y Ptr on oom.
     Ptr lookupWithDefault(const Key &k, const Value &defaultValue) {
         AddPtr p = lookupForAdd(k);
         if (p)
             return p;
         (void)add(p, k, defaultValue);  // p is left false-y on oom.
@@ -259,18 +260,21 @@ class HashMap
 
     // Infallibly rekey one entry, if present.
     void rekeyAs(const Lookup &old_lookup, const Lookup &new_lookup, const Key &new_key) {
         if (Ptr p = lookup(old_lookup))
             impl.rekeyAndMaybeRehash(p, new_lookup, new_key);
     }
 
     // HashMap is movable
-    HashMap(mozilla::MoveRef<HashMap> rhs) : impl(mozilla::OldMove(rhs->impl)) {}
-    void operator=(mozilla::MoveRef<HashMap> rhs) { impl = mozilla::OldMove(rhs->impl); }
+    HashMap(HashMap &&rhs) : impl(mozilla::Move(rhs.impl)) {}
+    void operator=(HashMap &&rhs) {
+        MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
+        impl = mozilla::Move(rhs.impl);
+    }
 
   private:
     // HashMap is not copyable or assignable
     HashMap(const HashMap &hm) MOZ_DELETE;
     HashMap &operator=(const HashMap &hm) MOZ_DELETE;
 
     friend class Impl::Enum;
 };
@@ -366,20 +370,24 @@ class HashSet
     //    }
     //    assert(*p == 3);
     //
     // Note that relookupOrAdd(p,l,t) performs Lookup using |l| and adds the
     // entry |t|, where the caller ensures match(l,t).
     typedef typename Impl::AddPtr AddPtr;
     AddPtr lookupForAdd(const Lookup &l) const        { return impl.lookupForAdd(l); }
 
-    bool add(AddPtr &p, const T &t)                   { return impl.add(p, t); }
+    template <typename U>
+    bool add(AddPtr &p, U &&u) {
+        return impl.add(p, mozilla::Forward<U>(u));
+    }
 
-    bool relookupOrAdd(AddPtr &p, const Lookup &l, const T &t) {
-        return impl.relookupOrAdd(p, l, t);
+    template <typename U>
+    bool relookupOrAdd(AddPtr &p, const Lookup &l, U &&u) {
+        return impl.relookupOrAdd(p, l, mozilla::Forward<U>(u));
     }
 
     // |all()| returns a Range containing |count()| elements:
     //
     //   typedef HashSet<int> HS;
     //   HS h;
     //   for (HS::Range r = h.all(); !r.empty(); r.popFront())
     //     int i = r.front();
@@ -433,29 +441,32 @@ class HashSet
     unsigned generation() const                       { return impl.generation(); }
 
     /************************************************** Shorthand operations */
 
     bool has(const Lookup &l) const {
         return impl.lookup(l) != nullptr;
     }
 
-    // Overwrite existing value with v. Return false on oom.
-    bool put(const T &t) {
-        AddPtr p = lookupForAdd(t);
-        return p ? true : add(p, t);
+    // Add |u| if it is not present already. Return false on oom.
+    template <typename U>
+    bool put(U &&u) {
+        AddPtr p = lookupForAdd(u);
+        return p ? true : add(p, mozilla::Forward<U>(u));
     }
 
     // Like put, but assert that the given key is not already present.
-    bool putNew(const T &t) {
-        return impl.putNew(t, t);
+    template <typename U>
+    bool putNew(U &&u) {
+        return impl.putNew(u, mozilla::Forward<U>(u));
     }
 
-    bool putNew(const Lookup &l, const T &t) {
-        return impl.putNew(l, t);
+    template <typename U>
+    bool putNew(const Lookup &l, U &&u) {
+        return impl.putNew(l, mozilla::Forward<U>(u));
     }
 
     void remove(const Lookup &l) {
         if (Ptr p = lookup(l))
             remove(p);
     }
 
     // Infallibly rekey one entry, if present.
@@ -467,18 +478,21 @@ class HashSet
 
     // Infallibly rekey one entry, if present.
     void rekeyAs(const Lookup &old_lookup, const Lookup &new_lookup, const T &new_value) {
         if (Ptr p = lookup(old_lookup))
             impl.rekeyAndMaybeRehash(p, new_lookup, new_value);
     }
 
     // HashSet is movable
-    HashSet(mozilla::MoveRef<HashSet> rhs) : impl(mozilla::OldMove(rhs->impl)) {}
-    void operator=(mozilla::MoveRef<HashSet> rhs) { impl = mozilla::OldMove(rhs->impl); }
+    HashSet(HashSet &&rhs) : impl(mozilla::Move(rhs.impl)) {}
+    void operator=(HashSet &&rhs) {
+        MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
+        impl = mozilla::Move(rhs.impl);
+    }
 
   private:
     // HashSet is not copyable or assignable
     HashSet(const HashSet &hs) MOZ_DELETE;
     HashSet &operator=(const HashSet &hs) MOZ_DELETE;
 
     friend class Impl::Enum;
 };
@@ -605,20 +619,21 @@ class HashMapEntry
     template <class, class, class> friend class detail::HashTable;
     template <class> friend class detail::HashTableEntry;
 
     HashMapEntry(const HashMapEntry &) MOZ_DELETE;
     void operator=(const HashMapEntry &) MOZ_DELETE;
 
   public:
     template<typename KeyInput, typename ValueInput>
-    HashMapEntry(const KeyInput &k, const ValueInput &v) : key(k), value(v) {}
+    HashMapEntry(KeyInput &&k, ValueInput &&v)
+      : key(mozilla::Forward<KeyInput>(k)), value(mozilla::Forward<ValueInput>(v)) { }
 
-    HashMapEntry(mozilla::MoveRef<HashMapEntry> rhs)
-      : key(mozilla::OldMove(rhs->key)), value(mozilla::OldMove(rhs->value)) { }
+    HashMapEntry(HashMapEntry &&rhs)
+      : key(mozilla::Move(const_cast<Key &>(rhs.key))), value(mozilla::Move(rhs.value)) { }
 
     typedef Key KeyType;
     typedef Value ValueType;
 
     const Key key;
     Value value;
 };
 
@@ -694,21 +709,21 @@ class HashTableEntry
     void setCollision()               { JS_ASSERT(isLive()); keyHash |= sCollisionBit; }
     void setCollision(HashNumber bit) { JS_ASSERT(isLive()); keyHash |= bit; }
     void unsetCollision()             { keyHash &= ~sCollisionBit; }
     bool hasCollision() const         { return keyHash & sCollisionBit; }
     bool matchHash(HashNumber hn)     { return (keyHash & ~sCollisionBit) == hn; }
     HashNumber getKeyHash() const     { return keyHash & ~sCollisionBit; }
 
     template <class U>
-    void setLive(HashNumber hn, const U &u)
+    void setLive(HashNumber hn, U &&u)
     {
         JS_ASSERT(!isLive());
         keyHash = hn;
-        new(mem.addr()) T(u);
+        new(mem.addr()) T(mozilla::Forward<U>(u));
         JS_ASSERT(isLive());
     }
 };
 
 template <class T, class HashPolicy, class AllocPolicy>
 class HashTable : private AllocPolicy
 {
     typedef typename mozilla::RemoveConst<T>::Type NonConstT;
@@ -856,27 +871,28 @@ class HashTable : private AllocPolicy
             }
 
             if (removed)
                 table.compactIfUnderloaded();
         }
     };
 
     // HashTable is movable
-    HashTable(mozilla::MoveRef<HashTable> rhs)
-      : AllocPolicy(*rhs)
+    HashTable(HashTable &&rhs)
+      : AllocPolicy(rhs)
     {
-        mozilla::PodAssign(this, &*rhs);
-        rhs->table = nullptr;
+        mozilla::PodAssign(this, &rhs);
+        rhs.table = nullptr;
     }
-    void operator=(mozilla::MoveRef<HashTable> rhs) {
+    void operator=(HashTable &&rhs) {
+        MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
         if (table)
             destroyTable(*this, table, capacity());
-        mozilla::PodAssign(this, &*rhs);
-        rhs->table = nullptr;
+        mozilla::PodAssign(this, &rhs);
+        rhs.table = nullptr;
     }
 
   private:
     // HashTable is not copyable or assignable
     HashTable(const HashTable &) MOZ_DELETE;
     void operator=(const HashTable &) MOZ_DELETE;
 
   private:
@@ -1205,17 +1221,17 @@ class HashTable : private AllocPolicy
         removedCount = 0;
         gen++;
         table = newTable;
 
         // Copy only live entries, leaving removed ones behind.
         for (Entry *src = oldTable, *end = src + oldCap; src < end; ++src) {
             if (src->isLive()) {
                 HashNumber hn = src->getKeyHash();
-                findFreeEntry(hn).setLive(hn, mozilla::OldMove(src->get()));
+                findFreeEntry(hn).setLive(hn, mozilla::Move(src->get()));
                 src->destroy();
             }
         }
 
         // All entries have been destroyed, no need to destroyTable.
         this->free_(oldTable);
         return Rehashed;
     }
@@ -1420,17 +1436,17 @@ class HashTable : private AllocPolicy
         HashNumber keyHash = prepareHash(l);
         Entry &entry = lookup(l, keyHash, sCollisionBit);
         AddPtr p(entry, keyHash);
         p.mutationCount = mutationCount;
         return p;
     }
 
     template <class U>
-    bool add(AddPtr &p, const U &rhs)
+    bool add(AddPtr &p, U &&u)
     {
         mozilla::ReentrancyGuard g(*this);
         JS_ASSERT(mutationCount == p.mutationCount);
         JS_ASSERT(table);
         JS_ASSERT(!p.found());
         JS_ASSERT(!(p.keyHash & sCollisionBit));
 
         // Changing an entry from removed to live does not affect whether we
@@ -1443,80 +1459,86 @@ class HashTable : private AllocPolicy
             // Preserve the validity of |p.entry_|.
             RebuildStatus status = checkOverloaded();
             if (status == RehashFailed)
                 return false;
             if (status == Rehashed)
                 p.entry_ = &findFreeEntry(p.keyHash);
         }
 
-        p.entry_->setLive(p.keyHash, rhs);
+        p.entry_->setLive(p.keyHash, mozilla::Forward<U>(u));
         entryCount++;
         mutationCount++;
         return true;
     }
 
+    // Note: |l| may be a reference to a piece of |u|, so this function
+    // must take care not to use |l| after moving |u|.
     template <class U>
-    void putNewInfallible(const Lookup &l, const U &u)
+    void putNewInfallible(const Lookup &l, U &&u)
     {
         JS_ASSERT(table);
 
         HashNumber keyHash = prepareHash(l);
         Entry *entry = &findFreeEntry(keyHash);
 
         if (entry->isRemoved()) {
             METER(stats.addOverRemoved++);
             removedCount--;
             keyHash |= sCollisionBit;
         }
 
-        entry->setLive(keyHash, u);
+        entry->setLive(keyHash, mozilla::Forward<U>(u));
         entryCount++;
         mutationCount++;
     }
 
+    // Note: |l| may be a reference to a piece of |u|, so this function
+    // must take care not to use |l| after moving |u|.
     template <class U>
-    bool putNew(const Lookup &l, const U &u)
+    bool putNew(const Lookup &l, U &&u)
     {
         if (checkOverloaded() == RehashFailed)
             return false;
 
-        putNewInfallible(l, u);
+        putNewInfallible(l, mozilla::Forward<U>(u));
         return true;
     }
 
+    // Note: |l| may be a reference to a piece of |u|, so this function
+    // must take care not to use |l| after moving |u|.
     template <class U>
-    bool relookupOrAdd(AddPtr& p, const Lookup &l, const U &u)
+    bool relookupOrAdd(AddPtr& p, const Lookup &l, U &&u)
     {
         p.mutationCount = mutationCount;
         {
             mozilla::ReentrancyGuard g(*this);
             p.entry_ = &lookup(l, p.keyHash, sCollisionBit);
         }
-        return p.found() || add(p, u);
+        return p.found() || add(p, mozilla::Forward<U>(u));
     }
 
     void remove(Ptr p)
     {
         JS_ASSERT(table);
         mozilla::ReentrancyGuard g(*this);
         JS_ASSERT(p.found());
         remove(*p.entry_);
         checkUnderloaded();
     }
 
     void rekeyWithoutRehash(Ptr p, const Lookup &l, const Key &k)
     {
         JS_ASSERT(table);
         mozilla::ReentrancyGuard g(*this);
         JS_ASSERT(p.found());
-        typename HashTableEntry<T>::NonConstT t(mozilla::OldMove(*p));
+        typename HashTableEntry<T>::NonConstT t(mozilla::Move(*p));
         HashPolicy::setKey(t, const_cast<Key &>(k));
         remove(*p.entry_);
-        putNewInfallible(l, mozilla::OldMove(t));
+        putNewInfallible(l, mozilla::Move(t));
     }
 
     void rekeyAndMaybeRehash(Ptr p, const Lookup &l, const Key &k)
     {
         rekeyWithoutRehash(p, l, k);
         checkOverRemoved();
     }
 
--- a/js/public/MemoryMetrics.h
+++ b/js/public/MemoryMetrics.h
@@ -268,18 +268,18 @@ struct StringInfo
 //
 // Essentially the only difference between this class and StringInfo is that
 // NotableStringInfo holds a copy of the string's chars.
 struct NotableStringInfo : public StringInfo
 {
     NotableStringInfo();
     NotableStringInfo(JSString *str, const StringInfo &info);
     NotableStringInfo(const NotableStringInfo& info);
-    NotableStringInfo(mozilla::MoveRef<NotableStringInfo> info);
-    NotableStringInfo &operator=(mozilla::MoveRef<NotableStringInfo> info);
+    NotableStringInfo(NotableStringInfo &&info);
+    NotableStringInfo &operator=(NotableStringInfo &&info);
 
     ~NotableStringInfo() {
         js_free(buffer);
     }
 
     // A string needs to take up this many bytes of storage before we consider
     // it to be "notable".
     static size_t notableSize() {
@@ -321,20 +321,20 @@ struct RuntimeSizes
 };
 
 struct ZoneStats : js::ZoneStatsPod
 {
     ZoneStats() {
         strings.init();
     }
 
-    ZoneStats(mozilla::MoveRef<ZoneStats> other)
-        : ZoneStatsPod(other),
-          strings(mozilla::OldMove(other->strings)),
-          notableStrings(mozilla::OldMove(other->notableStrings))
+    ZoneStats(ZoneStats &&other)
+      : ZoneStatsPod(mozilla::Move(other)),
+        strings(mozilla::Move(other.strings)),
+        notableStrings(mozilla::Move(other.notableStrings))
     {}
 
     // Add other's numbers to this object's numbers.  Both objects'
     // notableStrings vectors must be empty at this point, because we can't
     // merge them.  (A NotableStringInfo contains only a prefix of the string,
     // so we can't tell whether two NotableStringInfo objects correspond to the
     // same string.)
     void add(const ZoneStats &other) {
--- a/js/public/Vector.h
+++ b/js/public/Vector.h
@@ -50,17 +50,17 @@ class Vector
                                MinInlineCapacity,
                                AllocPolicy,
                                Vector<T, MinInlineCapacity, AllocPolicy> >
 {
     typedef typename mozilla::VectorBase<T, MinInlineCapacity, AllocPolicy, Vector> Base;
 
   public:
     Vector(AllocPolicy alloc = AllocPolicy()) : Base(alloc) {}
-    Vector(mozilla::MoveRef<Vector> vec) : Base(vec) {}
-    Vector &operator=(mozilla::MoveRef<Vector> vec) {
-        return Base::operator=(vec);
+    Vector(Vector &&vec) : Base(mozilla::Move(vec)) {}
+    Vector &operator=(Vector &&vec) {
+        return Base::operator=(mozilla::Move(vec));
     }
 };
 
 } // namespace js
 
 #endif /* js_Vector_h */
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -17,19 +17,19 @@
 #include "vm/GlobalObject.h"
 #include "vm/Interpreter.h"
 
 #include "jsobjinlines.h"
 
 using namespace js;
 
 using mozilla::DoubleIsInt32;
+using mozilla::Forward;
 using mozilla::IsNaN;
-using mozilla::OldMove;
-using mozilla::MoveRef;
+using mozilla::Move;
 using mozilla::ArrayLength;
 using JS::DoubleNaNValue;
 
 
 /*** OrderedHashTable ****************************************************************************/
 
 /*
  * Define two collection templates, js::OrderedHashMap and js::OrderedHashSet.
@@ -76,17 +76,17 @@ class OrderedHashTable
     typedef typename Ops::Lookup Lookup;
 
     struct Data
     {
         T element;
         Data *chain;
 
         Data(const T &e, Data *c) : element(e), chain(c) {}
-        Data(MoveRef<T> e, Data *c) : element(e), chain(c) {}
+        Data(T &&e, Data *c) : element(Move(e)), chain(c) {}
     };
 
     class Range;
     friend class Range;
 
   private:
     Data **hashTable;           // hash table (has hashBuckets() elements)
     Data *data;                 // data vector, an array of Data objects
@@ -162,35 +162,36 @@ class OrderedHashTable
     /*
      * If the table already contains an entry that matches |element|,
      * replace that entry with |element|. Otherwise add a new entry.
      *
      * On success, return true, whether there was already a matching element or
      * not. On allocation failure, return false. If this returns false, it
      * means the element was not added to the table.
      */
-    bool put(const T &element) {
+    template <typename ElementInput>
+    bool put(ElementInput &&element) {
         HashNumber h = prepareHash(Ops::getKey(element));
         if (Data *e = lookup(Ops::getKey(element), h)) {
-            e->element = element;
+            e->element = Forward<ElementInput>(element);
             return true;
         }
 
         if (dataLength == dataCapacity) {
             // If the hashTable is more than 1/4 deleted data, simply rehash in
             // place to free up some space. Otherwise, grow the table.
             uint32_t newHashShift = liveCount >= dataCapacity * 0.75 ? hashShift - 1 : hashShift;
             if (!rehash(newHashShift))
                 return false;
         }
 
         h >>= hashShift;
         liveCount++;
         Data *e = &data[dataLength++];
-        new (e) Data(element, hashTable[h]);
+        new (e) Data(Forward<ElementInput>(element), hashTable[h]);
         hashTable[h] = e;
         return true;
     }
 
     /*
      * If the table contains an element matching l, remove it and set *foundp
      * to true. Otherwise set *foundp to false.
      *
@@ -587,17 +588,17 @@ class OrderedHashTable
     void rehashInPlace() {
         for (uint32_t i = 0, N = hashBuckets(); i < N; i++)
             hashTable[i] = nullptr;
         Data *wp = data, *end = data + dataLength;
         for (Data *rp = data; rp != end; rp++) {
             if (!Ops::isEmpty(Ops::getKey(rp->element))) {
                 HashNumber h = prepareHash(Ops::getKey(rp->element)) >> hashShift;
                 if (rp != wp)
-                    wp->element = OldMove(rp->element);
+                    wp->element = Move(rp->element);
                 wp->chain = hashTable[h];
                 hashTable[h] = wp;
                 wp++;
             }
         }
         MOZ_ASSERT(wp == data + liveCount);
 
         while (wp != end)
@@ -634,17 +635,17 @@ class OrderedHashTable
             alloc.free_(newHashTable);
             return false;
         }
 
         Data *wp = newData;
         for (Data *p = data, *end = data + dataLength; p != end; p++) {
             if (!Ops::isEmpty(Ops::getKey(p->element))) {
                 HashNumber h = prepareHash(Ops::getKey(p->element)) >> newHashShift;
-                new (wp) Data(OldMove(p->element), newHashTable[h]);
+                new (wp) Data(Move(p->element), newHashTable[h]);
                 newHashTable[h] = wp;
                 wp++;
             }
         }
         MOZ_ASSERT(wp == newData + liveCount);
 
         alloc.free_(hashTable);
         freeData(data, dataLength);
@@ -674,25 +675,26 @@ class OrderedHashMap
     class Entry
     {
         template <class, class, class> friend class detail::OrderedHashTable;
         void operator=(const Entry &rhs) {
             const_cast<Key &>(key) = rhs.key;
             value = rhs.value;
         }
 
-        void operator=(MoveRef<Entry> rhs) {
-            const_cast<Key &>(key) = OldMove(rhs->key);
-            value = OldMove(rhs->value);
+        void operator=(Entry &&rhs) {
+            MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
+            const_cast<Key &>(key) = Move(rhs.key);
+            value = Move(rhs.value);
         }
 
       public:
         Entry() : key(), value() {}
         Entry(const Key &k, const Value &v) : key(k), value(v) {}
-        Entry(MoveRef<Entry> rhs) : key(OldMove(rhs->key)), value(OldMove(rhs->value)) {}
+        Entry(Entry &&rhs) : key(Move(rhs.key)), value(Move(rhs.value)) {}
 
         const Key key;
         Value value;
     };
 
   private:
     struct MapOps : OrderedHashPolicy
     {
--- a/js/src/jit/AliasAnalysis.cpp
+++ b/js/src/jit/AliasAnalysis.cpp
@@ -166,17 +166,17 @@ AliasAnalysis::analyze()
     Vector<MDefinitionVector, AliasSet::NumCategories, IonAllocPolicy> stores(alloc());
 
     // Initialize to the first instruction.
     MDefinition *firstIns = *graph_.begin()->begin();
     for (unsigned i = 0; i < AliasSet::NumCategories; i++) {
         MDefinitionVector defs(alloc());
         if (!defs.append(firstIns))
             return false;
-        if (!stores.append(OldMove(defs)))
+        if (!stores.append(Move(defs)))
             return false;
     }
 
     // Type analysis may have inserted new instructions. Since this pass depends
     // on the instruction number ordering, all instructions are renumbered.
     // We start with 1 because some passes use 0 to denote failure.
     uint32_t newId = 1;
 
--- a/js/src/jit/AsmJS.cpp
+++ b/js/src/jit/AsmJS.cpp
@@ -42,18 +42,17 @@ using namespace js::jit;
 using mozilla::AddToHash;
 using mozilla::ArrayLength;
 using mozilla::CountLeadingZeroes32;
 using mozilla::DebugOnly;
 using mozilla::HashGeneric;
 using mozilla::IsNaN;
 using mozilla::IsNegativeZero;
 using mozilla::Maybe;
-using mozilla::OldMove;
-using mozilla::MoveRef;
+using mozilla::Move;
 using mozilla::PositiveInfinity;
 using JS::GenericNaN;
 
 static const size_t LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 1 << 12;
 
 /*****************************************************************************/
 // ParseNode utilities
 
@@ -629,34 +628,34 @@ class Signature
     VarTypeVector argTypes_;
     RetType retType_;
 
   public:
     Signature(ExclusiveContext *cx)
       : argTypes_(cx) {}
     Signature(ExclusiveContext *cx, RetType retType)
       : argTypes_(cx), retType_(retType) {}
-    Signature(MoveRef<VarTypeVector> argTypes, RetType retType)
-      : argTypes_(argTypes), retType_(retType) {}
-    Signature(MoveRef<Signature> rhs)
-      : argTypes_(OldMove(rhs->argTypes_)), retType_(rhs->retType_) {}
+    Signature(VarTypeVector &&argTypes, RetType retType)
+      : argTypes_(Move(argTypes)), retType_(Move(retType)) {}
+    Signature(Signature &&rhs)
+      : argTypes_(Move(rhs.argTypes_)), retType_(Move(rhs.retType_)) {}
 
     bool copy(const Signature &rhs) {
         if (!argTypes_.resize(rhs.argTypes_.length()))
             return false;
         for (unsigned i = 0; i < argTypes_.length(); i++)
             argTypes_[i] = rhs.argTypes_[i];
         retType_ = rhs.retType_;
         return true;
     }
 
     bool appendArg(VarType type) { return argTypes_.append(type); }
     VarType arg(unsigned i) const { return argTypes_[i]; }
     const VarTypeVector &args() const { return argTypes_; }
-    MoveRef<VarTypeVector> extractArgs() { return OldMove(argTypes_); }
+    VarTypeVector &&extractArgs() { return Move(argTypes_); }
 
     RetType retType() const { return retType_; }
 };
 
 } /* namespace anonymous */
 
 static
 bool operator==(const Signature &lhs, const Signature &rhs)
@@ -1034,18 +1033,18 @@ class MOZ_STACK_CLASS ModuleCompiler
         PropertyName *name_;
         bool defined_;
         uint32_t srcOffset_;
         Signature sig_;
         Label *code_;
         unsigned compileTime_;
 
       public:
-        Func(PropertyName *name, MoveRef<Signature> sig, Label *code)
-          : name_(name), defined_(false), srcOffset_(0), sig_(sig), code_(code), compileTime_(0)
+        Func(PropertyName *name, Signature &&sig, Label *code)
+          : name_(name), defined_(false), srcOffset_(0), sig_(Move(sig)), code_(code), compileTime_(0)
         {}
 
         PropertyName *name() const { return name_; }
         bool defined() const { return defined_; }
         void define(uint32_t so) { JS_ASSERT(!defined_); defined_ = true; srcOffset_ = so; }
         uint32_t srcOffset() const { JS_ASSERT(defined_); return srcOffset_; }
         Signature &sig() { return sig_; }
         const Signature &sig() const { return sig_; }
@@ -1138,47 +1137,47 @@ class MOZ_STACK_CLASS ModuleCompiler
     class FuncPtrTable
     {
         Signature sig_;
         uint32_t mask_;
         uint32_t globalDataOffset_;
         FuncPtrVector elems_;
 
       public:
-        FuncPtrTable(ExclusiveContext *cx, MoveRef<Signature> sig, uint32_t mask, uint32_t gdo)
-          : sig_(sig), mask_(mask), globalDataOffset_(gdo), elems_(cx)
+        FuncPtrTable(ExclusiveContext *cx, Signature &&sig, uint32_t mask, uint32_t gdo)
+          : sig_(Move(sig)), mask_(mask), globalDataOffset_(gdo), elems_(cx)
         {}
 
-        FuncPtrTable(MoveRef<FuncPtrTable> rhs)
-          : sig_(OldMove(rhs->sig_)), mask_(rhs->mask_), globalDataOffset_(rhs->globalDataOffset_),
-            elems_(OldMove(rhs->elems_))
+        FuncPtrTable(FuncPtrTable &&rhs)
+          : sig_(Move(rhs.sig_)), mask_(rhs.mask_), globalDataOffset_(rhs.globalDataOffset_),
+            elems_(Move(rhs.elems_))
         {}
 
         Signature &sig() { return sig_; }
         const Signature &sig() const { return sig_; }
         unsigned mask() const { return mask_; }
         unsigned globalDataOffset() const { return globalDataOffset_; }
 
-        void initElems(MoveRef<FuncPtrVector> elems) { elems_ = elems; JS_ASSERT(!elems_.empty()); }
+        void initElems(FuncPtrVector &&elems) { elems_ = Move(elems); JS_ASSERT(!elems_.empty()); }
         unsigned numElems() const { JS_ASSERT(!elems_.empty()); return elems_.length(); }
         const Func &elem(unsigned i) const { return *elems_[i]; }
     };
 
     typedef Vector<FuncPtrTable> FuncPtrTableVector;
 
     class ExitDescriptor
     {
         PropertyName *name_;
         Signature sig_;
 
       public:
-        ExitDescriptor(PropertyName *name, MoveRef<Signature> sig)
-          : name_(name), sig_(sig) {}
-        ExitDescriptor(MoveRef<ExitDescriptor> rhs)
-          : name_(rhs->name_), sig_(OldMove(rhs->sig_))
+        ExitDescriptor(PropertyName *name, Signature &&sig)
+          : name_(name), sig_(Move(sig)) {}
+        ExitDescriptor(ExitDescriptor &&rhs)
+          : name_(rhs.name_), sig_(Move(rhs.sig_))
         {}
         const Signature &sig() const {
             return sig_;
         }
 
         // ExitDescriptor is a HashPolicy:
         typedef ExitDescriptor Lookup;
         static HashNumber hash(const ExitDescriptor &d) {
@@ -1463,44 +1462,44 @@ class MOZ_STACK_CLASS ModuleCompiler
         if (!global)
             return false;
         global->u.var.index_ = index;
         global->u.var.type_ = VarType(coercion).which();
         global->u.var.isConst_ = isConst;
         global->u.var.isLitConst_ = false;
         return globals_.putNew(varName, global);
     }
-    bool addFunction(PropertyName *name, MoveRef<Signature> sig, Func **func) {
+    bool addFunction(PropertyName *name, Signature &&sig, Func **func) {
         JS_ASSERT(!finishedFunctionBodies_);
         Global *global = moduleLifo_.new_<Global>(Global::Function);
         if (!global)
             return false;
         global->u.funcIndex_ = functions_.length();
         if (!globals_.putNew(name, global))
             return false;
         Label *code = moduleLifo_.new_<Label>();
         if (!code)
             return false;
-        *func = moduleLifo_.new_<Func>(name, sig, code);
+        *func = moduleLifo_.new_<Func>(name, Move(sig), code);
         if (!*func)
             return false;
         return functions_.append(*func);
     }
-    bool addFuncPtrTable(PropertyName *name, MoveRef<Signature> sig, uint32_t mask, FuncPtrTable **table) {
+    bool addFuncPtrTable(PropertyName *name, Signature &&sig, uint32_t mask, FuncPtrTable **table) {
         Global *global = moduleLifo_.new_<Global>(Global::FuncPtrTable);
         if (!global)
             return false;
         global->u.funcPtrTableIndex_ = funcPtrTables_.length();
         if (!globals_.putNew(name, global))
             return false;
         uint32_t globalDataOffset;
         if (!module_->addFuncPtrTable(/* numElems = */ mask + 1, &globalDataOffset))
             return false;
-        FuncPtrTable tmpTable(cx_, sig, mask, globalDataOffset);
-        if (!funcPtrTables_.append(OldMove(tmpTable)))
+        FuncPtrTable tmpTable(cx_, Move(sig), mask, globalDataOffset);
+        if (!funcPtrTables_.append(Move(tmpTable)))
             return false;
         *table = &funcPtrTables_.back();
         return true;
     }
     bool addFFI(PropertyName *varName, PropertyName *field) {
         Global *global = moduleLifo_.new_<Global>(Global::FFI);
         if (!global)
             return false;
@@ -1541,28 +1540,28 @@ class MOZ_STACK_CLASS ModuleCompiler
         AsmJSModule::ArgCoercionVector argCoercions;
         const VarTypeVector &args = func->sig().args();
         if (!argCoercions.resize(args.length()))
             return false;
         for (unsigned i = 0; i < args.length(); i++)
             argCoercions[i] = args[i].toCoercion();
         AsmJSModule::ReturnType retType = func->sig().retType().toModuleReturnType();
         return module_->addExportedFunction(func->name(), maybeFieldName,
-                                            OldMove(argCoercions), retType);
-    }
-    bool addExit(unsigned ffiIndex, PropertyName *name, MoveRef<Signature> sig, unsigned *exitIndex) {
-        ExitDescriptor exitDescriptor(name, sig);
+                                            Move(argCoercions), retType);
+    }
+    bool addExit(unsigned ffiIndex, PropertyName *name, Signature &&sig, unsigned *exitIndex) {
+        ExitDescriptor exitDescriptor(name, Move(sig));
         ExitMap::AddPtr p = exits_.lookupForAdd(exitDescriptor);
         if (p) {
             *exitIndex = p->value;
             return true;
         }
         if (!module_->addExit(ffiIndex, exitIndex))
             return false;
-        return exits_.add(p, OldMove(exitDescriptor), *exitIndex);
+        return exits_.add(p, Move(exitDescriptor), *exitIndex);
     }
     bool addGlobalAccess(AsmJSGlobalAccess access) {
         return globalAccesses_.append(access);
     }
 
     // Note a constraint on the minimum size of the heap.  The heap size is
     // constrained when linking to be at least the maximum of all such constraints.
     void requireHeapLengthToBeAtLeast(uint32_t len) {
@@ -2690,17 +2689,17 @@ class FunctionCompiler
     template <class Key, class Map>
     bool addBreakOrContinue(Key key, Map *map)
     {
         if (!curBlock_)
             return true;
         typename Map::AddPtr p = map->lookupForAdd(key);
         if (!p) {
             BlockVector empty(m().cx());
-            if (!map->add(p, key, OldMove(empty)))
+            if (!map->add(p, key, Move(empty)))
                 return false;
         }
         if (!p->value.append(curBlock_))
             return false;
         curBlock_ = nullptr;
         return true;
     }
 
@@ -3626,24 +3625,24 @@ CheckSignatureAgainstExisting(ModuleComp
                        sig.retType().toType().toChars(), existing.retType().toType().toChars());
     }
 
     JS_ASSERT(sig == existing);
     return true;
 }
 
 static bool
-CheckFunctionSignature(ModuleCompiler &m, ParseNode *usepn, MoveRef<Signature> sig, PropertyName *name,
+CheckFunctionSignature(ModuleCompiler &m, ParseNode *usepn, Signature &&sig, PropertyName *name,
                        ModuleCompiler::Func **func)
 {
     ModuleCompiler::Func *existing = m.lookupFunction(name);
     if (!existing) {
         if (!CheckModuleLevelName(m, usepn, name))
             return false;
-        return m.addFunction(name, sig, func);
+        return m.addFunction(name, Move(sig), func);
     }
 
     if (!CheckSignatureAgainstExisting(m, usepn, sig, existing->sig()))
         return false;
 
     *func = existing;
     return true;
 }
@@ -3661,29 +3660,29 @@ CheckInternalCall(FunctionCompiler &f, P
                   RetType retType, MDefinition **def, Type *type)
 {
     FunctionCompiler::Call call(f, retType);
 
     if (!CheckCallArgs(f, callNode, CheckIsVarType, &call))
         return false;
 
     ModuleCompiler::Func *callee;
-    if (!CheckFunctionSignature(f.m(), callNode, OldMove(call.sig()), calleeName, &callee))
+    if (!CheckFunctionSignature(f.m(), callNode, Move(call.sig()), calleeName, &callee))
         return false;
 
     if (!f.internalCall(*callee, call, def))
         return false;
 
     *type = retType.toType();
     return true;
 }
 
 static bool
 CheckFuncPtrTableAgainstExisting(ModuleCompiler &m, ParseNode *usepn,
-                                 PropertyName *name, MoveRef<Signature> sig, unsigned mask,
+                                 PropertyName *name, Signature &&sig, unsigned mask,
                                  ModuleCompiler::FuncPtrTable **tableOut)
 {
     if (const ModuleCompiler::Global *existing = m.lookupGlobal(name)) {
         if (existing->which() != ModuleCompiler::Global::FuncPtrTable)
             return m.failName(usepn, "'%s' is not a function-pointer table", name);
 
         ModuleCompiler::FuncPtrTable &table = m.funcPtrTable(existing->funcPtrTableIndex());
         if (mask != table.mask())
@@ -3694,17 +3693,17 @@ CheckFuncPtrTableAgainstExisting(ModuleC
 
         *tableOut = &table;
         return true;
     }
 
     if (!CheckModuleLevelName(m, usepn, name))
         return false;
 
-    return m.addFuncPtrTable(name, sig, mask, tableOut);
+    return m.addFuncPtrTable(name, Move(sig), mask, tableOut);
 }
 
 static bool
 CheckFuncPtrCall(FunctionCompiler &f, ParseNode *callNode, RetType retType, MDefinition **def, Type *type)
 {
     ParseNode *callee = CallCallee(callNode);
     ParseNode *tableNode = ElemBase(callee);
     ParseNode *indexExpr = ElemIndex(callee);
@@ -3737,17 +3736,17 @@ CheckFuncPtrCall(FunctionCompiler &f, Pa
         return f.failf(indexNode, "%s is not a subtype of intish", indexType.toChars());
 
     FunctionCompiler::Call call(f, retType);
 
     if (!CheckCallArgs(f, callNode, CheckIsVarType, &call))
         return false;
 
     ModuleCompiler::FuncPtrTable *table;
-    if (!CheckFuncPtrTableAgainstExisting(f.m(), tableNode, name, OldMove(call.sig()), mask, &table))
+    if (!CheckFuncPtrTableAgainstExisting(f.m(), tableNode, name, Move(call.sig()), mask, &table))
         return false;
 
     if (!f.funcPtrCall(*table, indexDef, call, def))
         return false;
 
     *type = retType.toType();
     return true;
 }
@@ -3766,17 +3765,17 @@ CheckFFICall(FunctionCompiler &f, ParseN
 {
     PropertyName *calleeName = CallCallee(callNode)->name();
 
     FunctionCompiler::Call call(f, retType);
     if (!CheckCallArgs(f, callNode, CheckIsExternType, &call))
         return false;
 
     unsigned exitIndex;
-    if (!f.m().addExit(ffiIndex, calleeName, OldMove(call.sig()), &exitIndex))
+    if (!f.m().addExit(ffiIndex, calleeName, Move(call.sig()), &exitIndex))
         return false;
 
     if (!f.ffiCall(exitIndex, call, retType.toMIRType(), def))
         return false;
 
     *type = retType.toType();
     return true;
 }
@@ -4924,19 +4923,19 @@ CheckFunction(ModuleCompiler &m, LifoAll
 
     RetType retType;
     if (!CheckFinalReturn(f, lastNonEmptyStmt, &retType))
         return false;
 
     if (!CheckReturnType(f, lastNonEmptyStmt, retType))
         return false;
 
-    Signature sig(OldMove(argTypes), retType);
+    Signature sig(Move(argTypes), retType);
     ModuleCompiler::Func *func;
-    if (!CheckFunctionSignature(m, fn, OldMove(sig), FunctionName(fn), &func))
+    if (!CheckFunctionSignature(m, fn, Move(sig), FunctionName(fn), &func))
         return false;
 
     if (func->defined())
         return m.failName(fn, "function '%s' already defined", FunctionName(fn));
 
     func->define(fn->pn_pos.begin);
     func->accumulateCompileTime((PRMJ_Now() - before) / PRMJ_USEC_PER_MSEC);
 
@@ -5346,20 +5345,20 @@ CheckFuncPtrTable(ModuleCompiler &m, Par
             return false;
     }
 
     Signature sig(m.cx());
     if (!sig.copy(*firstSig))
         return false;
 
     ModuleCompiler::FuncPtrTable *table;
-    if (!CheckFuncPtrTableAgainstExisting(m, var, var->name(), OldMove(sig), mask, &table))
-        return false;
-
-    table->initElems(OldMove(elems));
+    if (!CheckFuncPtrTableAgainstExisting(m, var, var->name(), Move(sig), mask, &table))
+        return false;
+
+    table->initElems(Move(elems));
     return true;
 }
 
 static bool
 CheckFuncPtrTables(ModuleCompiler &m)
 {
     while (true) {
         ParseNode *varStmt;
--- a/js/src/jit/AsmJSModule.h
+++ b/js/src/jit/AsmJSModule.h
@@ -4,16 +4,17 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef jit_AsmJSModule_h
 #define jit_AsmJSModule_h
 
 #ifdef JS_ION
 
+#include "mozilla/Move.h"
 #include "mozilla/PodOperations.h"
 
 #include "jsscript.h"
 
 #include "gc/Marking.h"
 #include "jit/AsmJS.h"
 #include "jit/IonMacroAssembler.h"
 #ifdef JS_ION_PERF
@@ -247,40 +248,40 @@ class AsmJSModule
             ReturnType returnType_;
             uint32_t codeOffset_;
         } pod;
 
         friend class AsmJSModule;
 
         ExportedFunction(PropertyName *name,
                          PropertyName *maybeFieldName,
-                         mozilla::MoveRef<ArgCoercionVector> argCoercions,
+                         ArgCoercionVector &&argCoercions,
                          ReturnType returnType)
         {
             name_ = name;
             maybeFieldName_ = maybeFieldName;
-            argCoercions_ = argCoercions;
+            argCoercions_ = mozilla::Move(argCoercions);
             pod.returnType_ = returnType;
             pod.codeOffset_ = UINT32_MAX;
             JS_ASSERT_IF(maybeFieldName_, name_->isTenured());
         }
 
         void trace(JSTracer *trc) {
             MarkStringUnbarriered(trc, &name_, "asm.js export name");
             if (maybeFieldName_)
                 MarkStringUnbarriered(trc, &maybeFieldName_, "asm.js export field");
         }
 
       public:
         ExportedFunction() {}
-        ExportedFunction(mozilla::MoveRef<ExportedFunction> rhs) {
-            name_ = rhs->name_;
-            maybeFieldName_ = rhs->maybeFieldName_;
-            argCoercions_ = mozilla::OldMove(rhs->argCoercions_);
-            pod = rhs->pod;
+        ExportedFunction(ExportedFunction &&rhs) {
+            name_ = rhs.name_;
+            maybeFieldName_ = rhs.maybeFieldName_;
+            argCoercions_ = mozilla::Move(rhs.argCoercions_);
+            pod = rhs.pod;
         }
 
         void initCodeOffset(unsigned off) {
             JS_ASSERT(pod.codeOffset_ == UINT32_MAX);
             pod.codeOffset_ = off;
         }
 
         PropertyName *name() const {
@@ -335,24 +336,24 @@ class AsmJSModule
     struct ProfiledBlocksFunction : public ProfiledFunction
     {
         unsigned endInlineCodeOffset;
         jit::BasicBlocksVector blocks;
 
         ProfiledBlocksFunction(JSAtom *name, unsigned start, unsigned endInline, unsigned end,
                                jit::BasicBlocksVector &blocksVector)
           : ProfiledFunction(name, start, end), endInlineCodeOffset(endInline),
-            blocks(mozilla::OldMove(blocksVector))
+            blocks(mozilla::Move(blocksVector))
         {
             JS_ASSERT(name->isTenured());
         }
 
         ProfiledBlocksFunction(const ProfiledBlocksFunction &copy)
           : ProfiledFunction(copy.name, copy.startCodeOffset, copy.endCodeOffset),
-            endInlineCodeOffset(copy.endInlineCodeOffset), blocks(mozilla::OldMove(copy.blocks))
+            endInlineCodeOffset(copy.endInlineCodeOffset), blocks(mozilla::Move(copy.blocks))
         { }
     };
 #endif
 
   private:
     typedef Vector<ExportedFunction, 0, SystemAllocPolicy> ExportedFunctionVector;
     typedef Vector<Global, 0, SystemAllocPolicy> GlobalVector;
     typedef Vector<Exit, 0, SystemAllocPolicy> ExitVector;
@@ -512,21 +513,21 @@ class AsmJSModule
         *exitIndex = unsigned(exits_.length());
         return exits_.append(Exit(ffiIndex, globalDataOffset));
     }
     bool addFunctionCounts(jit::IonScriptCounts *counts) {
         return functionCounts_.append(counts);
     }
 
     bool addExportedFunction(PropertyName *name, PropertyName *maybeFieldName,
-                             mozilla::MoveRef<ArgCoercionVector> argCoercions,
+                             ArgCoercionVector &&argCoercions,
                              ReturnType returnType)
     {
-        ExportedFunction func(name, maybeFieldName, argCoercions, returnType);
-        return exports_.append(mozilla::OldMove(func));
+        ExportedFunction func(name, maybeFieldName, mozilla::Move(argCoercions), returnType);
+        return exports_.append(mozilla::Move(func));
     }
     unsigned numExportedFunctions() const {
         return exports_.length();
     }
     const ExportedFunction &exportedFunction(unsigned i) const {
         return exports_[i];
     }
     ExportedFunction &exportedFunction(unsigned i) {
--- a/js/src/shell/jsheaptools.cpp
+++ b/js/src/shell/jsheaptools.cpp
@@ -16,18 +16,17 @@
 #include "jscompartment.h"
 #include "jsobj.h"
 #include "jsprf.h"
 
 #include "jsobjinlines.h"
 
 using namespace js;
 
-using mozilla::OldMove;
-using mozilla::MoveRef;
+using mozilla::Move;
 
 #ifdef DEBUG
 
 
 /*** class HeapReverser **************************************************************************/
 
 /*
  * A class for constructing a map of the JavaScript heap, with all
@@ -69,21 +68,22 @@ class HeapReverser : public JSTracer, pu
         Node(JSGCTraceKind kind)
           : kind(kind), incoming(), marked(false) { }
 
         /*
          * Move constructor and move assignment. These allow us to store our
          * incoming edge Vector in the hash table: Vectors support moves, but
          * not assignments or copy construction.
          */
-        Node(MoveRef<Node> rhs)
-          : kind(rhs->kind), incoming(OldMove(rhs->incoming)), marked(rhs->marked) { }
-        Node &operator=(MoveRef<Node> rhs) {
+        Node(Node &&rhs)
+          : kind(rhs.kind), incoming(Move(rhs.incoming)), marked(rhs.marked) { }
+        Node &operator=(Node &&rhs) {
+            MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
             this->~Node();
-            new(this) Node(rhs);
+            new(this) Node(Move(rhs));
             return *this;
         }
 
         void trace(JSTracer *trc) {
             for (Edge *e = incoming.begin(); e != incoming.end(); e++)
                 e->trace(trc);
         }
 
@@ -111,22 +111,23 @@ class HeapReverser : public JSTracer, pu
         Edge(char *name, void *origin) : name(name), origin(origin) { }
         ~Edge() { js_free(name); }
 
         /*
          * Move constructor and move assignment. These allow us to live in
          * Vectors without needing to copy our name string when the vector is
          * resized.
          */
-        Edge(MoveRef<Edge> rhs) : name(rhs->name), origin(rhs->origin) {
-            rhs->name = nullptr;
+        Edge(Edge &&rhs) : name(rhs.name), origin(rhs.origin) {
+            rhs.name = nullptr;
         }
-        Edge &operator=(MoveRef<Edge> rhs) {
+        Edge &operator=(Edge &&rhs) {
+            MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
             this->~Edge();
-            new(this) Edge(rhs);
+            new(this) Edge(Move(rhs));
             return *this;
         }
 
         void trace(JSTracer *trc) {
             if (origin)
                 gc::MarkGCThingRoot(trc, &origin, "HeapReverser::Edge");
         }
 
@@ -266,26 +267,26 @@ HeapReverser::traverseEdge(void *cell, J
     if (!a) {
         /*
          * We've never visited this cell before. Add it to the map (thus
          * marking it as visited), and put it on the work stack, to be
          * visited from the main loop.
          */
         Node n(kind);
         uint32_t generation = map.generation();
-        if (!map.add(a, cell, OldMove(n)) ||
+        if (!map.add(a, cell, Move(n)) ||
             !work.append(Child(cell, kind)))
             return false;
         /* If the map has been resized, re-check the pointer. */
         if (map.generation() != generation)
             a = map.lookupForAdd(cell);
     }
 
     /* Add this edge to the reversed map. */
-    return a->value.incoming.append(OldMove(e));
+    return a->value.incoming.append(Move(e));
 }
 
 bool
 HeapReverser::reverseHeap()
 {
     traversalStatus = true;
 
     /* Prime the work stack with the roots of collection. */
--- a/js/src/vm/MemoryMetrics.cpp
+++ b/js/src/vm/MemoryMetrics.cpp
@@ -19,18 +19,17 @@
 #include "vm/ArrayObject.h"
 #include "vm/Runtime.h"
 #include "vm/Shape.h"
 #include "vm/String.h"
 #include "vm/WrapperObject.h"
 
 using mozilla::DebugOnly;
 using mozilla::MallocSizeOf;
-using mozilla::MoveRef;
-using mozilla::OldMove;
+using mozilla::Move;
 using mozilla::PodEqual;
 
 using namespace js;
 
 using JS::RuntimeStats;
 using JS::ObjectPrivateVisitor;
 using JS::ZoneStats;
 using JS::CompartmentStats;
@@ -90,22 +89,22 @@ InefficientNonFlatteningStringHashPolicy
     return PodEqual(c1, c2, k->length());
 }
 
 } // namespace js
 
 namespace JS {
 
 NotableStringInfo::NotableStringInfo()
-    : bufferSize(0),
-      buffer(0)
+  : bufferSize(0),
+    buffer(0)
 {}
 
 NotableStringInfo::NotableStringInfo(JSString *str, const StringInfo &info)
-    : StringInfo(info)
+  : StringInfo(info)
 {
     bufferSize = Min(str->length() + 1, size_t(4096));
     buffer = js_pod_malloc<char>(bufferSize);
     if (!buffer) {
         MOZ_CRASH("oom");
     }
 
     const jschar* chars;
@@ -120,37 +119,38 @@ NotableStringInfo::NotableStringInfo(JSS
 
     // We might truncate |str| even if it's much shorter than 4096 chars, if
     // |str| contains unicode chars.  Since this is just for a memory reporter,
     // we don't care.
     PutEscapedString(buffer, bufferSize, chars, str->length(), /* quote */ 0);
 }
 
 NotableStringInfo::NotableStringInfo(const NotableStringInfo& info)
-    : StringInfo(info),
-      bufferSize(info.bufferSize)
+  : StringInfo(info),
+    bufferSize(info.bufferSize)
 {
     buffer = js_pod_malloc<char>(bufferSize);
     if (!buffer)
         MOZ_CRASH("oom");
 
     strcpy(buffer, info.buffer);
 }
 
-NotableStringInfo::NotableStringInfo(MoveRef<NotableStringInfo> info)
-    : StringInfo(info)
+NotableStringInfo::NotableStringInfo(NotableStringInfo &&info)
+  : StringInfo(Move(info))
 {
-    buffer = info->buffer;
-    info->buffer = nullptr;
+    buffer = info.buffer;
+    info.buffer = nullptr;
 }
 
-NotableStringInfo &NotableStringInfo::operator=(MoveRef<NotableStringInfo> info)
+NotableStringInfo &NotableStringInfo::operator=(NotableStringInfo &&info)
 {
+    MOZ_ASSERT(this != &info, "self-move assignment is prohibited");
     this->~NotableStringInfo();
-    new (this) NotableStringInfo(info);
+    new (this) NotableStringInfo(Move(info));
     return *this;
 }
 
 } // namespace JS
 
 typedef HashSet<ScriptSource *, DefaultHasher<ScriptSource *>, SystemAllocPolicy> SourceSet;
 
 struct StatsClosure
@@ -409,17 +409,17 @@ FindNotableStrings(ZoneStats &zStats)
         StringInfo &info = r.front().value;
 
         // If this string is too small, or if we can't grow the notableStrings
         // vector, skip this string.
         if (info.totalSizeOf() < NotableStringInfo::notableSize() ||
             !zStats.notableStrings.growBy(1))
             continue;
 
-        zStats.notableStrings.back() = OldMove(NotableStringInfo(str, info));
+        zStats.notableStrings.back() = NotableStringInfo(str, info);
 
         // We're moving this string from a non-notable to a notable bucket, so
         // subtract it out of the non-notable tallies.
         MOZ_ASSERT(zStats.stringsShortGCHeap >= info.shortGCHeap);
         MOZ_ASSERT(zStats.stringsNormalGCHeap >= info.normalGCHeap);
         MOZ_ASSERT(zStats.stringsNormalMallocHeap >= info.normalMallocHeap);
         zStats.stringsShortGCHeap -= info.shortGCHeap;
         zStats.stringsNormalGCHeap -= info.normalGCHeap;
--- a/mfbt/Move.h
+++ b/mfbt/Move.h
@@ -11,24 +11,16 @@
 
 #include "mozilla/TypeTraits.h"
 
 namespace mozilla {
 
 /*
  * "Move" References
  *
- * [Once upon a time, C++11 rvalue references were not implemented by all the
- * compilers we cared about, so we invented mozilla::Move() (now called
- * OldMove()), which does something similar.  We're in the process of
- * transitioning away from this to pure stl (bug 896100).  Until that bug is
- * completed, this header will provide both mozilla::OldMove() and
- * mozilla::Move().]
- *
- *
  * Some types can be copied much more efficiently if we know the original's
  * value need not be preserved --- that is, if we are doing a "move", not a
  * "copy". For example, if we have:
  *
  *   Vector<T> u;
  *   Vector<T> v(u);
  *
  * the constructor for v must apply a copy constructor to each element of u ---
@@ -38,161 +30,176 @@ namespace mozilla {
  * to v --- a constant-time operation, regardless of the size of u.
  *
  * Moves often appear in container implementations. For example, when we append
  * to a vector, we may need to resize its buffer. This entails moving each of
  * its extant elements from the old, smaller buffer to the new, larger buffer.
  * But once the elements have been migrated, we're just going to throw away the
  * old buffer; we don't care if they still have their values. So if the vector's
  * element type can implement "move" more efficiently than "copy", the vector
- * resizing should by all means use a "move" operation. Hash tables also need to
- * be resized.
+ * resizing should by all means use a "move" operation. Hash tables should also
+ * use moves when resizing their internal array as entries are added and
+ * removed.
  *
- * The details of the optimization, and whether it's worth applying, vary from
- * one type to the next. And while some constructor calls are moves, many really
- * are copies, and can't be optimized this way. So we need:
+ * The details of the optimization, and whether it's worth applying, vary
+ * from one type to the next: copying an 'int' is as cheap as moving it, so
+ * there's no benefit in distinguishing 'int' moves from copies. And while
+ * some constructor calls for complex types are moves, many really have to
+ * be copies, and can't be optimized this way. So we need:
  *
- * 1) a way for a particular invocation of a copy constructor to say that it's
- *    really a move, and that the value of the original isn't important
- *    afterwards (although it must still be safe to destroy); and
- *
- * 2) a way for a type (like Vector) to announce that it can be moved more
+ * 1) a way for a type (like Vector) to announce that it can be moved more
  *    efficiently than it can be copied, and provide an implementation of that
- *    move operation.
+ *    move operation; and
+ *
+ * 2) a way for a particular invocation of a copy constructor to say that it's
+ *    really a move, not a copy, and that the value of the original isn't
+ *    important afterwards (although it must still be safe to destroy).
  *
- * The OldMove(T&) function takes a reference to a T, and returns a MoveRef<T>
- * referring to the same value; that's (1). A MoveRef<T> is simply a reference
- * to a T, annotated to say that a copy constructor applied to it may move that
- * T, instead of copying it. Finally, a constructor that accepts an MoveRef<T>
- * should perform a more efficient move, instead of a copy, providing (2).
+ * If a constructor has a single argument of type 'T&&' (an 'rvalue reference
+ * to T'), that indicates that it is a 'move constructor'. That's 1). It should
+ * move, not copy, its argument into the object being constructed. It may leave
+ * the original in any safely-destructible state.
  *
- * The Move(T&) function takes a reference to a T and returns a T&&.  It acts
- * just like std::move(), which is not available on all our platforms.
- *
- * In new code, you should use Move(T&) and T&& instead of OldMove(T&) and
- * MoveRef<T>, where possible.
+ * If a constructor's argument is an rvalue, as in 'C(f(x))' or 'C(x + y)', as
+ * opposed to an lvalue, as in 'C(x)', then overload resolution will prefer the
+ * move constructor, if there is one. The 'mozilla::Move' function, defined in
+ * this file, is an identity function you can use in a constructor invocation to
+ * make any argument into an rvalue, like this: C(Move(x)). That's 2). (You
+ * could use any function that works, but 'Move' indicates your intention
+ * clearly.)
  *
  * Where we might define a copy constructor for a class C like this:
  *
  *   C(const C& rhs) { ... copy rhs to this ... }
  *
  * we would declare a move constructor like this:
  *
  *   C(C&& rhs) { .. move rhs to this ... }
  *
- * or, in the deprecated OldMove style:
- *
- *   C(MoveRef<C> rhs) { ... move rhs to this ... }
- *
  * And where we might perform a copy like this:
  *
  *   C c2(c1);
  *
  * we would perform a move like this:
  *
  *   C c2(Move(c1));
  *
- * or, in the deprecated OldMove style:
- *
- *   C c2(OldMove(c1));
+ * Note that 'T&&' implicitly converts to 'T&'. So you can pass a 'T&&' to an
+ * ordinary copy constructor for a type that doesn't support a special move
+ * constructor, and you'll just get a copy. This means that templates can use
+ * Move whenever they know they won't use the original value any more, even if
+ * they're not sure whether the type at hand has a specialized move constructor.
+ * If it doesn't, the 'T&&' will just convert to a 'T&', and the ordinary copy
+ * constructor will apply.
  *
- * Note that MoveRef<T> implicitly converts to T&, so you can pass a MoveRef<T>
- * to an ordinary copy constructor for a type that doesn't support a special
- * move constructor, and you'll just get a copy.  This means that templates can
- * use Move whenever they know they won't use the original value any more, even
- * if they're not sure whether the type at hand has a specialized move
- * constructor.  If it doesn't, the MoveRef<T> will just convert to a T&, and
- * the ordinary copy constructor will apply.
+ * A class with a move constructor can also provide a move assignment operator.
+ * A generic definition would run this's destructor, and then apply the move
+ * constructor to *this's memory. A typical definition:
  *
- * A class with a move constructor can also provide a move assignment operator,
- * which runs this's destructor, and then applies the move constructor to
- * *this's memory. A typical definition:
- *
- *   C& operator=(C&& rhs) {  // or |MoveRef<C> rhs|
+ *   C& operator=(C&& rhs) {
+ *     MOZ_ASSERT(&rhs != this, "self-moves are prohibited");
  *     this->~C();
- *     new(this) C(rhs);
+ *     new(this) C(Move(rhs));
  *     return *this;
  *   }
  *
  * With that in place, one can write move assignments like this:
  *
- *   c2 = Move(c1); // or OldMove()
+ *   c2 = Move(c1);
  *
  * This destroys c1, moves c1's value to c2, and leaves c1 in an undefined but
  * destructible state.
  *
- * This header file defines MoveRef, Move, and OldMove in the mozilla namespace.
- * It's up to individual containers to annotate moves as such, by calling Move
- * or OldMove; and it's up to individual types to define move constructors.
+ * As we say, a move must leave the original in a "destructible" state. The
+ * original's destructor will still be called, so if a move doesn't
+ * actually steal all its resources, that's fine. We require only that the
+ * move destination must take on the original's value; and that destructing
+ * the original must not break the move destination.
+ *
+ * (Opinions differ on whether move assignment operators should deal with move
+ * assignment of an object onto itself. It seems wise to either handle that
+ * case, or assert that it does not occur.)
+ *
+ * Forwarding:
  *
- * One hint: if you're writing a move constructor where the type has members
- * that should be moved themselves, it's much nicer to write this:
+ * Sometimes we want copy construction or assignment if we're passed an ordinary
+ * value, but move construction if passed an rvalue reference. For example, if
+ * our constructor takes two arguments and either could usefully be a move, it
+ * seems silly to write out all four combinations:
+ *
+ *   C::C(X&  x, Y&  y) : x(x),       y(y)       { }
+ *   C::C(X&  x, Y&& y) : x(x),       y(Move(y)) { }
+ *   C::C(X&& x, Y&  y) : x(Move(x)), y(y)       { }
+ *   C::C(X&& x, Y&& y) : x(Move(x)), y(Move(y)) { }
+ *
+ * To avoid this, C++11 has tweaks to make it possible to write what you mean.
+ * The four constructor overloads above can be written as one constructor
+ * template like so:
+ *
+ *   template <typename XArg, typename YArg>
+ *   C::C(XArg&& x, YArg&& y) : x(Forward<XArg>(x)), y(Forward<YArg>(y)) { }
+ *
+ * ("'Don't Repeat Yourself'? What's that?")
+ *
+ * This takes advantage of two new rules in C++11:
  *
- *   C(MoveRef<C> c) : x(Move(c->x)), y(Move(c->y)) { }
+ * - First, when a function template takes an argument that is an rvalue
+ *   reference to a template argument (like 'XArg&& x' and 'YArg&& y' above),
+ *   then when the argument is applied to an lvalue, the template argument
+ *   resolves to 'T &'; and when it is applied to an rvalue, the template
+ *   argument resolves to 'T &&'. Thus, in a call to C::C like:
+ *
+ *      X foo(int);
+ *      Y yy;
  *
- * than the equivalent:
+ *      C(foo(5), yy)
+ *
+ *   XArg would resolve to 'X&&', and YArg would resolve to 'Y&'.
+ *
+ * - Second, whereas C++ used to forbid references to references, C++11 defines
+ *   'collapsing rules': 'T& &', 'T&& &', and 'T& &&' (that is, any combination
+ *   involving an lvalue reference) now collapse to simply 'T&'; and 'T&& &&'
+ *   collapses to 'T&&'.
  *
- *   C(MoveRef<C> c) { new(&x) X(Move(c->x)); new(&y) Y(Move(c->y)); }
+ *   Thus, in the call above, 'XArg&&' is 'X&& &&', collapsing to 'X&&'; and
+ *   'YArg&&' is 'Y& &&', which collapses to 'Y&'. Because the arguments are
+ *   declared as rvalue references to template arguments, the rvalue-ness
+ *   "shines through" where present.
  *
- * especially since GNU C++ fails to notice that this does indeed initialize x
- * and y, which may matter if they're const.
+ * Then, the 'Forward<T>' function --- you must invoke 'Forward' with its type
+ * argument --- returns an lvalue reference or an rvalue reference to its
+ * argument, depending on what T is. In our unified constructor definition, that
+ * means that we'll invoke either the copy or move constructors for x and y,
+ * depending on what we gave C's constructor. In our call, we'll move 'foo()'
+ * into 'x', but copy 'yy' into 'y'.
+ *
+ * This header file defines Move and Forward in the mozilla namespace. It's up
+ * to individual containers to annotate moves as such, by calling Move; and it's
+ * up to individual types to define move constructors and assignment operators
+ * when valuable.
+ *
+ * (C++11 says that the <utility> header file should define 'std::move' and
+ * 'std::forward', which are just like our 'Move' and 'Forward'; but those
+ * definitions aren't available in that header on all our platforms, so we
+ * define them ourselves here.)
  */
-template<typename T>
-class MoveRef
-{
-    T* pointer;
-
-  public:
-    explicit MoveRef(T& t) : pointer(&t) { }
-    T& operator*() const { return *pointer; }
-    T* operator->() const { return pointer; }
-    operator T& () const { return *pointer; }
-};
-
-template<typename T>
-inline MoveRef<T>
-OldMove(T& t)
-{
-  return MoveRef<T>(t);
-}
-
-template<typename T>
-inline MoveRef<T>
-OldMove(const T& t)
-{
-  // With some versions of gcc, for a class C, there's an (incorrect) ambiguity
-  // between the C(const C&) constructor and the default C(C&&) C++11 move
-  // constructor, when the constructor is called with a const C& argument.
-  //
-  // This ambiguity manifests with the Move implementation above when Move is
-  // passed const U& for some class U.  Calling Move(const U&) returns a
-  // MoveRef<const U&>, which is then commonly passed to the U constructor,
-  // triggering an implicit conversion to const U&.  gcc doesn't know whether to
-  // call U(const U&) or U(U&&), so it wrongly reports a compile error.
-  //
-  // http://gcc.gnu.org/bugzilla/show_bug.cgi?id=50442 has since been fixed, so
-  // this is no longer an issue for up-to-date compilers.  But there's no harm
-  // in keeping it around for older compilers, so we might as well.  See also
-  // bug 686280.
-  return MoveRef<T>(const_cast<T&>(t));
-}
 
 /**
  * Identical to std::Move(); this is necessary until our stlport supports
  * std::move().
  */
 template<typename T>
 inline typename RemoveReference<T>::Type&&
 Move(T&& a)
 {
   return static_cast<typename RemoveReference<T>::Type&&>(a);
 }
 
 /**
- * These two overloads are identidal to std::Forward(); they are necessary until
+ * These two overloads are identical to std::forward(); they are necessary until
  * our stlport supports std::forward().
  */
 template<typename T>
 inline T&&
 Forward(typename RemoveReference<T>::Type& a)
 {
   return static_cast<T&&>(a);
 }
@@ -206,16 +213,16 @@ Forward(typename RemoveReference<T>::Typ
   return static_cast<T&&>(t);
 }
 
 /** Swap |t| and |u| using move-construction if possible. */
 template<typename T>
 inline void
 Swap(T& t, T& u)
 {
-  T tmp(OldMove(t));
-  t = OldMove(u);
-  u = OldMove(tmp);
+  T tmp(Move(t));
+  t = Move(u);
+  u = Move(tmp);
 }
 
 } // namespace mozilla
 
 #endif /* mozilla_Move_h */
--- a/mfbt/Vector.h
+++ b/mfbt/Vector.h
@@ -79,19 +79,19 @@ struct VectorImpl
         new(dst) T(*p);
     }
 
     /*
      * Move-constructs objects in the uninitialized range
      * [dst, dst+(srcend-srcbeg)) from the range [srcbeg, srcend).
      */
     template<typename U>
-    static inline void moveConstruct(T* dst, const U* srcbeg, const U* srcend) {
-      for (const U* p = srcbeg; p < srcend; ++p, ++dst)
-        new(dst) T(OldMove(*p));
+    static inline void moveConstruct(T* dst, U* srcbeg, U* srcend) {
+      for (U* p = srcbeg; p < srcend; ++p, ++dst)
+        new(dst) T(Move(*p));
     }
 
     /*
      * Copy-constructs objects in the uninitialized range [dst, dst+n) from the
      * same object u.
      */
     template<typename U>
     static inline void copyConstructN(T* dst, size_t n, const U& u) {
@@ -110,17 +110,17 @@ struct VectorImpl
       MOZ_ASSERT(!v.usingInlineStorage());
       MOZ_ASSERT(!CapacityHasExcessSpace<T>(newCap));
       T* newbuf = reinterpret_cast<T*>(v.malloc_(newCap * sizeof(T)));
       if (!newbuf)
         return false;
       T* dst = newbuf;
       T* src = v.beginNoCheck();
       for (; src < v.endNoCheck(); ++dst, ++src)
-        new(dst) T(OldMove(*src));
+        new(dst) T(Move(*src));
       VectorImpl::destroy(v.beginNoCheck(), v.endNoCheck());
       v.free_(v.mBegin);
       v.mBegin = newbuf;
       /* v.mLength is unchanged. */
       v.mCapacity = newCap;
       return true;
     }
 };
@@ -297,30 +297,30 @@ class VectorBase : private AllocPolicy
     size_t reserved() const {
       MOZ_ASSERT(mReserved <= mCapacity);
       MOZ_ASSERT(mLength <= mReserved);
       return mReserved;
     }
 #endif
 
     /* Append operations guaranteed to succeed due to pre-reserved space. */
-    template<typename U> void internalAppend(const U& u);
+    template<typename U> void internalAppend(U&& u);
     template<typename U, size_t O, class BP, class UV>
     void internalAppendAll(const VectorBase<U, O, BP, UV>& u);
     void internalAppendN(const T& t, size_t n);
     template<typename U> void internalAppend(const U* begin, size_t length);
 
   public:
     static const size_t sMaxInlineStorage = N;
 
     typedef T ElementType;
 
     VectorBase(AllocPolicy = AllocPolicy());
-    VectorBase(MoveRef<ThisVector>); /* Move constructor. */
-    ThisVector& operator=(MoveRef<ThisVector>); /* Move assignment. */
+    VectorBase(ThisVector&&); /* Move constructor. */
+    ThisVector& operator=(ThisVector&&); /* Move assignment. */
     ~VectorBase();
 
     /* accessors */
 
     const AllocPolicy& allocPolicy() const {
       return *this;
     }
 
@@ -448,38 +448,38 @@ class VectorBase : private AllocPolicy
     /**
      * If true, appending |needed| elements won't reallocate elements storage.
      * This *doesn't* mean that infallibleAppend may be used!  You still must
      * reserve the extra space, even if this method indicates that appends won't
      * need to reallocate elements storage.
      */
     bool canAppendWithoutRealloc(size_t needed) const;
 
+    /** Potentially fallible append operations. */
+
     /**
-     * Potentially fallible append operations.
-     *
-     * The function templates that take an unspecified type U require a const T&
-     * or a MoveRef<T>.  The MoveRef<T> variants move their operands into the
-     * vector, instead of copying them.  If they fail, the operand is left
-     * unmoved.
+     * This can take either a T& or a T&&. Given a T&&, it moves |u| into the
+     * vector, instead of copying it. If the append fails, |u| is left in its
+     * original, unmoved state.
      */
-    template<typename U> bool append(const U& u);
+    template<typename U> bool append(U&& u);
+
     template<typename U, size_t O, class BP, class UV>
     bool appendAll(const VectorBase<U, O, BP, UV>& u);
     bool appendN(const T& t, size_t n);
     template<typename U> bool append(const U* begin, const U* end);
     template<typename U> bool append(const U* begin, size_t length);
 
     /*
      * Guaranteed-infallible append operations for use upon vectors whose
      * memory has been pre-reserved.  Don't use this if you haven't reserved the
      * memory!
      */
-    template<typename U> void infallibleAppend(const U& u) {
-      internalAppend(u);
+    template<typename U> void infallibleAppend(U&& u) {
+      internalAppend(Forward<U>(u));
     }
     void infallibleAppendN(const T& t, size_t n) {
       internalAppendN(t, n);
     }
     template<typename U> void infallibleAppend(const U* aBegin, const U* aEnd) {
       internalAppend(aBegin, PointerRangeSize(aBegin, aEnd));
     }
     template<typename U> void infallibleAppend(const U* aBegin, size_t aLength) {
@@ -521,17 +521,18 @@ class VectorBase : private AllocPolicy
      * Example usage:
      *
      *   if (!(p = vec.insert(p, val)))
      *     <handle failure>
      *   <keep working with p>
      *
      * This is inherently a linear-time operation.  Be careful!
      */
-    T* insert(T* p, const T& val);
+    template<typename U>
+    T* insert(T* p, U&& val);
 
     /**
      * Removes the element |t|, which must fall in the bounds [begin, end),
      * shifting existing elements from |t + 1| onward one position lower.
      */
     void erase(T* t);
 
     /**
@@ -545,16 +546,20 @@ class VectorBase : private AllocPolicy
      */
     size_t sizeOfIncludingThis(MallocSizeOf mallocSizeOf) const;
 
     void swap(ThisVector& other);
 
   private:
     VectorBase(const VectorBase&) MOZ_DELETE;
     void operator=(const VectorBase&) MOZ_DELETE;
+
+    /* Move-construct/assign only from our derived class, ThisVector. */
+    VectorBase(VectorBase&&) MOZ_DELETE;
+    void operator=(VectorBase&&) MOZ_DELETE;
 };
 
 /* This does the re-entrancy check plus several other sanity checks. */
 #define MOZ_REENTRANCY_GUARD_ET_AL \
   ReentrancyGuard g(*this); \
   MOZ_ASSERT_IF(usingInlineStorage(), mCapacity == sInlineCapacity); \
   MOZ_ASSERT(reserved() <= mCapacity); \
   MOZ_ASSERT(mLength <= reserved()); \
@@ -574,60 +579,61 @@ VectorBase<T, N, AP, TV>::VectorBase(AP 
 #endif
 {
   mBegin = static_cast<T*>(storage.addr());
 }
 
 /* Move constructor. */
 template<typename T, size_t N, class AllocPolicy, class TV>
 MOZ_ALWAYS_INLINE
-VectorBase<T, N, AllocPolicy, TV>::VectorBase(MoveRef<TV> rhs)
-  : AllocPolicy(rhs)
+VectorBase<T, N, AllocPolicy, TV>::VectorBase(TV&& rhs)
+  : AllocPolicy(Move(rhs))
 #ifdef DEBUG
     , entered(false)
 #endif
 {
-  mLength = rhs->mLength;
-  mCapacity = rhs->mCapacity;
+  mLength = rhs.mLength;
+  mCapacity = rhs.mCapacity;
 #ifdef DEBUG
-  mReserved = rhs->mReserved;
+  mReserved = rhs.mReserved;
 #endif
 
-  if (rhs->usingInlineStorage()) {
+  if (rhs.usingInlineStorage()) {
     /* We can't move the buffer over in this case, so copy elements. */
     mBegin = static_cast<T*>(storage.addr());
-    Impl::moveConstruct(mBegin, rhs->beginNoCheck(), rhs->endNoCheck());
+    Impl::moveConstruct(mBegin, rhs.beginNoCheck(), rhs.endNoCheck());
     /*
      * Leave rhs's mLength, mBegin, mCapacity, and mReserved as they are.
      * The elements in its in-line storage still need to be destroyed.
      */
   } else {
     /*
      * Take src's buffer, and turn src into an empty vector using
      * in-line storage.
      */
-    mBegin = rhs->mBegin;
-    rhs->mBegin = static_cast<T*>(rhs->storage.addr());
-    rhs->mCapacity = sInlineCapacity;
-    rhs->mLength = 0;
+    mBegin = rhs.mBegin;
+    rhs.mBegin = static_cast<T*>(rhs.storage.addr());
+    rhs.mCapacity = sInlineCapacity;
+    rhs.mLength = 0;
 #ifdef DEBUG
-    rhs->mReserved = sInlineCapacity;
+    rhs.mReserved = sInlineCapacity;
 #endif
   }
 }
 
 /* Move assignment. */
 template<typename T, size_t N, class AP, class TV>
 MOZ_ALWAYS_INLINE
 TV&
-VectorBase<T, N, AP, TV>::operator=(MoveRef<TV> rhs)
+VectorBase<T, N, AP, TV>::operator=(TV&& rhs)
 {
+  MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
   TV* tv = static_cast<TV*>(this);
   tv->~TV();
-  new(tv) TV(rhs);
+  new(tv) TV(Move(rhs));
   return *tv;
 }
 
 template<typename T, size_t N, class AP, class TV>
 MOZ_ALWAYS_INLINE
 VectorBase<T, N, AP, TV>::~VectorBase()
 {
   MOZ_REENTRANCY_GUARD_ET_AL;
@@ -890,21 +896,21 @@ MOZ_ALWAYS_INLINE void
 VectorBase<T, N, AP, TV>::internalAppendAll(const VectorBase<U, O, BP, UV>& other)
 {
   internalAppend(other.begin(), other.length());
 }
 
 template<typename T, size_t N, class AP, class TV>
 template<typename U>
 MOZ_ALWAYS_INLINE void
-VectorBase<T, N, AP, TV>::internalAppend(const U& u)
+VectorBase<T, N, AP, TV>::internalAppend(U&& u)
 {
   MOZ_ASSERT(mLength + 1 <= mReserved);
   MOZ_ASSERT(mReserved <= mCapacity);
-  new(endNoCheck()) T(u);
+  new(endNoCheck()) T(Forward<U>(u));
   ++mLength;
 }
 
 template<typename T, size_t N, class AP, class TV>
 MOZ_ALWAYS_INLINE bool
 VectorBase<T, N, AP, TV>::appendN(const T& t, size_t needed)
 {
   MOZ_REENTRANCY_GUARD_ET_AL;
@@ -925,34 +931,35 @@ VectorBase<T, N, AP, TV>::internalAppend
 {
   MOZ_ASSERT(mLength + needed <= mReserved);
   MOZ_ASSERT(mReserved <= mCapacity);
   Impl::copyConstructN(endNoCheck(), needed, t);
   mLength += needed;
 }
 
 template<typename T, size_t N, class AP, class TV>
+template<typename U>
 inline T*
-VectorBase<T, N, AP, TV>::insert(T* p, const T& val)
+VectorBase<T, N, AP, TV>::insert(T* p, U&& val)
 {
   MOZ_ASSERT(begin() <= p);
   MOZ_ASSERT(p <= end());
   size_t pos = p - begin();
   MOZ_ASSERT(pos <= mLength);
   size_t oldLength = mLength;
   if (pos == oldLength) {
-    if (!append(val))
+    if (!append(Forward<U>(val)))
       return nullptr;
   } else {
-    T oldBack = back();
-    if (!append(oldBack)) /* Dup the last element. */
+    T oldBack = Move(back());
+    if (!append(Move(oldBack))) /* Dup the last element. */
       return nullptr;
     for (size_t i = oldLength; i > pos; --i)
-      (*this)[i] = (*this)[i - 1];
-    (*this)[pos] = val;
+      (*this)[i] = Move((*this)[i - 1]);
+    (*this)[pos] = Forward<U>(val);
   }
   return begin() + pos;
 }
 
 template<typename T, size_t N, class AP, class TV>
 inline void
 VectorBase<T, N, AP, TV>::erase(T* it)
 {
@@ -992,27 +999,27 @@ VectorBase<T, N, AP, TV>::internalAppend
   MOZ_ASSERT(mReserved <= mCapacity);
   Impl::copyConstruct(endNoCheck(), insBegin, insBegin + insLength);
   mLength += insLength;
 }
 
 template<typename T, size_t N, class AP, class TV>
 template<typename U>
 MOZ_ALWAYS_INLINE bool
-VectorBase<T, N, AP, TV>::append(const U& u)
+VectorBase<T, N, AP, TV>::append(U&& u)
 {
   MOZ_REENTRANCY_GUARD_ET_AL;
   if (mLength == mCapacity && !growStorageBy(1))
     return false;
 
 #ifdef DEBUG
   if (mLength + 1 > mReserved)
     mReserved = mLength + 1;
 #endif
-  internalAppend(u);
+  internalAppend(Forward<U>(u));
   return true;
 }
 
 template<typename T, size_t N, class AP, class TV>
 template<typename U, size_t O, class BP, class UV>
 MOZ_ALWAYS_INLINE bool
 VectorBase<T, N, AP, TV>::appendAll(const VectorBase<U, O, BP, UV>& other)
 {
@@ -1172,19 +1179,19 @@ class Vector
                       MinInlineCapacity,
                       AllocPolicy,
                       Vector<T, MinInlineCapacity, AllocPolicy> >
 {
     typedef VectorBase<T, MinInlineCapacity, AllocPolicy, Vector> Base;
 
   public:
     Vector(AllocPolicy alloc = AllocPolicy()) : Base(alloc) {}
-    Vector(mozilla::MoveRef<Vector> vec) : Base(vec) {}
-    Vector& operator=(mozilla::MoveRef<Vector> vec) {
-      return Base::operator=(vec);
+    Vector(Vector&& vec) : Base(Move(vec)) {}
+    Vector& operator=(Vector&& vec) {
+      return Base::operator=(Move(vec));
     }
 };
 
 } // namespace mozilla
 
 #ifdef _MSC_VER
 #pragma warning(pop)
 #endif
--- a/xpcom/glue/nsHashKeys.h
+++ b/xpcom/glue/nsHashKeys.h
@@ -478,20 +478,20 @@ class nsCharPtrHashKey : public PLDHashE
 {
 public:
   typedef const char* KeyType;
   typedef const char* KeyTypePointer;
 
   nsCharPtrHashKey(const char* aKey) : mKey(strdup(aKey)) { }
   nsCharPtrHashKey(const nsCharPtrHashKey& toCopy) : mKey(strdup(toCopy.mKey)) { }
 
-  nsCharPtrHashKey(mozilla::MoveRef<nsCharPtrHashKey> other)
-    : mKey(other->mKey)
+  nsCharPtrHashKey(nsCharPtrHashKey&& other)
+    : mKey(other.mKey)
   {
-    other->mKey = nullptr;
+    other.mKey = nullptr;
   }
 
   ~nsCharPtrHashKey() { if (mKey) free(const_cast<char *>(mKey)); }
 
   const char* GetKey() const { return mKey; }
   bool KeyEquals(KeyTypePointer aKey) const
   {
     return !strcmp(mKey, aKey);
@@ -515,20 +515,20 @@ class nsUnicharPtrHashKey : public PLDHa
 {
 public:
   typedef const PRUnichar* KeyType;
   typedef const PRUnichar* KeyTypePointer;
 
   nsUnicharPtrHashKey(const PRUnichar* aKey) : mKey(NS_strdup(aKey)) { }
   nsUnicharPtrHashKey(const nsUnicharPtrHashKey& toCopy) : mKey(NS_strdup(toCopy.mKey)) { }
 
-  nsUnicharPtrHashKey(mozilla::MoveRef<nsUnicharPtrHashKey> other)
-    : mKey(other->mKey)
+  nsUnicharPtrHashKey(nsUnicharPtrHashKey&& other)
+    : mKey(other.mKey)
   {
-    other->mKey = nullptr;
+    other.mKey = nullptr;
   }
 
   ~nsUnicharPtrHashKey() { if (mKey) NS_Free(const_cast<PRUnichar *>(mKey)); }
 
   const PRUnichar* GetKey() const { return mKey; }
   bool KeyEquals(KeyTypePointer aKey) const
   {
     return !NS_strcmp(mKey, aKey);