Bug 1791765 - Update matrix-js-sdk to v19.7.0. r=clokep
author Martin Giger <martin@humanoids.be>
Wed, 28 Sep 2022 08:43:31 +0000
changeset 36817 b4c3c9d564e2a821597cadb9dcfc9aa9b4ca4163
parent 36816 abcaf011c387b6cb41c11b5bf1e6db466332d226
child 36818 144b74e92a2048554e782b49de3b04cdafaab189
push id 20334
push user thunderbird@calypsoblue.org
push date Wed, 28 Sep 2022 13:10:29 +0000
treeherder comm-central@144b74e92a20
reviewers clokep
bugs 1791765
Bug 1791765 - Update matrix-js-sdk to v19.7.0. r=clokep Differential Revision: https://phabricator.services.mozilla.com/D158247
chat/protocols/matrix/lib/README.md
chat/protocols/matrix/lib/matrix-sdk/@types/auth.js
chat/protocols/matrix/lib/matrix-sdk/@types/crypto.js
chat/protocols/matrix/lib/matrix-sdk/@types/read_receipts.js
chat/protocols/matrix/lib/matrix-sdk/NamespacedValue.js
chat/protocols/matrix/lib/matrix-sdk/client.js
chat/protocols/matrix/lib/matrix-sdk/content-helpers.js
chat/protocols/matrix/lib/matrix-sdk/crypto/OlmDevice.js
chat/protocols/matrix/lib/matrix-sdk/crypto/SecretStorage.js
chat/protocols/matrix/lib/matrix-sdk/crypto/algorithms/megolm.js
chat/protocols/matrix/lib/matrix-sdk/crypto/algorithms/olm.js
chat/protocols/matrix/lib/matrix-sdk/crypto/backup.js
chat/protocols/matrix/lib/matrix-sdk/crypto/index.js
chat/protocols/matrix/lib/matrix-sdk/crypto/olmlib.js
chat/protocols/matrix/lib/matrix-sdk/crypto/store/indexeddb-crypto-store-backend.js
chat/protocols/matrix/lib/matrix-sdk/crypto/store/indexeddb-crypto-store.js
chat/protocols/matrix/lib/matrix-sdk/crypto/store/memory-crypto-store.js
chat/protocols/matrix/lib/matrix-sdk/crypto/verification/Base.js
chat/protocols/matrix/lib/matrix-sdk/event-mapper.js
chat/protocols/matrix/lib/matrix-sdk/models/beacon.js
chat/protocols/matrix/lib/matrix-sdk/models/event-timeline-set.js
chat/protocols/matrix/lib/matrix-sdk/models/event-timeline.js
chat/protocols/matrix/lib/matrix-sdk/models/event.js
chat/protocols/matrix/lib/matrix-sdk/models/invites-ignorer.js
chat/protocols/matrix/lib/matrix-sdk/models/room.js
chat/protocols/matrix/lib/matrix-sdk/sliding-sync-sdk.js
chat/protocols/matrix/lib/matrix-sdk/sync.js
chat/protocols/matrix/lib/matrix-sdk/utils.js
chat/protocols/matrix/lib/moz.build
chat/protocols/matrix/matrix-sdk.jsm
--- a/chat/protocols/matrix/lib/README.md
+++ b/chat/protocols/matrix/lib/README.md
@@ -1,10 +1,10 @@
 This directory contains the Matrix Client-Server SDK for Javascript available
-at https://github.com/matrix-org/matrix-js-sdk/. Current version is v19.4.0.
+at https://github.com/matrix-org/matrix-js-sdk/. Current version is v19.7.0.
 
 The following npm dependencies are included:
 
 * @matrix-org/olm: https://gitlab.matrix.org/matrix-org/olm/-/packages/10 v3.2.12
 * another-json: https://www.npmjs.com/package/another-json/ v0.2.0
 * base-x: https://www.npmjs.com/package/base-x v4.0.0
 * browser-request: https://www.npmjs.com/package/browser-request v0.3.3
 * bs58: https://www.npmjs.com/package/bs58 v5.0.0
--- a/chat/protocols/matrix/lib/matrix-sdk/@types/auth.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/@types/auth.js
@@ -1,5 +1,59 @@
 "use strict";
 
 Object.defineProperty(exports, "__esModule", {
   value: true
-});
\ No newline at end of file
+});
+exports.SSOAction = exports.IdentityProviderBrand = void 0;
+
+/*
+Copyright 2022 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+// disable lint because these are wire responses
+
+/* eslint-disable camelcase */
+
+/**
+ * Represents a response to the CSAPI `/refresh` endpoint.
+ */
+
+/* eslint-enable camelcase */
+
+/**
+ * Response to GET login flows as per https://spec.matrix.org/latest/client-server-api/#get_matrixclientv3login
+ */
+
+/**
+ * Representation of SSO flow as per https://spec.matrix.org/latest/client-server-api/#client-login-via-sso
+ */
+let IdentityProviderBrand;
+exports.IdentityProviderBrand = IdentityProviderBrand;
+
+(function (IdentityProviderBrand) {
+  IdentityProviderBrand["Gitlab"] = "gitlab";
+  IdentityProviderBrand["Github"] = "github";
+  IdentityProviderBrand["Apple"] = "apple";
+  IdentityProviderBrand["Google"] = "google";
+  IdentityProviderBrand["Facebook"] = "facebook";
+  IdentityProviderBrand["Twitter"] = "twitter";
+})(IdentityProviderBrand || (exports.IdentityProviderBrand = IdentityProviderBrand = {}));
+
+/* eslint-enable camelcase */
+let SSOAction;
+exports.SSOAction = SSOAction;
+
+(function (SSOAction) {
+  SSOAction["LOGIN"] = "login";
+  SSOAction["REGISTER"] = "register";
+})(SSOAction || (exports.SSOAction = SSOAction = {}));
\ No newline at end of file
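The two new exports are Babel-compiled string enums, so each member is just its wire-format string. A minimal sketch of how they behave, assuming the compiled module can be loaded directly with Node's require (inside Thunderbird it is consumed through matrix-sdk.jsm instead):

const { SSOAction, IdentityProviderBrand } =
  require("./chat/protocols/matrix/lib/matrix-sdk/@types/auth.js");

// Each enum member is a plain string, so values can be compared or sent on the wire directly.
console.log(SSOAction.LOGIN);              // "login"
console.log(SSOAction.REGISTER);           // "register"
console.log(IdentityProviderBrand.Github); // "github"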
new file mode 100644
--- /dev/null
+++ b/chat/protocols/matrix/lib/matrix-sdk/@types/crypto.js
@@ -0,0 +1,5 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
\ No newline at end of file
--- a/chat/protocols/matrix/lib/matrix-sdk/@types/read_receipts.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/@types/read_receipts.js
@@ -22,10 +22,9 @@ limitations under the License.
 */
 let ReceiptType;
 exports.ReceiptType = ReceiptType;
 
 (function (ReceiptType) {
   ReceiptType["Read"] = "m.read";
   ReceiptType["FullyRead"] = "m.fully_read";
   ReceiptType["ReadPrivate"] = "m.read.private";
-  ReceiptType["UnstableReadPrivate"] = "org.matrix.msc2285.read.private";
 })(ReceiptType || (exports.ReceiptType = ReceiptType = {}));
\ No newline at end of file
--- a/chat/protocols/matrix/lib/matrix-sdk/NamespacedValue.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/NamespacedValue.js
@@ -3,17 +3,17 @@
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.UnstableValue = exports.ServerControlledNamespacedValue = exports.NamespacedValue = void 0;
 
 function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
 
 /*
-Copyright 2021 The Matrix.org Foundation C.I.C.
+Copyright 2021 - 2022 The Matrix.org Foundation C.I.C.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at
 
     http://www.apache.org/licenses/LICENSE-2.0
 
 Unless required by applicable law or agreed to in writing, software
@@ -50,16 +50,23 @@ class NamespacedValue {
   get altName() {
     if (!this.stable) {
       return null;
     }
 
     return this.unstable;
   }
 
+  get names() {
+    const names = [this.name];
+    const altName = this.altName;
+    if (altName) names.push(altName);
+    return names;
+  }
+
   matches(val) {
     return this.name === val || this.altName === val;
   } // this desperately wants https://github.com/microsoft/TypeScript/pull/26349 at the top level of the class
   // so we can instantiate `NamespacedValue<string, _, _>` as a default type for that namespace.
 
 
   findIn(obj) {
     let val;
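The new `names` getter returns every name the value is known by: the preferred name first, then the alternative when both are defined. A minimal sketch of the expected output, using an illustrative namespace that is not one defined by the SDK (the UnstableValue half relies on its getters preferring the unstable name):

const { NamespacedValue, UnstableValue } =
  require("./chat/protocols/matrix/lib/matrix-sdk/NamespacedValue.js");

// NamespacedValue prefers the stable name, so it comes first.
const stableFirst = new NamespacedValue("m.example", "org.example.msc0000.example");
console.log(stableFirst.names); // ["m.example", "org.example.msc0000.example"]

// UnstableValue flips the preference, so the unstable name comes first.
const unstableFirst = new UnstableValue("m.example", "org.example.msc0000.example");
console.log(unstableFirst.names); // ["org.example.msc0000.example", "m.example"]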
--- a/chat/protocols/matrix/lib/matrix-sdk/client.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/client.js
@@ -82,18 +82,22 @@ var _typedEventEmitter = require("./mode
 var _read_receipts = require("./@types/read_receipts");
 
 var _slidingSyncSdk = require("./sliding-sync-sdk");
 
 var _thread = require("./models/thread");
 
 var _beacon = require("./@types/beacon");
 
+var _NamespacedValue = require("./NamespacedValue");
+
 var _ToDeviceMessageQueue = require("./ToDeviceMessageQueue");
 
+var _invitesIgnorer = require("./models/invites-ignorer");
+
 function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
 
 function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
 
 function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
 
 function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
 
@@ -147,35 +151,38 @@ exports.ClientEvent = ClientEvent;
   ClientEvent["Room"] = "Room";
   ClientEvent["DeleteRoom"] = "deleteRoom";
   ClientEvent["SyncUnexpectedError"] = "sync.unexpectedError";
   ClientEvent["ClientWellKnown"] = "WellKnown.client";
   ClientEvent["TurnServers"] = "turnServers";
   ClientEvent["TurnServersError"] = "turnServers.error";
 })(ClientEvent || (exports.ClientEvent = ClientEvent = {}));
 
+const SSO_ACTION_PARAM = new _NamespacedValue.UnstableValue("action", "org.matrix.msc3824.action");
 /**
  * Represents a Matrix Client. Only directly construct this if you want to use
  * custom modules. Normally, {@link createClient} should be used
  * as it specifies 'sensible' defaults for these modules.
  */
+
 class MatrixClient extends _typedEventEmitter.TypedEventEmitter {
   // populated after initCrypto
   // XXX: Intended private, used in code.
   // XXX: Intended private, used in code.
   // XXX: Intended private, used in code.
   // XXX: Intended private, used in code.
   // XXX: Intended private, used in code.
   // XXX: Intended private, used in code.
   // XXX: Intended private, used in code.
   // Note: these are all `protected` to let downstream consumers make mistakes if they want to.
   // We don't technically support this usage, but have reasons to do this.
   // The pushprocessor caches useful things, so keep one and re-use it
   // Promise to a response of the server's /versions response
   // TODO: This should expire: https://github.com/matrix-org/matrix-js-sdk/issues/1020
+  // A manager for determining which invites should be ignored.
   constructor(opts) {
     super();
 
     _defineProperty(this, "reEmitter", new _ReEmitter.TypedReEmitter(this));
 
     _defineProperty(this, "olmVersion", null);
 
     _defineProperty(this, "usingExternalCrypto", false);
@@ -271,16 +278,18 @@ class MatrixClient extends _typedEventEm
     _defineProperty(this, "txnCtr", 0);
 
     _defineProperty(this, "mediaHandler", new _mediaHandler.MediaHandler(this));
 
     _defineProperty(this, "pendingEventEncryption", new Map());
 
     _defineProperty(this, "toDeviceMessageQueue", void 0);
 
+    _defineProperty(this, "ignoredInvites", void 0);
+
     _defineProperty(this, "startCallEventHandler", () => {
       if (this.isInitialSyncComplete()) {
         this.callEventHandler.start();
         this.off(ClientEvent.Sync, this.startCallEventHandler);
       }
     });
 
     opts.baseUrl = utils.ensureNoTrailingSlash(opts.baseUrl);
@@ -440,16 +449,17 @@ class MatrixClient extends _typedEventEm
           highlightCount += pushActions.tweaks && pushActions.tweaks.highlight ? 1 : 0;
         } // Note: we don't need to handle 'total' notifications because the counts
         // will come from the server.
 
 
         room.setUnreadNotificationCount(_matrix.NotificationCountType.Highlight, highlightCount);
       }
     });
+    this.ignoredInvites = new _invitesIgnorer.IgnoredInvites(this);
   }
   /**
    * High level helper method to begin syncing and poll for new events. To listen for these
    * events, add a listener for {@link module:client~MatrixClient#event:"event"}
    * via {@link module:client~MatrixClient#on}. Alternatively, listen for specific
    * state change events.
    * @param {Object=} opts Options to apply when syncing.
    */
@@ -4409,16 +4419,17 @@ class MatrixClient extends _typedEventEm
     };
     this.ongoingScrollbacks[room.roomId] = info;
     return prom;
   }
   /**
    * @param {object} [options]
    * @param {boolean} options.preventReEmit don't re-emit events emitted on an event mapped by this mapper on the client
    * @param {boolean} options.decrypt decrypt event proactively
+   * @param {boolean} options.toDevice the event is a to_device event
    * @return {Function}
    */
 
 
   getEventMapper(options) {
     return (0, _eventMapper.eventMapperFor)(this, options || {});
   }
   /**
@@ -6234,30 +6245,33 @@ class MatrixClient extends _typedEventEm
     return this.getSsoLoginUrl(redirectUrl, "cas");
   }
   /**
    * @param {string} redirectUrl The URL to redirect to after the HS
    *     authenticates with the SSO.
    * @param {string} loginType The type of SSO login we are doing (sso or cas).
    *     Defaults to 'sso'.
    * @param {string} idpId The ID of the Identity Provider being targeted, optional.
+   * @param {SSOAction} action the SSO flow to indicate to the IdP, optional.
    * @return {string} The HS URL to hit to begin the SSO login process.
    */
 
 
-  getSsoLoginUrl(redirectUrl, loginType = "sso", idpId) {
+  getSsoLoginUrl(redirectUrl, loginType = "sso", idpId, action) {
     let url = "/login/" + loginType + "/redirect";
 
     if (idpId) {
       url += "/" + idpId;
     }
 
-    return this.http.getUrl(url, {
-      redirectUrl
-    }, _httpApi.PREFIX_R0);
+    const params = {
+      redirectUrl,
+      [SSO_ACTION_PARAM.unstable]: action
+    };
+    return this.http.getUrl(url, params, _httpApi.PREFIX_R0);
   }
   /**
    * @param {string} token Login token previously received from homeserver
    * @param {module:client.callback} callback Optional.
    * @return {Promise} Resolves: TODO
    * @return {module:http-api.MatrixError} Rejects: with an error response.
    */
 
@@ -6591,20 +6605,19 @@ class MatrixClient extends _typedEventEm
   async setRoomReadMarkersHttpRequest(roomId, rmEventId, rrEventId, rpEventId) {
     const path = utils.encodeUri("/rooms/$roomId/read_markers", {
       $roomId: roomId
     });
     const content = {
       [_read_receipts.ReceiptType.FullyRead]: rmEventId,
       [_read_receipts.ReceiptType.Read]: rrEventId
     };
-    const privateField = await utils.getPrivateReadReceiptField(this);
-
-    if (privateField) {
-      content[privateField] = rpEventId;
+
+    if (await this.doesServerSupportUnstableFeature("org.matrix.msc2285.stable")) {
+      content[_read_receipts.ReceiptType.ReadPrivate] = rpEventId;
     }
 
     return this.http.authedRequest(undefined, _httpApi.Method.Post, path, undefined, content);
   }
   /**
    * @return {Promise} Resolves: A list of the user's current rooms
    * @return {module:http-api.MatrixError} Rejects: with an error response.
    */
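Among the client.js changes, getSsoLoginUrl gains an optional SSOAction argument, which is appended to the redirect URL as the unstable org.matrix.msc3824.action query parameter so the homeserver/IdP can tailor the flow. A minimal sketch, assuming `client` is an existing MatrixClient instance and the URLs are placeholders:

const { SSOAction } = require("./chat/protocols/matrix/lib/matrix-sdk/@types/auth.js");

// The returned URL now carries org.matrix.msc3824.action=register (when an
// action is supplied) in addition to the redirectUrl parameter.
const url = client.getSsoLoginUrl(
  "https://example.org/after-login", // redirectUrl
  "sso",                             // loginType
  undefined,                         // idpId: no specific identity provider
  SSOAction.REGISTER,                // tell the IdP this is a registration flow
);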
--- a/chat/protocols/matrix/lib/matrix-sdk/content-helpers.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/content-helpers.js
@@ -270,23 +270,20 @@ const makeBeaconContent = (uri, timestam
     rel_type: _matrixEventsSdk.REFERENCE_RELATION.name,
     event_id: beaconInfoEventId
   }
 });
 
 exports.makeBeaconContent = makeBeaconContent;
 
 const parseBeaconContent = content => {
-  const {
-    description,
-    uri
-  } = _location.M_LOCATION.findIn(content);
+  const location = _location.M_LOCATION.findIn(content);
 
   const timestamp = _location.M_TIMESTAMP.findIn(content);
 
   return {
-    description,
-    uri,
+    description: location?.description,
+    uri: location?.uri,
     timestamp
   };
 };
 
 exports.parseBeaconContent = parseBeaconContent;
\ No newline at end of file
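parseBeaconContent no longer destructures the result of M_LOCATION.findIn directly, so content without an m.location section yields undefined fields instead of a TypeError. A minimal sketch of the new behaviour:

const { parseBeaconContent } =
  require("./chat/protocols/matrix/lib/matrix-sdk/content-helpers.js");

// Previously this threw while destructuring undefined; now callers simply get
// undefined fields and must check them before use (as models/beacon.js does below).
const parsed = parseBeaconContent({});
console.log(parsed.uri);       // undefined
console.log(parsed.timestamp); // undefined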
--- a/chat/protocols/matrix/lib/matrix-sdk/crypto/OlmDevice.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/crypto/OlmDevice.js
@@ -949,24 +949,45 @@ class OlmDevice {
 
           if (sessionId != session.session_id()) {
             throw new Error("Mismatched group session ID from senderKey: " + senderKey);
           }
 
           if (existingSession) {
             _logger.logger.log("Update for megolm session " + senderKey + "/" + sessionId);
 
-            if (existingSession.first_known_index() <= session.first_known_index() && !(existingSession.first_known_index() == session.first_known_index() && !extraSessionData.untrusted && existingSessionData.untrusted)) {
-              // existing session has lower index (i.e. can
-              // decrypt more), or they have the same index and
-              // the new sessions trust does not win over the old
-              // sessions trust, so keep it
-              _logger.logger.log(`Keeping existing megolm session ${sessionId}`);
+            if (existingSession.first_known_index() <= session.first_known_index()) {
+              if (!existingSessionData.untrusted || extraSessionData.untrusted) {
+                // existing session has less-than-or-equal index
+                // (i.e. can decrypt at least as much), and the
+                // new session's trust does not win over the old
+                // session's trust, so keep it
+                _logger.logger.log(`Keeping existing megolm session ${sessionId}`);
+
+                return;
+              }
 
-              return;
+              if (existingSession.first_known_index() < session.first_known_index()) {
+                // We want to upgrade the existing session's trust,
+                // but we can't just use the new session because we'll
+                // lose the lower index. Check that the sessions connect
+                // properly, and then manually set the existing session
+                // as trusted.
+                if (existingSession.export_session(session.first_known_index()) === session.export_session(session.first_known_index())) {
+                  _logger.logger.info("Upgrading trust of existing megolm session " + sessionId + " based on newly-received trusted session");
+
+                  existingSessionData.untrusted = false;
+                  this.cryptoStore.storeEndToEndInboundGroupSession(senderKey, sessionId, existingSessionData, txn);
+                } else {
+                  _logger.logger.warn("Newly-received megolm session " + sessionId + " does not match existing session! Keeping existing session");
+                }
+
+                return;
+              } // If the sessions have the same index, go ahead and store the new trusted one.
+
             }
           }
 
           _logger.logger.info("Storing megolm session " + senderKey + "/" + sessionId + " with first index " + session.first_known_index());
 
           const sessionData = Object.assign({}, extraSessionData, {
             room_id: roomId,
             session: session.pickle(this.pickleKey),
@@ -1166,22 +1187,30 @@ class OlmDevice {
 
         if (chainIndex === undefined) {
           chainIndex = session.first_known_index();
         }
 
         const exportedSession = session.export_session(chainIndex);
         const claimedKeys = sessionData.keysClaimed || {};
         const senderEd25519Key = claimedKeys.ed25519 || null;
+        const forwardingKeyChain = sessionData.forwardingCurve25519KeyChain || []; // older forwarded keys didn't set the "untrusted"
+        // property, but can be identified by having a
+        // non-empty forwarding key chain.  These keys should
+        // be marked as untrusted since we don't know that they
+        // can be trusted
+
+        const untrusted = "untrusted" in sessionData ? sessionData.untrusted : forwardingKeyChain.length > 0;
         result = {
           "chain_index": chainIndex,
           "key": exportedSession,
-          "forwarding_curve25519_key_chain": sessionData.forwardingCurve25519KeyChain || [],
+          "forwarding_curve25519_key_chain": forwardingKeyChain,
           "sender_claimed_ed25519_key": senderEd25519Key,
-          "shared_history": sessionData.sharedHistory || false
+          "shared_history": sessionData.sharedHistory || false,
+          "untrusted": untrusted
         };
       });
     }, _logger.logger.withPrefix("[getInboundGroupSessionKey]"));
     return result;
   }
   /**
    * Export an inbound group session
    *
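The first OlmDevice.js hunk adds a trust-upgrade path: when a trusted copy of an already-known megolm session arrives with a higher first index, the existing (more capable) copy is kept but re-marked as trusted, provided the two sessions demonstrably belong to the same ratchet. A hedged sketch of that connectivity check in isolation, assuming two Olm.InboundGroupSession objects (the helper name is illustrative, not part of the SDK):

// Decide whether an existing, lower-index session may inherit the trust of a
// newly received one.
function sessionsConnect(existingSession, newSession) {
  const index = newSession.first_known_index();
  // Exporting both sessions at the newer session's first known index must
  // produce identical session keys if they share the same megolm ratchet.
  return existingSession.export_session(index) === newSession.export_session(index);
}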
--- a/chat/protocols/matrix/lib/matrix-sdk/crypto/SecretStorage.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/crypto/SecretStorage.js
@@ -475,17 +475,30 @@ class SecretStorage {
 
   onSecretReceived(event) {
     if (event.getSender() !== this.baseApis.getUserId()) {
       // we shouldn't be receiving secrets from anyone else, so ignore
       // because someone could be trying to send us bogus data
       return;
     }
 
+    if (!olmlib.isOlmEncrypted(event)) {
+      _logger.logger.error("secret event not properly encrypted");
+
+      return;
+    }
+
     const content = event.getContent();
+    const senderKeyUser = this.baseApis.crypto.deviceList.getUserByIdentityKey(olmlib.OLM_ALGORITHM, content.sender_key);
+
+    if (senderKeyUser !== event.getSender()) {
+      _logger.logger.error("sending device does not belong to the user it claims to be from");
+
+      return;
+    }
 
     _logger.logger.log("got secret share for request", content.request_id);
 
     const requestControl = this.requests.get(content.request_id);
 
     if (requestControl) {
       // make sure that the device that sent it is one of the devices that
       // we requested from
@@ -496,16 +509,27 @@ class SecretStorage {
 
         return;
       }
 
       if (!requestControl.devices.includes(deviceInfo.deviceId)) {
         _logger.logger.log("unsolicited secret share from device", deviceInfo.deviceId);
 
         return;
+      } // ensure that the sender is trusted.  In theory, this check is
+      // unnecessary since we only accept secret shares from devices that
+      // we requested from, but it doesn't hurt.
+
+
+      const deviceTrust = this.baseApis.crypto.checkDeviceInfoTrust(event.getSender(), deviceInfo);
+
+      if (!deviceTrust.isVerified()) {
+        _logger.logger.log("secret share from unverified device");
+
+        return;
       }
 
       _logger.logger.log(`Successfully received secret ${requestControl.name} ` + `from ${deviceInfo.deviceId}`);
 
       requestControl.resolve(content.secret);
     }
   }
 
--- a/chat/protocols/matrix/lib/matrix-sdk/crypto/algorithms/megolm.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/crypto/algorithms/megolm.js
@@ -8,16 +8,18 @@ exports.isRoomSharedHistory = isRoomShar
 var _logger = require("../../logger");
 
 var olmlib = _interopRequireWildcard(require("../olmlib"));
 
 var _base = require("./base");
 
 var _OlmDevice = require("../OlmDevice");
 
+var _OutgoingRoomKeyRequestManager = require("../OutgoingRoomKeyRequestManager");
+
 function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
 
 function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
 
 function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
 
 // determine whether the key can be shared with invitees
 function isRoomSharedHistory(room) {
@@ -1118,21 +1120,26 @@ class MegolmDecryption extends _base.Dec
         throw new _base.DecryptionError("MEGOLM_UNKNOWN_INBOUND_SESSION_ID", problemDescription, {
           session: content.sender_key + '|' + content.session_id
         });
       }
 
       throw new _base.DecryptionError("MEGOLM_UNKNOWN_INBOUND_SESSION_ID", "The sender's device has not sent us the keys for this message.", {
         session: content.sender_key + '|' + content.session_id
       });
-    } // success. We can remove the event from the pending list, if that hasn't
-    // already happened.
+    } // Success. We can remove the event from the pending list, if
+    // that hasn't already happened. However, if the event was
+    // decrypted with an untrusted key, leave it on the pending
+    // list so it will be retried if we find a trusted key later.
 
 
-    this.removeEventFromPendingList(event);
+    if (!res.untrusted) {
+      this.removeEventFromPendingList(event);
+    }
+
     const payload = JSON.parse(res.result); // belt-and-braces check that the room id matches that indicated by the HS
     // (this is somewhat redundant, since the megolm session is scoped to the
     // room, so neither the sender nor a MITM can lie about the room_id).
 
     if (payload.room_id !== event.getRoomId()) {
       throw new _base.DecryptionError("MEGOLM_BAD_ROOM", "Message intended for room " + payload.room_id);
     }
 
@@ -1219,68 +1226,133 @@ class MegolmDecryption extends _base.Dec
 
 
   async onRoomKeyEvent(event) {
     const content = event.getContent();
     let senderKey = event.getSenderKey();
     let forwardingKeyChain = [];
     let exportFormat = false;
     let keysClaimed;
+    const extraSessionData = {};
 
     if (!content.room_id || !content.session_key || !content.session_id || !content.algorithm) {
       _logger.logger.error("key event is missing fields");
 
       return;
     }
 
-    if (!senderKey) {
-      _logger.logger.error("key event has no sender key (not encrypted?)");
+    if (!olmlib.isOlmEncrypted(event)) {
+      _logger.logger.error("key event not properly encrypted");
 
       return;
     }
 
+    if (content["org.matrix.msc3061.shared_history"]) {
+      extraSessionData.sharedHistory = true;
+    }
+
     if (event.getType() == "m.forwarded_room_key") {
+      const deviceInfo = this.crypto.deviceList.getDeviceByIdentityKey(olmlib.OLM_ALGORITHM, senderKey);
+      const senderKeyUser = this.baseApis.crypto.deviceList.getUserByIdentityKey(olmlib.OLM_ALGORITHM, senderKey);
+
+      if (senderKeyUser !== event.getSender()) {
+        _logger.logger.error("sending device does not belong to the user it claims to be from");
+
+        return;
+      }
+
+      const outgoingRequests = deviceInfo ? await this.crypto.cryptoStore.getOutgoingRoomKeyRequestsByTarget(event.getSender(), deviceInfo.deviceId, [_OutgoingRoomKeyRequestManager.RoomKeyRequestState.Sent]) : [];
+      const weRequested = outgoingRequests.some(req => req.requestBody.room_id === content.room_id && req.requestBody.session_id === content.session_id);
+      const room = this.baseApis.getRoom(content.room_id);
+      const memberEvent = room?.getMember(this.userId)?.events.member;
+      const fromInviter = memberEvent?.getSender() === event.getSender() || memberEvent?.getUnsigned()?.prev_sender === event.getSender() && memberEvent?.getPrevContent()?.membership === "invite";
+      const fromUs = event.getSender() === this.baseApis.getUserId();
+
+      if (!weRequested) {
+        // If someone sends us an unsolicited key and it's not
+        // shared history, ignore it
+        if (!extraSessionData.sharedHistory) {
+          _logger.logger.log("forwarded key not shared history - ignoring");
+
+          return;
+        } // If someone sends us an unsolicited key for a room
+        // we're already in, and they're not one of our other
+        // devices or the one who invited us, ignore it
+
+
+        if (room && !fromInviter && !fromUs) {
+          _logger.logger.log("forwarded key not from inviter or from us - ignoring");
+
+          return;
+        }
+      }
+
       exportFormat = true;
       forwardingKeyChain = Array.isArray(content.forwarding_curve25519_key_chain) ? content.forwarding_curve25519_key_chain : []; // copy content before we modify it
 
       forwardingKeyChain = forwardingKeyChain.slice();
       forwardingKeyChain.push(senderKey);
 
       if (!content.sender_key) {
         _logger.logger.error("forwarded_room_key event is missing sender_key field");
 
         return;
       }
 
-      senderKey = content.sender_key;
       const ed25519Key = content.sender_claimed_ed25519_key;
 
       if (!ed25519Key) {
         _logger.logger.error(`forwarded_room_key_event is missing sender_claimed_ed25519_key field`);
 
         return;
       }
 
       keysClaimed = {
         ed25519: ed25519Key
-      };
+      }; // If this is a key for a room we're not in, don't load it
+      // yet, just park it in case *this sender* invites us to
+      // that room later
+
+      if (!room) {
+        const parkedData = {
+          senderId: event.getSender(),
+          senderKey: content.sender_key,
+          sessionId: content.session_id,
+          sessionKey: content.session_key,
+          keysClaimed,
+          forwardingCurve25519KeyChain: forwardingKeyChain
+        };
+        await this.crypto.cryptoStore.doTxn('readwrite', ['parked_shared_history'], txn => this.crypto.cryptoStore.addParkedSharedHistory(content.room_id, parkedData, txn), _logger.logger.withPrefix("[addParkedSharedHistory]"));
+        return;
+      }
+
+      const sendingDevice = this.crypto.deviceList.getDeviceByIdentityKey(olmlib.OLM_ALGORITHM, senderKey);
+      const deviceTrust = this.crypto.checkDeviceInfoTrust(event.getSender(), sendingDevice);
+
+      if (fromUs && !deviceTrust.isVerified()) {
+        return;
+      } // forwarded keys are always untrusted
+
+
+      extraSessionData.untrusted = true; // replace the sender key with the sender key of the session
+      // creator for storage
+
+      senderKey = content.sender_key;
     } else {
       keysClaimed = event.getKeysClaimed();
     }
 
-    const extraSessionData = {};
-
     if (content["org.matrix.msc3061.shared_history"]) {
       extraSessionData.sharedHistory = true;
     }
 
     try {
       await this.olmDevice.addInboundGroupSession(content.room_id, senderKey, forwardingKeyChain, content.session_id, content.session_key, keysClaimed, exportFormat, extraSessionData); // have another go at decrypting events sent with this session.
 
-      if (await this.retryDecryption(senderKey, content.session_id)) {
+      if (await this.retryDecryption(senderKey, content.session_id, !extraSessionData.untrusted)) {
         // cancel any outstanding room key requests for this session.
         // Only do this if we managed to decrypt every message in the
         // session, because if we didn't, we leave the other key
         // requests in the hopes that someone sends us a key that
         // includes an earlier index.
         this.crypto.cancelRoomKeyRequest({
           algorithm: content.algorithm,
           room_id: content.room_id,
@@ -1465,32 +1537,34 @@ class MegolmDecryption extends _base.Dec
         this.crypto.backupManager.backupGroupSession(session.sender_key, session.session_id).catch(e => {
           // This throws if the upload failed, but this is fine
           // since it will have written it to the db and will retry.
           _logger.logger.log("Failed to back up megolm session", e);
         });
       } // have another go at decrypting events sent with this session.
 
 
-      this.retryDecryption(session.sender_key, session.session_id);
+      this.retryDecryption(session.sender_key, session.session_id, !extraSessionData.untrusted);
     });
   }
   /**
    * Have another go at decrypting events after we receive a key. Resolves once
    * decryption has been re-attempted on all events.
    *
    * @private
    * @param {String} senderKey
    * @param {String} sessionId
+   * @param {Boolean} keyTrusted
    *
-   * @return {Boolean} whether all messages were successfully decrypted
+   * @return {Boolean} whether all messages were successfully
+   *     decrypted with trusted keys
    */
 
 
-  async retryDecryption(senderKey, sessionId) {
+  async retryDecryption(senderKey, sessionId, keyTrusted) {
     const senderPendingEvents = this.pendingEvents.get(senderKey);
 
     if (!senderPendingEvents) {
       return true;
     }
 
     const pending = senderPendingEvents.get(sessionId);
 
@@ -1498,21 +1572,23 @@ class MegolmDecryption extends _base.Dec
       return true;
     }
 
     _logger.logger.debug("Retrying decryption on events", [...pending]);
 
     await Promise.all([...pending].map(async ev => {
       try {
         await ev.attemptDecryption(this.crypto, {
-          isRetry: true
+          isRetry: true,
+          keyTrusted
         });
       } catch (e) {// don't die if something goes wrong
       }
-    })); // If decrypted successfully, they'll have been removed from pendingEvents
+    })); // If decrypted successfully with trusted keys, they'll have
+    // been removed from pendingEvents
 
     return !this.pendingEvents.get(senderKey)?.has(sessionId);
   }
 
   async retryDecryptionFromSender(senderKey) {
     const senderPendingEvents = this.pendingEvents.get(senderKey);
 
     if (!senderPendingEvents) {
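onRoomKeyEvent now applies layered checks before accepting an m.forwarded_room_key: the event must be olm-encrypted by a device that belongs to the claimed sender, unsolicited keys are only considered when marked as shared history and sent by the inviter or one of our own devices, keys for rooms we are not yet in are parked for later, and anything accepted is stored as untrusted. The sketch below is a simplified, illustrative summary of those rules (the names are hypothetical, and the additional drop of keys from our own unverified devices is omitted):

function classifyForwardedKey({ weRequestedIt, sharedHistory, inRoomAlready, fromInviterOrUs }) {
  if (!weRequestedIt) {
    // Unsolicited keys must be marked as shared history...
    if (!sharedHistory) return "ignore";
    // ...and, for rooms we are already in, must come from the inviter or
    // from one of our own devices.
    if (inRoomAlready && !fromInviterOrUs) return "ignore";
  }
  // Keys for rooms we are not in yet are parked until this sender invites us.
  if (!inRoomAlready) return "park";
  // Anything accepted is stored, but always flagged as untrusted because it
  // was forwarded rather than received from the session creator.
  return "store-untrusted";
}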
--- a/chat/protocols/matrix/lib/matrix-sdk/crypto/algorithms/olm.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/crypto/algorithms/olm.js
@@ -171,16 +171,31 @@ class OlmDecryption extends _base.Decryp
       throw new _base.DecryptionError("OLM_BAD_RECIPIENT", "Message was intended for " + payload.recipient);
     }
 
     if (payload.recipient_keys.ed25519 != this.olmDevice.deviceEd25519Key) {
       throw new _base.DecryptionError("OLM_BAD_RECIPIENT_KEY", "Message not intended for this device", {
         intended: payload.recipient_keys.ed25519,
         our_key: this.olmDevice.deviceEd25519Key
       });
+    } // check that the device that encrypted the event belongs to the user
+    // that the event claims it's from.  We need to make sure that our
+    // device list is up-to-date.  If the device is unknown, we can only
+    // assume that the device logged out.  Some event handlers, such as
+    // secret sharing, may be more strict and reject events that come from
+    // unknown devices.
+
+
+    await this.crypto.deviceList.downloadKeys([event.getSender()], false);
+    const senderKeyUser = this.crypto.deviceList.getUserByIdentityKey(olmlib.OLM_ALGORITHM, deviceKey);
+
+    if (senderKeyUser !== event.getSender() && senderKeyUser !== undefined) {
+      throw new _base.DecryptionError("OLM_BAD_SENDER", "Message claimed to be from " + event.getSender(), {
+        real_sender: senderKeyUser
+      });
     } // check that the original sender matches what the homeserver told us, to
     // avoid people masquerading as others.
     // (this check is also provided via the sender's embedded ed25519 key,
     // which is checked elsewhere).
 
 
     if (payload.sender != event.getSender()) {
       throw new _base.DecryptionError("OLM_FORWARDED_MESSAGE", "Message forwarded from " + payload.sender, {
--- a/chat/protocols/matrix/lib/matrix-sdk/crypto/backup.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/crypto/backup.js
@@ -360,17 +360,16 @@ class BackupManager {
       }
 
       ret.sigs.push(sigInfo);
     }
 
     ret.usable = ret.sigs.some(s => {
       return s.valid && (s.device && s.deviceTrust.isVerified() || s.crossSigningId);
     });
-    ret.usable = ret.usable || ret.trusted_locally;
     return ret;
   }
   /**
    * Schedules sending all keys waiting to be sent to the backup, if not already
    * scheduled. Retries if necessary.
    *
    * @param maxDelay Maximum delay to wait in ms. 0 means no delay.
    */
--- a/chat/protocols/matrix/lib/matrix-sdk/crypto/index.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/crypto/index.js
@@ -1956,21 +1956,25 @@ class Crypto extends _typedEventEmitter.
    *     leave unchanged.
    *
    * @param {?boolean} blocked whether to mark the device as blocked. Null to
    *      leave unchanged.
    *
    * @param {?boolean} known whether to mark that the user has been made aware of
    *      the existence of this device. Null to leave unchanged
    *
+   * @param {?Record<string, any>} keys The list of keys that was present
+   * during the device verification. This will be double checked with the list
+   * of keys the given device has currently.
+   *
    * @return {Promise<module:crypto/deviceinfo>} updated DeviceInfo
    */
 
 
-  async setDeviceVerification(userId, deviceId, verified, blocked, known) {
+  async setDeviceVerification(userId, deviceId, verified, blocked, known, keys) {
     // get rid of any `undefined`s here so we can just check
     // for null rather than null or undefined
     if (verified === undefined) verified = null;
     if (blocked === undefined) blocked = null;
     if (known === undefined) known = null; // Check if the 'device' is actually a cross signing key
     // The js-sdk's verification treats cross-signing keys as devices
     // and so uses this method to mark them verified.
 
@@ -1980,16 +1984,22 @@ class Crypto extends _typedEventEmitter.
       if (blocked !== null || known !== null) {
         throw new Error("Cannot set blocked or known for a cross-signing key");
       }
 
       if (!verified) {
         throw new Error("Cannot set a cross-signing key as unverified");
       }
 
+      const gotKeyId = keys ? Object.values(keys)[0] : null;
+
+      if (keys && (Object.values(keys).length !== 1 || gotKeyId !== xsk.getId())) {
+        throw new Error(`Key did not match expected value: expected ${xsk.getId()}, got ${gotKeyId}`);
+      }
+
       if (!this.crossSigningInfo.getId() && userId === this.crossSigningInfo.userId) {
         this.storeTrustedSelfKeys(xsk.keys); // This will cause our own user trust to change, so emit the event
 
         this.emit(CryptoEvent.UserTrustStatusChanged, this.userId, this.checkUserTrust(userId));
       } // Now sign the master key with our user signing key (unless it's ourself)
 
 
       if (userId !== this.userId) {
@@ -2043,16 +2053,24 @@ class Crypto extends _typedEventEmitter.
     if (!devices || !devices[deviceId]) {
       throw new Error("Unknown device " + userId + ":" + deviceId);
     }
 
     const dev = devices[deviceId];
     let verificationStatus = dev.verified;
 
     if (verified) {
+      if (keys) {
+        for (const [keyId, key] of Object.entries(keys)) {
+          if (dev.keys[keyId] !== key) {
+            throw new Error(`Key did not match expected value: expected ${key}, got ${dev.keys[keyId]}`);
+          }
+        }
+      }
+
       verificationStatus = DeviceVerification.VERIFIED;
     } else if (verified !== null && verificationStatus == DeviceVerification.VERIFIED) {
       verificationStatus = DeviceVerification.UNVERIFIED;
     }
 
     if (blocked) {
       verificationStatus = DeviceVerification.BLOCKED;
     } else if (blocked !== null && verificationStatus == DeviceVerification.BLOCKED) {
@@ -2263,24 +2281,16 @@ class Crypto extends _typedEventEmitter.
   getEventSenderDeviceInfo(event) {
     const senderKey = event.getSenderKey();
     const algorithm = event.getWireContent().algorithm;
 
     if (!senderKey || !algorithm) {
       return null;
     }
 
-    const forwardingChain = event.getForwardingCurve25519KeyChain();
-
-    if (forwardingChain.length > 0) {
-      // we got the key this event from somewhere else
-      // TODO: check if we can trust the forwarders.
-      return null;
-    }
-
     if (event.isKeySourceUntrusted()) {
       // we got the key for this event from a source that we consider untrusted
       return null;
     } // senderKey is the Curve25519 identity key of the device which the event
     // was sent from. In the case of Megolm, it's actually the Curve25519
     // identity key of the device which set up the Megolm session.
 
 
@@ -2338,19 +2348,18 @@ class Crypto extends _typedEventEmitter.
     ret.algorithm = event.getWireContent().algorithm;
 
     if (!ret.senderKey || !ret.algorithm) {
       ret.encrypted = false;
       return ret;
     }
 
     ret.encrypted = true;
-    const forwardingChain = event.getForwardingCurve25519KeyChain();
-
-    if (forwardingChain.length > 0 || event.isKeySourceUntrusted()) {
+
+    if (event.isKeySourceUntrusted()) {
       // we got the key this event from somewhere else
       // TODO: check if we can trust the forwarders.
       ret.authenticated = false;
     } else {
       ret.authenticated = true;
     } // senderKey is the Curve25519 identity key of the device which the event
     // was sent from. In the case of Megolm, it's actually the Curve25519
     // identity key of the device which set up the Megolm session.
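setDeviceVerification can now be given the keys that were observed during verification; they are re-checked against the device's currently stored keys (or the cross-signing key) and the call throws on mismatch, so a key swapped mid-verification is not silently trusted. A minimal sketch, assuming `crypto` is an initialised Crypto instance and the user ID, device ID, and key value are placeholders:

crypto.setDeviceVerification(
  "@alice:example.org",
  "ABCDEFG",
  true,   // verified
  null,   // blocked: leave unchanged
  null,   // known: leave unchanged
  { "ed25519:ABCDEFG": "base64+key+seen+during+verification" }, // keys to re-check
).then(deviceInfo => {
  // Resolves with the updated DeviceInfo; rejects with
  // "Key did not match expected value: ..." if the stored key differs.
});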
--- a/chat/protocols/matrix/lib/matrix-sdk/crypto/olmlib.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/crypto/olmlib.js
@@ -5,24 +5,27 @@ Object.defineProperty(exports, "__esModu
 });
 exports.OLM_ALGORITHM = exports.MEGOLM_BACKUP_ALGORITHM = exports.MEGOLM_ALGORITHM = void 0;
 exports.decodeBase64 = decodeBase64;
 exports.encodeBase64 = encodeBase64;
 exports.encodeUnpaddedBase64 = encodeUnpaddedBase64;
 exports.encryptMessageForDevice = encryptMessageForDevice;
 exports.ensureOlmSessionsForDevices = ensureOlmSessionsForDevices;
 exports.getExistingOlmSessions = getExistingOlmSessions;
+exports.isOlmEncrypted = isOlmEncrypted;
 exports.pkSign = pkSign;
 exports.pkVerify = pkVerify;
 exports.verifySignature = verifySignature;
 
 var _anotherJson = _interopRequireDefault(require("another-json"));
 
 var _logger = require("../logger");
 
+var _event = require("../@types/event");
+
 function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 
 /*
 Copyright 2016 - 2021 The Matrix.org Foundation C.I.C.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at
@@ -517,16 +520,36 @@ function pkVerify(obj, pubKey, userId) {
     util.ed25519_verify(pubKey, _anotherJson.default.stringify(obj), signature);
   } finally {
     obj.signatures = sigs;
     if (unsigned) obj.unsigned = unsigned;
     util.free();
   }
 }
 /**
+ * Check that an event was encrypted using olm.
+ */
+
+
+function isOlmEncrypted(event) {
+  if (!event.getSenderKey()) {
+    _logger.logger.error("Event has no sender key (not encrypted?)");
+
+    return false;
+  }
+
+  if (event.getWireType() !== _event.EventType.RoomMessageEncrypted || !["m.olm.v1.curve25519-aes-sha2"].includes(event.getWireContent().algorithm)) {
+    _logger.logger.error("Event was not encrypted using an appropriate algorithm");
+
+    return false;
+  }
+
+  return true;
+}
+/**
  * Encode a typed array of uint8 as base64.
  * @param {Uint8Array} uint8Array The data to encode.
  * @return {string} The base64.
  */
 
 
 function encodeBase64(uint8Array) {
   return Buffer.from(uint8Array).toString("base64");
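The new isOlmEncrypted helper is what the secret-sharing and room-key handlers above use to reject to-device events that were not genuinely olm-encrypted. A minimal usage sketch, assuming `event` is a received MatrixEvent and the surrounding function is illustrative:

const olmlib = require("./chat/protocols/matrix/lib/matrix-sdk/crypto/olmlib.js");

function handleIncomingSecret(event) {
  // Rejects events without a sender key, with the wrong wire type, or with an
  // algorithm other than m.olm.v1.curve25519-aes-sha2.
  if (!olmlib.isOlmEncrypted(event)) {
    return; // drop the payload rather than trusting it
  }
  // ... continue processing the decrypted content ...
}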
--- a/chat/protocols/matrix/lib/matrix-sdk/crypto/store/indexeddb-crypto-store-backend.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/crypto/store/indexeddb-crypto-store-backend.js
@@ -236,20 +236,17 @@ class Backend {
     const results = [];
 
     function onsuccess() {
       const cursor = this.result;
 
       if (cursor) {
         const keyReq = cursor.value;
 
-        if (keyReq.recipients.includes({
-          userId,
-          deviceId
-        })) {
+        if (keyReq.recipients.some(recipient => recipient.userId === userId && recipient.deviceId === deviceId)) {
           results.push(keyReq);
         }
 
         cursor.continue();
       } else {
         // try the next state in the list
         stateIndex++;
 
@@ -852,16 +849,61 @@ class Backend {
         };
         resolve(sessions);
       };
 
       req.onerror = reject;
     });
   }
 
+  addParkedSharedHistory(roomId, parkedData, txn) {
+    if (!txn) {
+      txn = this.db.transaction("parked_shared_history", "readwrite");
+    }
+
+    const objectStore = txn.objectStore("parked_shared_history");
+    const req = objectStore.get([roomId]);
+
+    req.onsuccess = () => {
+      const {
+        parked
+      } = req.result || {
+        parked: []
+      };
+      parked.push(parkedData);
+      objectStore.put({
+        roomId,
+        parked
+      });
+    };
+  }
+
+  takeParkedSharedHistory(roomId, txn) {
+    if (!txn) {
+      txn = this.db.transaction("parked_shared_history", "readwrite");
+    }
+
+    const cursorReq = txn.objectStore("parked_shared_history").openCursor(roomId);
+    return new Promise((resolve, reject) => {
+      cursorReq.onsuccess = () => {
+        const cursor = cursorReq.result;
+
+        if (!cursor) {
+          resolve([]);
+        }
+
+        const data = cursor.value;
+        cursor.delete();
+        resolve(data);
+      };
+
+      cursorReq.onerror = reject;
+    });
+  }
+
   doTxn(mode, stores, func, log = _logger.logger) {
     let startTime;
     let description;
 
     if (PROFILE_TRANSACTIONS) {
       const txnId = this.nextTxnId++;
       startTime = Date.now();
       description = `${mode} crypto store transaction ${txnId} in ${stores}`;
@@ -945,16 +987,22 @@ function upgradeDatabase(db, oldVersion)
       keyPath: ["userId", "deviceId"]
     });
   }
 
   if (oldVersion < 10) {
     db.createObjectStore("shared_history_inbound_group_sessions", {
       keyPath: ["roomId"]
     });
+  }
+
+  if (oldVersion < 11) {
+    db.createObjectStore("parked_shared_history", {
+      keyPath: ["roomId"]
+    });
   } // Expand as needed.
 
 }
 
 function createDatabase(db) {
   const outgoingRoomKeyRequestsStore = db.createObjectStore("outgoingRoomKeyRequests", {
     keyPath: "requestId"
   }); // we assume that the RoomKeyRequestBody will have room_id and session_id
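The change from Array.prototype.includes to Array.prototype.some in the backend fixes a real bug: includes compares objects by reference, so a freshly built { userId, deviceId } literal never matched any stored recipient and the lookup silently returned nothing. A self-contained illustration:

const recipients = [{ userId: "@alice:example.org", deviceId: "ABCDEFG" }];

// includes() uses SameValueZero, i.e. reference equality for objects:
console.log(recipients.includes({ userId: "@alice:example.org", deviceId: "ABCDEFG" })); // false

// some() with an explicit field comparison matches as intended:
console.log(recipients.some(r =>
  r.userId === "@alice:example.org" && r.deviceId === "ABCDEFG")); // true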
--- a/chat/protocols/matrix/lib/matrix-sdk/crypto/store/indexeddb-crypto-store.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/crypto/store/indexeddb-crypto-store.js
@@ -600,16 +600,32 @@ class IndexedDBCryptoStore {
    * @returns {Promise} Resolves to an array of [senderKey, sessionId]
    */
 
 
   getSharedHistoryInboundGroupSessions(roomId, txn) {
     return this.backend.getSharedHistoryInboundGroupSessions(roomId, txn);
   }
   /**
+   * Park a shared-history group session for a room we may be invited to later.
+   */
+
+
+  addParkedSharedHistory(roomId, parkedData, txn) {
+    this.backend.addParkedSharedHistory(roomId, parkedData, txn);
+  }
+  /**
+   * Pop out all shared-history group sessions for a room.
+   */
+
+
+  takeParkedSharedHistory(roomId, txn) {
+    return this.backend.takeParkedSharedHistory(roomId, txn);
+  }
+  /**
    * Perform a transaction on the crypto store. Any store methods
    * that require a transaction (txn) object to be passed in may
    * only be called within a callback of either this function or
    * one of the store functions operating on the same transaction.
    *
    * @param {string} mode 'readwrite' if you need to call setter
    *     functions with this transaction. Otherwise, 'readonly'.
    * @param {string[]} stores List IndexedDBCryptoStore.STORE_*
@@ -641,13 +657,15 @@ exports.IndexedDBCryptoStore = IndexedDB
 _defineProperty(IndexedDBCryptoStore, "STORE_SESSIONS", 'sessions');
 
 _defineProperty(IndexedDBCryptoStore, "STORE_INBOUND_GROUP_SESSIONS", 'inbound_group_sessions');
 
 _defineProperty(IndexedDBCryptoStore, "STORE_INBOUND_GROUP_SESSIONS_WITHHELD", 'inbound_group_sessions_withheld');
 
 _defineProperty(IndexedDBCryptoStore, "STORE_SHARED_HISTORY_INBOUND_GROUP_SESSIONS", 'shared_history_inbound_group_sessions');
 
+_defineProperty(IndexedDBCryptoStore, "STORE_PARKED_SHARED_HISTORY", 'parked_shared_history');
+
 _defineProperty(IndexedDBCryptoStore, "STORE_DEVICE_DATA", 'device_data');
 
 _defineProperty(IndexedDBCryptoStore, "STORE_ROOMS", 'rooms');
 
 _defineProperty(IndexedDBCryptoStore, "STORE_BACKUP", 'sessions_needing_backup');
\ No newline at end of file
--- a/chat/protocols/matrix/lib/matrix-sdk/crypto/store/memory-crypto-store.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/crypto/store/memory-crypto-store.js
@@ -50,18 +50,22 @@ class MemoryCryptoStore {
 
     _defineProperty(this, "deviceData", null);
 
     _defineProperty(this, "rooms", {});
 
     _defineProperty(this, "sessionsNeedingBackup", {});
 
     _defineProperty(this, "sharedHistoryInboundGroupSessions", {});
+
+    _defineProperty(this, "parkedSharedHistory", new Map());
   }
 
+  // keyed by room ID
+
   /**
    * Ensure the database exists and is up-to-date.
    *
    * This must be called before the store can be used.
    *
    * @return {Promise} resolves to the store.
    */
   async startup() {
@@ -182,20 +186,17 @@ class MemoryCryptoStore {
     return Promise.resolve(this.outgoingRoomKeyRequests.filter(r => r.state == wantedState));
   }
 
   getOutgoingRoomKeyRequestsByTarget(userId, deviceId, wantedStates) {
     const results = [];
 
     for (const req of this.outgoingRoomKeyRequests) {
       for (const state of wantedStates) {
-        if (req.state === state && req.recipients.includes({
-          userId,
-          deviceId
-        })) {
+        if (req.state === state && req.recipients.some(recipient => recipient.userId === userId && recipient.deviceId === deviceId)) {
           results.push(req);
         }
       }
     }
 
     return Promise.resolve(results);
   }
   /**
@@ -488,16 +489,28 @@ class MemoryCryptoStore {
   addSharedHistoryInboundGroupSession(roomId, senderKey, sessionId) {
     const sessions = this.sharedHistoryInboundGroupSessions[roomId] || [];
     sessions.push([senderKey, sessionId]);
     this.sharedHistoryInboundGroupSessions[roomId] = sessions;
   }
 
   getSharedHistoryInboundGroupSessions(roomId) {
     return Promise.resolve(this.sharedHistoryInboundGroupSessions[roomId] || []);
+  }
+
+  addParkedSharedHistory(roomId, parkedData) {
+    const parked = this.parkedSharedHistory.get(roomId) ?? [];
+    parked.push(parkedData);
+    this.parkedSharedHistory.set(roomId, parked);
+  }
+
+  takeParkedSharedHistory(roomId) {
+    const parked = this.parkedSharedHistory.get(roomId) ?? [];
+    this.parkedSharedHistory.delete(roomId);
+    return Promise.resolve(parked);
   } // Session key backups
 
 
   doTxn(mode, stores, func) {
     return Promise.resolve(func(null));
   }
 
 }
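The same recipient-matching fix is applied to the in-memory store, which also gains the parked-shared-history bookkeeping. A minimal sketch of the park/take semantics using MemoryCryptoStore, assuming the module can be required directly (the parked record shown here is abbreviated; the real one carries sender, session, and key-chain fields):

const { MemoryCryptoStore } =
  require("./chat/protocols/matrix/lib/matrix-sdk/crypto/store/memory-crypto-store.js");

const store = new MemoryCryptoStore();
store.addParkedSharedHistory("!room:example.org", { senderId: "@alice:example.org", sessionId: "abc" });

store.takeParkedSharedHistory("!room:example.org").then(parked => {
  console.log(parked.length); // 1 -- "take" also removes the parked entries
});
store.takeParkedSharedHistory("!room:example.org").then(parked => {
  console.log(parked.length); // 0 on the second call
});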
--- a/chat/protocols/matrix/lib/matrix-sdk/crypto/verification/Base.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/crypto/verification/Base.js
@@ -307,17 +307,25 @@ class VerificationBase extends _typedEve
         reject(e);
       };
     });
 
     if (this.doVerification && !this.started) {
       this.started = true;
       this.resetTimer(); // restart the timeout
 
-      Promise.resolve(this.doVerification()).then(this.done.bind(this), this.cancel.bind(this));
+      new Promise((resolve, reject) => {
+        const crossSignId = this.baseApis.crypto.deviceList.getStoredCrossSigningForUser(this.userId)?.getId();
+
+        if (crossSignId === this.deviceId) {
+          reject(new Error("Device ID is the same as the cross-signing ID"));
+        }
+
+        resolve();
+      }).then(() => this.doVerification()).then(this.done.bind(this), this.cancel.bind(this));
     }
 
     return this.promise;
   }
 
   async verifyKeys(userId, keys, verifier) {
     // we try to verify all the keys that we're told about, but we might
     // not know about all of them, so keep track of the keys that we know
@@ -325,27 +333,27 @@ class VerificationBase extends _typedEve
     const verifiedDevices = [];
 
     for (const [keyId, keyInfo] of Object.entries(keys)) {
       const deviceId = keyId.split(':', 2)[1];
       const device = this.baseApis.getStoredDevice(userId, deviceId);
 
       if (device) {
         verifier(keyId, device, keyInfo);
-        verifiedDevices.push(deviceId);
+        verifiedDevices.push([deviceId, keyId, device.keys[keyId]]);
       } else {
         const crossSigningInfo = this.baseApis.crypto.deviceList.getStoredCrossSigningForUser(userId);
 
         if (crossSigningInfo && crossSigningInfo.getId() === deviceId) {
           verifier(keyId, _deviceinfo.DeviceInfo.fromStorage({
             keys: {
               [keyId]: deviceId
             }
           }, deviceId), keyInfo);
-          verifiedDevices.push(deviceId);
+          verifiedDevices.push([deviceId, keyId, deviceId]);
         } else {
           _logger.logger.warn(`verification: Could not find device ${deviceId} to verify`);
         }
       }
     } // if none of the keys could be verified, then error because the app
     // should be informed about that
 
 
@@ -353,18 +361,27 @@ class VerificationBase extends _typedEve
       throw new Error("No devices could be verified");
     }
 
     _logger.logger.info("Verification completed! Marking devices verified: ", verifiedDevices); // TODO: There should probably be a batch version of this, otherwise it's going
     // to upload each signature in a separate API call which is silly because the
     // API supports as many signatures as you like.
 
 
-    for (const deviceId of verifiedDevices) {
-      await this.baseApis.setDeviceVerified(userId, deviceId);
+    for (const [deviceId, keyId, key] of verifiedDevices) {
+      await this.baseApis.crypto.setDeviceVerification(userId, deviceId, true, null, null, {
+        [keyId]: key
+      });
+    } // if one of the user's own devices is being marked as verified / unverified,
+    // check the key backup status, since whether or not we use this depends on
+    // whether it has a signature from a verified device
+
+
+    if (userId == this.baseApis.credentials.userId) {
+      await this.baseApis.checkKeyBackup();
     }
   }
 
   get events() {
     return undefined;
   }
 
 }
--- a/chat/protocols/matrix/lib/matrix-sdk/event-mapper.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/event-mapper.js
@@ -13,16 +13,20 @@ function _objectSpread(target) { for (va
 
 function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
 
 function eventMapperFor(client, options) {
   let preventReEmit = Boolean(options.preventReEmit);
   const decrypt = options.decrypt !== false;
 
   function mapper(plainOldJsObject) {
+    if (options.toDevice) {
+      delete plainOldJsObject.room_id;
+    }
+
     const room = client.getRoom(plainOldJsObject.room_id);
     let event; // If the event is already known to the room, let's re-use the model rather than duplicating.
     // We avoid doing this to state events as they may be forward or backwards looking which tweaks behaviour.
 
     if (room && plainOldJsObject.state_key === undefined) {
       event = room.findEventById(plainOldJsObject.event_id);
     }
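The mapper now honours the toDevice flag documented on MatrixClient.getEventMapper above, stripping any room_id from such payloads so to-device events are never attached to a room model. A minimal sketch, assuming `client` is an existing MatrixClient and the payload is a placeholder:

// Any stray room_id on the raw payload is deleted before the MatrixEvent is
// built, so the event will not be looked up against or re-used from a room.
const mapper = client.getEventMapper({ toDevice: true, decrypt: false });
const event = mapper({
  type: "m.room_key_request",
  sender: "@alice:example.org",
  content: {},
  room_id: "!should-be-ignored:example.org",
});
console.log(event.getRoomId()); // undefined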
 
--- a/chat/protocols/matrix/lib/matrix-sdk/models/beacon.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/models/beacon.js
@@ -1,17 +1,15 @@
 "use strict";
 
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.isTimestampInDuration = exports.getBeaconInfoIdentifier = exports.BeaconEvent = exports.Beacon = void 0;
 
-var _location = require("../@types/location");
-
 var _contentHelpers = require("../content-helpers");
 
 var _utils = require("../utils");
 
 var _typedEventEmitter = require("./typed-event-emitter");
 
 function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
 
@@ -155,19 +153,22 @@ class Beacon extends _typedEventEmitter.
   addLocations(beaconLocationEvents) {
     // discard locations for beacons that are not live
     if (!this.isLive) {
       return;
     }
 
     const validLocationEvents = beaconLocationEvents.filter(event => {
       const content = event.getContent();
+      const parsed = (0, _contentHelpers.parseBeaconContent)(content);
+      if (!parsed.uri || !parsed.timestamp) return false; // we won't be able to process these
 
-      const timestamp = _location.M_TIMESTAMP.findIn(content);
-
+      const {
+        timestamp
+      } = parsed;
       return (// only include positions that were taken inside the beacon's live period
         isTimestampInDuration(this._beaconInfo.timestamp, this._beaconInfo.timeout, timestamp) && ( // ignore positions older than our current latest location
         !this.latestLocationState || timestamp > this.latestLocationState.timestamp)
       );
     });
     const latestLocationEvent = validLocationEvents.sort(_utils.sortEventsByLatestContentTimestamp)?.[0];
 
     if (latestLocationEvent) {
--- a/chat/protocols/matrix/lib/matrix-sdk/models/event-timeline-set.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/models/event-timeline-set.js
@@ -623,28 +623,20 @@ class EventTimelineSet extends _typedEve
   handleRemoteEcho(localEvent, oldEventId, newEventId) {
     // XXX: why don't we infer newEventId from localEvent?
     const existingTimeline = this._eventIdToTimeline.get(oldEventId);
 
     if (existingTimeline) {
       this._eventIdToTimeline.delete(oldEventId);
 
       this._eventIdToTimeline.set(newEventId, existingTimeline);
-    } else {
-      if (this.filter) {
-        if (this.filter.filterRoomTimeline([localEvent]).length) {
-          this.addEventToTimeline(localEvent, this.liveTimeline, {
-            toStartOfTimeline: false
-          });
-        }
-      } else {
-        this.addEventToTimeline(localEvent, this.liveTimeline, {
-          toStartOfTimeline: false
-        });
-      }
+    } else if (!this.filter || this.filter.filterRoomTimeline([localEvent]).length) {
+      this.addEventToTimeline(localEvent, this.liveTimeline, {
+        toStartOfTimeline: false
+      });
     }
   }
   /**
    * Removes a single event from this room.
    *
    * @param {String} eventId  The id of the event to remove
    *
    * @return {?MatrixEvent} the removed event, or null if the event was not found
--- a/chat/protocols/matrix/lib/matrix-sdk/models/event-timeline.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/models/event-timeline.js
@@ -295,17 +295,17 @@ class EventTimeline {
   getPaginationToken(direction) {
     return this.getState(direction).paginationToken;
   }
   /**
    * Set a pagination token
    *
    * @param {?string} token       pagination token
    *
-   * @param {string} direction    EventTimeline.BACKWARDS to set the pagination
+   * @param {string} direction    EventTimeline.BACKWARDS to set the pagination
    *   token for going backwards in time; EventTimeline.FORWARDS to set the
    *   pagination token for going forwards in time.
    */
 
 
   setPaginationToken(token, direction) {
     this.getState(direction).paginationToken = token;
   }
--- a/chat/protocols/matrix/lib/matrix-sdk/models/event.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/models/event.js
@@ -577,17 +577,17 @@ class MatrixEvent extends _typedEventEmi
       };
     } // start with a couple of sanity checks.
 
 
     if (!this.isEncrypted()) {
       throw new Error("Attempt to decrypt event which isn't encrypted");
     }
 
-    if (this.clearEvent && !this.isDecryptionFailure()) {
+    if (this.clearEvent && !this.isDecryptionFailure() && !(this.isKeySourceUntrusted() && options.keyTrusted)) {
       // we may want to just ignore this? let's start with rejecting it.
       throw new Error("Attempt to decrypt event which has already been decrypted");
     } // if we already have a decryption attempt in progress, then it may
     // fail because it was using outdated info. We now have reason to
     // succeed where it failed before, but we don't want to have multiple
     // attempts going at the same time, so just set a flag that says we have
     // new info.
     //
new file mode 100644
--- /dev/null
+++ b/chat/protocols/matrix/lib/matrix-sdk/models/invites-ignorer.js
@@ -0,0 +1,401 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.PolicyScope = exports.POLICIES_ACCOUNT_EVENT_TYPE = exports.IgnoredInvites = exports.IGNORE_INVITES_ACCOUNT_EVENT_KEY = void 0;
+
+var _matrixEventsSdk = require("matrix-events-sdk");
+
+var _eventTimeline = require("./event-timeline");
+
+var _partials = require("../@types/partials");
+
+var _utils = require("../utils");
+
+/*
+Copyright 2022 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+/// The event type storing the user's individual policies.
+///
+/// Exported for testing purposes.
+const POLICIES_ACCOUNT_EVENT_TYPE = new _matrixEventsSdk.UnstableValue("m.policies", "org.matrix.msc3847.policies"); /// The key within the user's individual policies storing the user's ignored invites.
+///
+/// Exported for testing purposes.
+
+exports.POLICIES_ACCOUNT_EVENT_TYPE = POLICIES_ACCOUNT_EVENT_TYPE;
+const IGNORE_INVITES_ACCOUNT_EVENT_KEY = new _matrixEventsSdk.UnstableValue("m.ignore.invites", "org.matrix.msc3847.ignore.invites"); /// The types of recommendations understood.
+
+exports.IGNORE_INVITES_ACCOUNT_EVENT_KEY = IGNORE_INVITES_ACCOUNT_EVENT_KEY;
+var PolicyRecommendation;
+/**
+ * The various scopes for policies.
+ */
+
+(function (PolicyRecommendation) {
+  PolicyRecommendation["Ban"] = "m.ban";
+})(PolicyRecommendation || (PolicyRecommendation = {}));
+
+let PolicyScope;
+/**
+ * A container for ignored invites.
+ *
+ * # Performance
+ *
+ * This implementation is extremely naive. It expects that we are dealing
+ * with a very short list of sources (e.g. only one). If real-world
+ * applications turn out to require longer lists, we may need to rework
+ * our data structures.
+ */
+
+exports.PolicyScope = PolicyScope;
+
+(function (PolicyScope) {
+  PolicyScope["User"] = "m.policy.user";
+  PolicyScope["Room"] = "m.policy.room";
+  PolicyScope["Server"] = "m.policy.server";
+})(PolicyScope || (exports.PolicyScope = PolicyScope = {}));
+
+class IgnoredInvites {
+  constructor(client) {
+    this.client = client;
+  }
+  /**
+   * Add a new rule.
+   *
+   * @param scope The scope for this rule.
+   * @param entity The entity covered by this rule. Globs are supported.
+   * @param reason A human-readable reason for introducing this new rule.
+   * @return The event id for the new rule.
+   */
+
+
+  async addRule(scope, entity, reason) {
+    const target = await this.getOrCreateTargetRoom();
+    const response = await this.client.sendStateEvent(target.roomId, scope, {
+      entity,
+      reason,
+      recommendation: PolicyRecommendation.Ban
+    });
+    return response.event_id;
+  }
+  /**
+   * Remove a rule.
+   */
+
+
+  async removeRule(event) {
+    await this.client.redactEvent(event.getRoomId(), event.getId());
+  }
+  /**
+   * Add a new room to the list of sources. If the user isn't a member of the
+   * room, attempt to join it.
+   *
+   * @param roomId A valid room id. If this room is already in the list
+   * of sources, it will not be duplicated.
+   * @return `true` if the source was added, `false` if it was already present.
+   * @throws If `roomId` isn't the id of a room that the current user is already
+   * a member of or can join.
+   *
+   * # Safety
+   *
+   * This method will rewrite the `Policies` object in the user's account data.
+   * This rewrite is inherently racy and could overwrite or be overwritten by
+   * other concurrent rewrites of the same object.
+   */
+
+
+  async addSource(roomId) {
+    // We attempt to join the room *before* calling
+    // `await this.getOrCreateSourceRooms()` to decrease the duration
+    // of the racy section.
+    await this.client.joinRoom(roomId); // Race starts.
+
+    const sources = (await this.getOrCreateSourceRooms()).map(room => room.roomId);
+
+    if (sources.includes(roomId)) {
+      return false;
+    }
+
+    sources.push(roomId);
+    await this.withIgnoreInvitesPolicies(ignoreInvitesPolicies => {
+      ignoreInvitesPolicies.sources = sources;
+    }); // Race ends.
+
+    return true;
+  }
+  /**
+   * Find out whether an invite should be ignored.
+   *
+   * @param sender The user id for the user who issued the invite.
+   * @param roomId The room to which the user is invited.
+   * @returns A rule matching the entity, if any was found, `null` otherwise.
+   */
+
+
+  async getRuleForInvite({
+    sender,
+    roomId
+  }) {
+    // In this implementation, we perform a very naive lookup:
+    // - search in each policy room;
+    // - turn each (potentially glob) rule entity into a regexp.
+    //
+    // Real-world testing will tell us whether this is performant enough.
+    // In the (unfortunately likely) case it isn't, there are several manners
+    // in which we could optimize this:
+    // - match several entities per go;
+    // - pre-compile each rule entity into a regexp;
+    // - pre-compile entire rooms into a single regexp.
+    const policyRooms = await this.getOrCreateSourceRooms();
+    const senderServer = sender.split(":")[1];
+    const roomServer = roomId.split(":")[1];
+
+    for (const room of policyRooms) {
+      const state = room.getUnfilteredTimelineSet().getLiveTimeline().getState(_eventTimeline.EventTimeline.FORWARDS);
+
+      for (const {
+        scope,
+        entities
+      } of [{
+        scope: PolicyScope.Room,
+        entities: [roomId]
+      }, {
+        scope: PolicyScope.User,
+        entities: [sender]
+      }, {
+        scope: PolicyScope.Server,
+        entities: [senderServer, roomServer]
+      }]) {
+        const events = state.getStateEvents(scope);
+
+        for (const event of events) {
+          const content = event.getContent();
+
+          if (content?.recommendation != PolicyRecommendation.Ban) {
+            // Ignoring invites only looks at `m.ban` recommendations.
+            continue;
+          }
+
+          const glob = content?.entity;
+
+          if (!glob) {
+            // Invalid event.
+            continue;
+          }
+
+          let regexp;
+
+          try {
+            regexp = new RegExp((0, _utils.globToRegexp)(glob, false));
+          } catch (ex) {
+            // Assume invalid event.
+            continue;
+          }
+
+          for (const entity of entities) {
+            if (entity && regexp.test(entity)) {
+              return event;
+            }
+          } // No match.
+
+        }
+      }
+    }
+
+    return null;
+  }
+  /**
+   * Get the target room, i.e. the room in which any new rule should be written.
+   *
+   * If there is no target room set up, a target room is created.
+   *
+   * Note: This method is public for testing reasons. Most clients should not need
+   * to call it directly.
+   *
+   * # Safety
+   *
+   * This method will rewrite the `Policies` object in the user's account data.
+   * This rewrite is inherently racy and could overwrite or be overwritten by
+   * other concurrent rewrites of the same object.
+   */
+
+
+  async getOrCreateTargetRoom() {
+    const ignoreInvitesPolicies = this.getIgnoreInvitesPolicies();
+    let target = ignoreInvitesPolicies.target; // Validate `target`. If it is invalid, trash out the current `target`
+    // and create a new room.
+
+    if (typeof target !== "string") {
+      target = null;
+    }
+
+    if (target) {
+      // Check that the room exists and is valid.
+      const room = this.client.getRoom(target);
+
+      if (room) {
+        return room;
+      } else {
+        target = null;
+      }
+    } // We need to create our own policy room for ignoring invites.
+
+
+    target = (await this.client.createRoom({
+      name: "Individual Policy Room",
+      preset: _partials.Preset.PrivateChat
+    })).room_id;
+    await this.withIgnoreInvitesPolicies(ignoreInvitesPolicies => {
+      ignoreInvitesPolicies.target = target;
+    }); // Since we have just called `createRoom`, `getRoom` should not be `null`.
+
+    return this.client.getRoom(target);
+  }
+  /**
+   * Get the list of source rooms, i.e. the rooms from which rules need to be read.
+   *
+   * If no source rooms are set up, the target room is used as the sole source room.
+   *
+   * Note: This method is public for testing reasons. Most clients should not need
+   * to call it directly.
+   *
+   * # Safety
+   *
+   * This method will rewrite the `Policies` object in the user's account data.
+   * This rewrite is inherently racy and could overwrite or be overwritten by
+   * other concurrent rewrites of the same object.
+   */
+
+
+  async getOrCreateSourceRooms() {
+    const ignoreInvitesPolicies = this.getIgnoreInvitesPolicies();
+    let sources = ignoreInvitesPolicies.sources; // Validate `sources`. If it is invalid, trash out the current `sources`
+    // and create a new list of sources from `target`.
+
+    let hasChanges = false;
+
+    if (!Array.isArray(sources)) {
+      // `sources` might not be an array.
+      hasChanges = true;
+      sources = [];
+    }
+
+    let sourceRooms = sources // `sources` could contain non-string / invalid room ids
+    .filter(roomId => typeof roomId === "string").map(roomId => this.client.getRoom(roomId)).filter(room => !!room);
+
+    if (sourceRooms.length != sources.length) {
+      hasChanges = true;
+    }
+
+    if (sourceRooms.length == 0) {
+      // `sources` could be empty (possibly because we've removed
+      // invalid content)
+      const target = await this.getOrCreateTargetRoom();
+      hasChanges = true;
+      sourceRooms = [target];
+    }
+
+    if (hasChanges) {
+      // Reload `policies`/`ignoreInvitesPolicies` in case it has been changed
+      // during or by our call to `this.getOrCreateTargetRoom()`.
+      await this.withIgnoreInvitesPolicies(ignoreInvitesPolicies => {
+        ignoreInvitesPolicies.sources = sources;
+      });
+    }
+
+    return sourceRooms;
+  }
+  /**
+   * Fetch the `IGNORE_INVITES_POLICIES` object from account data.
+   *
+   * If both an unstable prefix version and a stable prefix version are available,
+   * it will return the stable prefix version preferentially.
+   *
+   * The result is *not* validated but is guaranteed to be a non-null object.
+   *
+   * @returns A non-null object.
+   */
+
+
+  getIgnoreInvitesPolicies() {
+    return this.getPoliciesAndIgnoreInvitesPolicies().ignoreInvitesPolicies;
+  }
+  /**
+   * Modify in place the `IGNORE_INVITES_POLICIES` object from account data.
+   */
+
+
+  async withIgnoreInvitesPolicies(cb) {
+    const {
+      policies,
+      ignoreInvitesPolicies
+    } = this.getPoliciesAndIgnoreInvitesPolicies();
+    cb(ignoreInvitesPolicies);
+    policies[IGNORE_INVITES_ACCOUNT_EVENT_KEY.name] = ignoreInvitesPolicies;
+    await this.client.setAccountData(POLICIES_ACCOUNT_EVENT_TYPE.name, policies);
+  }
+  /**
+   * As `getIgnoreInvitesPolicies` but also return the `POLICIES_ACCOUNT_EVENT_TYPE`
+   * object.
+   */
+
+
+  getPoliciesAndIgnoreInvitesPolicies() {
+    let policies = {};
+
+    for (const key of [POLICIES_ACCOUNT_EVENT_TYPE.name, POLICIES_ACCOUNT_EVENT_TYPE.altName]) {
+      if (!key) {
+        continue;
+      }
+
+      const value = this.client.getAccountData(key)?.getContent();
+
+      if (value) {
+        policies = value;
+        break;
+      }
+    }
+
+    let ignoreInvitesPolicies = {};
+    let hasIgnoreInvitesPolicies = false;
+
+    for (const key of [IGNORE_INVITES_ACCOUNT_EVENT_KEY.name, IGNORE_INVITES_ACCOUNT_EVENT_KEY.altName]) {
+      if (!key) {
+        continue;
+      }
+
+      const value = policies[key];
+
+      if (value && typeof value == "object") {
+        ignoreInvitesPolicies = value;
+        hasIgnoreInvitesPolicies = true;
+        break;
+      }
+    }
+
+    if (!hasIgnoreInvitesPolicies) {
+      policies[IGNORE_INVITES_ACCOUNT_EVENT_KEY.name] = ignoreInvitesPolicies;
+    }
+
+    return {
+      policies,
+      ignoreInvitesPolicies
+    };
+  }
+
+}
+
+exports.IgnoredInvites = IgnoredInvites;
\ No newline at end of file
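For reviewers unfamiliar with the new module, a minimal usage sketch of the IgnoredInvites API added above (illustration only; the room and user ids are invented, and `client` is assumed to be a started MatrixClient):

// Illustration only, not part of the patch.
const ignoredInvites = new IgnoredInvites(client);

// Follow an additional policy room (joining it first if necessary).
await ignoredInvites.addSource("!moderation-policies:example.org");

// Record an m.ban rule for a user, with a human-readable reason.
await ignoredInvites.addRule(PolicyScope.User, "@spammer:example.org", "Unsolicited invites");

// When an invite arrives, check whether a rule matches before surfacing it.
const rule = await ignoredInvites.getRuleForInvite({
  sender: "@spammer:example.org",
  roomId: "!some-room:example.org",
});
if (rule) {
  // drop or hide the invite
}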
--- a/chat/protocols/matrix/lib/matrix-sdk/models/room.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/models/room.js
@@ -1888,32 +1888,23 @@ class Room extends _typedEventEmitter.Ty
 
           const unsigned = event.getUnsigned();
           unsigned.transaction_id = tid;
           event.setUnsigned(unsigned);
           break;
         }
       }
     }
-
-    if (event.getUnsigned().transaction_id) {
-      const existingEvent = this.txnToEvent[event.getUnsigned().transaction_id];
-
-      if (existingEvent) {
-        // remote echo of an event we sent earlier
-        this.handleRemoteEcho(event, existingEvent);
-      }
-    }
   }
   /**
    * Add an event to the end of this room's live timelines. Will fire
    * "Room.timeline".
    *
    * @param {MatrixEvent} event Event to be added
-   * @param {IAddLiveEventOptions} options addLiveEvent options
+   * @param {IAddLiveEventOptions} addLiveEventOptions addLiveEvent options
    * @fires module:client~MatrixClient#event:"Room.timeline"
    * @private
    */
 
 
   addLiveEvent(event, addLiveEventOptions) {
     const {
       duplicateStrategy,
@@ -2254,17 +2245,17 @@ class Room extends _typedEventEmitter.Ty
    * @param {MatrixEvent[]} events A list of events to add.
    * @param {IAddLiveEventOptions} options addLiveEvent options
    * @throws If <code>duplicateStrategy</code> is not falsey, 'replace' or 'ignore'.
    */
 
 
   addLiveEvents(events, duplicateStrategyOrOpts, fromCache = false) {
     let duplicateStrategy = duplicateStrategyOrOpts;
-    let timelineWasEmpty;
+    let timelineWasEmpty = false;
 
     if (typeof duplicateStrategyOrOpts === 'object') {
       ({
         duplicateStrategy,
         fromCache = false,
 
         /* roomState, (not used here) */
         timelineWasEmpty
@@ -2289,38 +2280,50 @@ class Room extends _typedEventEmitter.Ty
 
       if (liveTimeline.getNeighbouringTimeline(_eventTimeline.EventTimeline.FORWARDS)) {
         throw new Error(`live timeline ${i} is no longer live - it has a neighbouring timeline`);
       }
     }
 
     const threadRoots = this.findThreadRoots(events);
     const eventsByThread = {};
+    const options = {
+      duplicateStrategy,
+      fromCache,
+      timelineWasEmpty
+    };
 
     for (const event of events) {
       // TODO: We should have a filter to say "only add state event types X Y Z to the timeline".
       this.processLiveEvent(event);
+
+      if (event.getUnsigned().transaction_id) {
+        const existingEvent = this.txnToEvent[event.getUnsigned().transaction_id];
+
+        if (existingEvent) {
+          // remote echo of an event we sent earlier
+          this.handleRemoteEcho(event, existingEvent);
+          continue; // we can skip adding the event to the timeline sets, it is already there
+        }
+      }
+
       const {
         shouldLiveInRoom,
         shouldLiveInThread,
         threadId
       } = this.eventShouldLiveIn(event, events, threadRoots);
 
       if (shouldLiveInThread && !eventsByThread[threadId]) {
         eventsByThread[threadId] = [];
       }
 
       eventsByThread[threadId]?.push(event);
 
       if (shouldLiveInRoom) {
-        this.addLiveEvent(event, {
-          duplicateStrategy,
-          fromCache,
-          timelineWasEmpty
-        });
+        this.addLiveEvent(event, options);
       }
     }
 
     Object.entries(eventsByThread).forEach(([threadId, threadEvents]) => {
       this.addThreadedEvents(threadId, threadEvents, false);
     });
   }
 
@@ -2516,39 +2519,34 @@ class Room extends _typedEventEmitter.Ty
 
 
   getEventReadUpTo(userId, ignoreSynthesized = false) {
     // XXX: This is very very ugly and I hope I won't have to ever add a new
     // receipt type here again. IMHO this should be done by the server in
     // some more intelligent manner or the client should just use timestamps
     const timelineSet = this.getUnfilteredTimelineSet();
     const publicReadReceipt = this.getReadReceiptForUserId(userId, ignoreSynthesized, _read_receipts.ReceiptType.Read);
-    const privateReadReceipt = this.getReadReceiptForUserId(userId, ignoreSynthesized, _read_receipts.ReceiptType.ReadPrivate);
-    const unstablePrivateReadReceipt = this.getReadReceiptForUserId(userId, ignoreSynthesized, _read_receipts.ReceiptType.UnstableReadPrivate); // If we have all, compare them
-
-    if (publicReadReceipt?.eventId && privateReadReceipt?.eventId && unstablePrivateReadReceipt?.eventId) {
-      const comparison1 = timelineSet.compareEventOrdering(publicReadReceipt.eventId, privateReadReceipt.eventId);
-      const comparison2 = timelineSet.compareEventOrdering(publicReadReceipt.eventId, unstablePrivateReadReceipt.eventId);
-      const comparison3 = timelineSet.compareEventOrdering(privateReadReceipt.eventId, unstablePrivateReadReceipt.eventId);
-
-      if (comparison1 && comparison2 && comparison3) {
-        return comparison1 > 0 ? comparison2 > 0 ? publicReadReceipt.eventId : unstablePrivateReadReceipt.eventId : comparison3 > 0 ? privateReadReceipt.eventId : unstablePrivateReadReceipt.eventId;
-      }
-    }
-
-    let latest = privateReadReceipt;
-    [unstablePrivateReadReceipt, publicReadReceipt].forEach(receipt => {
-      if (receipt?.data?.ts > latest?.data?.ts || !latest) {
-        latest = receipt;
-      }
-    });
-    if (latest?.eventId) return latest?.eventId; // The more less likely it is for a read receipt to drift out of date
-    // the bigger is its precedence
-
-    return privateReadReceipt?.eventId ?? unstablePrivateReadReceipt?.eventId ?? publicReadReceipt?.eventId ?? null;
+    const privateReadReceipt = this.getReadReceiptForUserId(userId, ignoreSynthesized, _read_receipts.ReceiptType.ReadPrivate); // If we have both, compare them
+
+    let comparison;
+
+    if (publicReadReceipt?.eventId && privateReadReceipt?.eventId) {
+      comparison = timelineSet.compareEventOrdering(publicReadReceipt?.eventId, privateReadReceipt?.eventId);
+    } // If we didn't get a comparison try to compare the ts of the receipts
+
+
+    if (!comparison && publicReadReceipt?.data?.ts && privateReadReceipt?.data?.ts) {
+      comparison = publicReadReceipt?.data?.ts - privateReadReceipt?.data?.ts;
+    } // The public receipt is more likely to drift out of date so the private
+    // one has precedence
+
+
+    if (!comparison) return privateReadReceipt?.eventId ?? publicReadReceipt?.eventId ?? null; // If public read receipt is older, return the private one
+
+    return (comparison < 0 ? privateReadReceipt?.eventId : publicReadReceipt?.eventId) ?? null;
   }
   /**
    * Determines if the given user has read a particular event ID with the known
    * history of the room. This is not a definitive check as it relies only on
    * what is available to the room at the time of execution.
    * @param {String} userId The user ID to check the read state of.
    * @param {String} eventId The event ID to check if the user read.
    * @returns {Boolean} True if the user has read the event, false otherwise.
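A worked example of the simplified receipt precedence above (hypothetical timestamps, illustration only):

// Illustration only, not part of the patch.
const publicTs = 1000;            // ts of the m.read receipt
const privateTs = 2000;           // ts of the m.read.private receipt
const comparison = publicTs - privateTs; // -1000, used when compareEventOrdering() gives nothing
// comparison < 0, so getEventReadUpTo() returns the private receipt's event id.
// If neither event ordering nor timestamps yield a comparison, the private
// receipt still wins, since the public one is the more likely to be stale.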
--- a/chat/protocols/matrix/lib/matrix-sdk/sliding-sync-sdk.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/sliding-sync-sdk.js
@@ -299,17 +299,22 @@ class SlidingSyncSdk {
 
   onLifecycle(state, resp, err) {
     if (err) {
       _logger.logger.debug("onLifecycle", state, err);
     }
 
     switch (state) {
       case _slidingSync.SlidingSyncState.Complete:
-        this.purgeNotifications(); // Element won't stop showing the initial loading spinner unless we fire SyncState.Prepared
+        this.purgeNotifications();
+
+        if (!resp) {
+          break;
+        } // Element won't stop showing the initial loading spinner unless we fire SyncState.Prepared
+
 
         if (!this.lastPos) {
           this.updateSyncState(_sync.SyncState.Prepared, {
             oldSyncToken: this.lastPos,
             nextSyncToken: resp.pos,
             catchingUp: false,
             fromCache: false
           });
@@ -478,16 +483,24 @@ class SlidingSyncSdk {
       // bother setting it here. We trust our calculations better than the
       // server's for this case, and therefore will assume that our non-zero
       // count is accurate.
       if (!encrypted || encrypted && room.getUnreadNotificationCount(_room.NotificationCountType.Highlight) <= 0) {
         room.setUnreadNotificationCount(_room.NotificationCountType.Highlight, roomData.highlight_count);
       }
     }
 
+    if (Number.isInteger(roomData.invited_count)) {
+      room.currentState.setInvitedMemberCount(roomData.invited_count);
+    }
+
+    if (Number.isInteger(roomData.joined_count)) {
+      room.currentState.setJoinedMemberCount(roomData.joined_count);
+    }
+
     if (roomData.invite_state) {
       const inviteStateEvents = mapEvents(this.client, room.roomId, roomData.invite_state);
       this.processRoomEvents(room, inviteStateEvents);
 
       if (roomData.initial) {
         room.recalculate();
         this.client.store.storeRoom(room);
         this.client.emit(_client.ClientEvent.Room, room);
@@ -549,17 +562,20 @@ class SlidingSyncSdk {
             // reset the timeline.
             this.client.resetNotifTimelineSet();
         }
     } */
 
 
     this.processRoomEvents(room, stateEvents, timelineEvents, false); // we deliberately don't add ephemeral events to the timeline
 
-    room.addEphemeralEvents(ephemeralEvents);
+    room.addEphemeralEvents(ephemeralEvents); // local fields must be set before any async calls because call site assumes
+    // synchronous execution prior to emitting SlidingSyncState.Complete
+
+    room.updateMyMembership("join");
     room.recalculate();
 
     if (roomData.initial) {
       client.store.storeRoom(room);
       client.emit(_client.ClientEvent.Room, room);
     } // check if any timeline events should bing and add them to the notifEvents array:
     // we'll purge this once we've fully processed the sync response
 
@@ -573,18 +589,17 @@ class SlidingSyncSdk {
         await this.opts.crypto.onCryptoEvent(e);
       }
     };
 
     await utils.promiseMapSeries(stateEvents, processRoomEvent);
     await utils.promiseMapSeries(timelineEvents, processRoomEvent);
     ephemeralEvents.forEach(function (e) {
       client.emit(_client.ClientEvent.Event, e);
-    });
-    room.updateMyMembership("join"); // Decrypt only the last message in all rooms to make sure we can generate a preview
+    }); // Decrypt only the last message in all rooms to make sure we can generate a preview
     // And decrypt all events after the recorded read receipt to ensure an accurate
     // notification count
 
     room.decryptCriticalEvents();
   }
   /**
    * @param {Room} room
    * @param {MatrixEvent[]} stateEventList A list of state events. This is the state
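For context on the new member-count handling earlier in this file, a hypothetical sliding-sync room payload showing the fields the code above consumes (values invented):

// Illustration only, not part of the patch.
const roomData = {
  initial: true,
  joined_count: 2,    // -> room.currentState.setJoinedMemberCount(2)
  invited_count: 1,   // -> room.currentState.setInvitedMemberCount(1)
  highlight_count: 0,
  required_state: [],
  timeline: [],
};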
--- a/chat/protocols/matrix/lib/matrix-sdk/sync.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/sync.js
@@ -65,23 +65,21 @@ exports.SyncState = SyncState;
   SyncState["Reconnecting"] = "RECONNECTING";
 })(SyncState || (exports.SyncState = SyncState = {}));
 
 const MSC2716_ROOM_VERSIONS = ['org.matrix.msc2716v3'];
 
 function getFilterName(userId, suffix) {
   // scope this on the user ID because people may login on many accounts
   // and they all need to be stored!
-  return "FILTER_SYNC_" + userId + (suffix ? "_" + suffix : "");
+  return `FILTER_SYNC_${userId}` + (suffix ? "_" + suffix : "");
 }
 
 function debuglog(...params) {
-  if (!DEBUG) {
-    return;
-  }
+  if (!DEBUG) return;
 
   _logger.logger.log(...params);
 }
 
 var SetPresence;
 
 (function (SetPresence) {
   SetPresence["Offline"] = "offline";
@@ -130,16 +128,131 @@ class SyncApi {
     _defineProperty(this, "connectionReturnedDefer", null);
 
     _defineProperty(this, "notifEvents", []);
 
     _defineProperty(this, "failedSyncCount", 0);
 
     _defineProperty(this, "storeIsInvalid", false);
 
+    _defineProperty(this, "getPushRules", async () => {
+      try {
+        debuglog("Getting push rules...");
+        const result = await this.client.getPushRules();
+        debuglog("Got push rules");
+        this.client.pushRules = result;
+      } catch (err) {
+        _logger.logger.error("Getting push rules failed", err);
+
+        if (this.shouldAbortSync(err)) return; // wait for saved sync to complete before doing anything else,
+        // otherwise the sync state will end up being incorrect
+
+        debuglog("Waiting for saved sync before retrying push rules...");
+        await this.recoverFromSyncStartupError(this.savedSyncPromise, err);
+        return this.getPushRules(); // try again
+      }
+    });
+
+    _defineProperty(this, "buildDefaultFilter", () => {
+      return new _filter.Filter(this.client.credentials.userId);
+    });
+
+    _defineProperty(this, "checkLazyLoadStatus", async () => {
+      debuglog("Checking lazy load status...");
+
+      if (this.opts.lazyLoadMembers && this.client.isGuest()) {
+        this.opts.lazyLoadMembers = false;
+      }
+
+      if (this.opts.lazyLoadMembers) {
+        debuglog("Checking server lazy load support...");
+        const supported = await this.client.doesServerSupportLazyLoading();
+
+        if (supported) {
+          debuglog("Enabling lazy load on sync filter...");
+
+          if (!this.opts.filter) {
+            this.opts.filter = this.buildDefaultFilter();
+          }
+
+          this.opts.filter.setLazyLoadMembers(true);
+        } else {
+          debuglog("LL: lazy loading requested but not supported " + "by server, so disabling");
+          this.opts.lazyLoadMembers = false;
+        }
+      } // need to vape the store when enabling LL and wasn't enabled before
+
+
+      debuglog("Checking whether lazy loading has changed in store...");
+      const shouldClear = await this.wasLazyLoadingToggled(this.opts.lazyLoadMembers);
+
+      if (shouldClear) {
+        this.storeIsInvalid = true;
+        const reason = _errors.InvalidStoreError.TOGGLED_LAZY_LOADING;
+        const error = new _errors.InvalidStoreError(reason, !!this.opts.lazyLoadMembers);
+        this.updateSyncState(SyncState.Error, {
+          error
+        }); // bail out of the sync loop now: the app needs to respond to this error.
+        // we leave the state as 'ERROR' which isn't great since this normally means
+        // we're retrying. The client must be stopped before clearing the stores anyway
+        // so the app should stop the client, clear the store and start it again.
+
+        _logger.logger.warn("InvalidStoreError: store is not usable: stopping sync.");
+
+        return;
+      }
+
+      if (this.opts.lazyLoadMembers) {
+        this.opts.crypto?.enableLazyLoading();
+      }
+
+      try {
+        debuglog("Storing client options...");
+        await this.client.storeClientOptions();
+        debuglog("Stored client options");
+      } catch (err) {
+        _logger.logger.error("Storing client options failed", err);
+
+        throw err;
+      }
+    });
+
+    _defineProperty(this, "getFilter", async () => {
+      debuglog("Getting filter...");
+      let filter;
+
+      if (this.opts.filter) {
+        filter = this.opts.filter;
+      } else {
+        filter = this.buildDefaultFilter();
+      }
+
+      let filterId;
+
+      try {
+        filterId = await this.client.getOrCreateFilter(getFilterName(this.client.credentials.userId), filter);
+      } catch (err) {
+        _logger.logger.error("Getting filter failed", err);
+
+        if (this.shouldAbortSync(err)) return {}; // wait for saved sync to complete before doing anything else,
+        // otherwise the sync state will end up being incorrect
+
+        debuglog("Waiting for saved sync before retrying filter...");
+        await this.recoverFromSyncStartupError(this.savedSyncPromise, err);
+        return this.getFilter(); // try again
+      }
+
+      return {
+        filter,
+        filterId
+      };
+    });
+
+    _defineProperty(this, "savedSyncPromise", void 0);
+
     _defineProperty(this, "onOnline", () => {
       debuglog("Browser thinks we are back online");
       this.startKeepAlives(0);
     });
 
     this.opts.initialSyncLimit = this.opts.initialSyncLimit ?? 8;
     this.opts.resolveInvitesToProfiles = this.opts.resolveInvitesToProfiles || false;
     this.opts.pollTimeout = this.opts.pollTimeout || 30 * 1000;
@@ -171,17 +284,17 @@ class SyncApi {
     return room;
   }
   /** When we see the marker state change in the room, we know there is some
    * new historical messages imported by MSC2716 `/batch_send` somewhere in
    * the room and we need to throw away the timeline to make sure the
    * historical messages are shown when we paginate `/messages` again.
    * @param {Room} room The room where the marker event was sent
    * @param {MatrixEvent} markerEvent The new marker event
-   * @param {ISetStateOptions} setStateOptions When `timelineWasEmpty` is set
+   * @param {IMarkerFoundOptions} setStateOptions When `timelineWasEmpty` is set
    * as `true`, the given marker event will be ignored
   */
 
 
   onMarkerStateEvent(room, markerEvent, {
     timelineWasEmpty
   } = {}) {
     // We don't need to refresh the timeline if it was empty before the
@@ -307,17 +420,17 @@ class SyncApi {
       response.state = response.state || []; // FIXME: Mostly duplicated from processRoomEvents but not entirely
       // because "state" in this API is at the BEGINNING of the chunk
 
       const oldStateEvents = utils.deepCopy(response.state).map(client.getEventMapper());
       const stateEvents = response.state.map(client.getEventMapper());
       const messages = response.messages.chunk.map(client.getEventMapper()); // XXX: copypasted from /sync until we kill off this minging v1 API stuff)
       // handle presence events (User objects)
 
-      if (response.presence && Array.isArray(response.presence)) {
+      if (Array.isArray(response.presence)) {
         response.presence.map(client.getEventMapper()).forEach(function (presenceEvent) {
           let user = client.store.getUser(presenceEvent.getContent().user_id);
 
           if (user) {
             user.setPresenceEvent(presenceEvent);
           } else {
             user = createNewUser(client, presenceEvent.getContent().user_id);
             user.setPresenceEvent(presenceEvent);
@@ -497,219 +610,112 @@ class SyncApi {
       this.updateSyncState(SyncState.Error, {
         error
       });
       return true;
     }
 
     return false;
   }
+
   /**
    * Main entry point
    */
+  async sync() {
+    this.running = true;
+    global.window?.addEventListener?.("online", this.onOnline, false);
+
+    if (this.client.isGuest()) {
+      // no push rules for guests, no access to POST filter for guests.
+      return this.doSync({});
+    } // Pull the saved sync token out first, before the worker starts sending
+    // all the sync data which could take a while. This will let us send our
+    // first incremental sync request before we've processed our saved data.
 
 
-  sync() {
-    const client = this.client;
-    this.running = true;
+    debuglog("Getting saved sync token...");
+    const savedSyncTokenPromise = this.client.store.getSavedSyncToken().then(tok => {
+      debuglog("Got saved sync token");
+      return tok;
+    });
+    this.savedSyncPromise = this.client.store.getSavedSync().then(savedSync => {
+      debuglog(`Got reply from saved sync, exists? ${!!savedSync}`);
 
-    if (global.window && global.window.addEventListener) {
-      global.window.addEventListener("online", this.onOnline, false);
-    }
-
-    let savedSyncPromise = Promise.resolve();
-    let savedSyncToken = null; // We need to do one-off checks before we can begin the /sync loop.
+      if (savedSync) {
+        return this.syncFromCache(savedSync);
+      }
+    }).catch(err => {
+      _logger.logger.error("Getting saved sync failed", err);
+    }); // We need to do one-off checks before we can begin the /sync loop.
     // These are:
     //   1) We need to get push rules so we can check if events should bing as we get
     //      them from /sync.
     //   2) We need to get/create a filter which we can use for /sync.
     //   3) We need to check the lazy loading option matches what was used in the
     //       stored sync. If it doesn't, we can't use the stored sync.
-
-    const getPushRules = async () => {
-      try {
-        debuglog("Getting push rules...");
-        const result = await client.getPushRules();
-        debuglog("Got push rules");
-        client.pushRules = result;
-      } catch (err) {
-        _logger.logger.error("Getting push rules failed", err);
+    // Now start the first incremental sync request: this can also
+    // take a while so if we set it going now, we can wait for it
+    // to finish while we process our saved sync data.
 
-        if (this.shouldAbortSync(err)) return; // wait for saved sync to complete before doing anything else,
-        // otherwise the sync state will end up being incorrect
-
-        debuglog("Waiting for saved sync before retrying push rules...");
-        await this.recoverFromSyncStartupError(savedSyncPromise, err);
-        getPushRules();
-        return;
-      }
-
-      checkLazyLoadStatus(); // advance to the next stage
-    };
+    await this.getPushRules();
+    await this.checkLazyLoadStatus();
+    const {
+      filterId,
+      filter
+    } = await this.getFilter();
+    if (!filter) return; // bail, getFilter failed
+    // reset the notifications timeline to prepare it to paginate from
+    // the current point in time.
+    // The right solution would be to tie /sync pagination tokens into
+    // /notifications API somehow.
 
-    const buildDefaultFilter = () => {
-      const filter = new _filter.Filter(client.credentials.userId);
-      filter.setTimelineLimit(this.opts.initialSyncLimit);
-      return filter;
-    };
+    this.client.resetNotifTimelineSet();
 
-    const checkLazyLoadStatus = async () => {
-      debuglog("Checking lazy load status...");
-
-      if (this.opts.lazyLoadMembers && client.isGuest()) {
-        this.opts.lazyLoadMembers = false;
-      }
+    if (this.currentSyncRequest === null) {
+      let firstSyncFilter = filterId;
+      const savedSyncToken = await savedSyncTokenPromise;
 
-      if (this.opts.lazyLoadMembers) {
-        debuglog("Checking server lazy load support...");
-        const supported = await client.doesServerSupportLazyLoading();
-
-        if (supported) {
-          debuglog("Enabling lazy load on sync filter...");
+      if (savedSyncToken) {
+        debuglog("Sending first sync request...");
+      } else {
+        debuglog("Sending initial sync request...");
+        const initialFilter = this.buildDefaultFilter();
+        initialFilter.setDefinition(filter.getDefinition());
+        initialFilter.setTimelineLimit(this.opts.initialSyncLimit); // Use an inline filter, no point uploading it for a single usage
 
-          if (!this.opts.filter) {
-            this.opts.filter = buildDefaultFilter();
-          }
-
-          this.opts.filter.setLazyLoadMembers(true);
-        } else {
-          debuglog("LL: lazy loading requested but not supported " + "by server, so disabling");
-          this.opts.lazyLoadMembers = false;
-        }
-      } // need to vape the store when enabling LL and wasn't enabled before
+        firstSyncFilter = JSON.stringify(initialFilter.getDefinition());
+      } // Send this first sync request here so we can then wait for the saved
+      // sync data to finish processing before we process the results of this one.
 
 
-      debuglog("Checking whether lazy loading has changed in store...");
-      const shouldClear = await this.wasLazyLoadingToggled(this.opts.lazyLoadMembers);
-
-      if (shouldClear) {
-        this.storeIsInvalid = true;
-        const reason = _errors.InvalidStoreError.TOGGLED_LAZY_LOADING;
-        const error = new _errors.InvalidStoreError(reason, !!this.opts.lazyLoadMembers);
-        this.updateSyncState(SyncState.Error, {
-          error
-        }); // bail out of the sync loop now: the app needs to respond to this error.
-        // we leave the state as 'ERROR' which isn't great since this normally means
-        // we're retrying. The client must be stopped before clearing the stores anyway
-        // so the app should stop the client, clear the store and start it again.
-
-        _logger.logger.warn("InvalidStoreError: store is not usable: stopping sync.");
-
-        return;
-      }
-
-      if (this.opts.lazyLoadMembers && this.opts.crypto) {
-        this.opts.crypto.enableLazyLoading();
-      }
-
-      try {
-        debuglog("Storing client options...");
-        await this.client.storeClientOptions();
-        debuglog("Stored client options");
-      } catch (err) {
-        _logger.logger.error("Storing client options failed", err);
-
-        throw err;
-      }
-
-      getFilter(); // Now get the filter and start syncing
-    };
-
-    const getFilter = async () => {
-      debuglog("Getting filter...");
-      let filter;
-
-      if (this.opts.filter) {
-        filter = this.opts.filter;
-      } else {
-        filter = buildDefaultFilter();
-      }
-
-      let filterId;
-
-      try {
-        filterId = await client.getOrCreateFilter(getFilterName(client.credentials.userId), filter);
-      } catch (err) {
-        _logger.logger.error("Getting filter failed", err);
-
-        if (this.shouldAbortSync(err)) return; // wait for saved sync to complete before doing anything else,
-        // otherwise the sync state will end up being incorrect
-
-        debuglog("Waiting for saved sync before retrying filter...");
-        await this.recoverFromSyncStartupError(savedSyncPromise, err);
-        getFilter();
-        return;
-      } // reset the notifications timeline to prepare it to paginate from
-      // the current point in time.
-      // The right solution would be to tie /sync pagination tokens into
-      // /notifications API somehow.
+      this.currentSyncRequest = this.doSyncRequest({
+        filter: firstSyncFilter
+      }, savedSyncToken);
+    } // Now wait for the saved sync to finish...
 
 
-      client.resetNotifTimelineSet();
-
-      if (this.currentSyncRequest === null) {
-        // Send this first sync request here so we can then wait for the saved
-        // sync data to finish processing before we process the results of this one.
-        debuglog("Sending first sync request...");
-        this.currentSyncRequest = this.doSyncRequest({
-          filterId
-        }, savedSyncToken);
-      } // Now wait for the saved sync to finish...
-
-
-      debuglog("Waiting for saved sync before starting sync processing...");
-      await savedSyncPromise;
-      this.doSync({
-        filterId
-      });
-    };
+    debuglog("Waiting for saved sync before starting sync processing...");
+    await this.savedSyncPromise; // process the first sync request and continue syncing with the normal filterId
 
-    if (client.isGuest()) {
-      // no push rules for guests, no access to POST filter for guests.
-      this.doSync({});
-    } else {
-      // Pull the saved sync token out first, before the worker starts sending
-      // all the sync data which could take a while. This will let us send our
-      // first incremental sync request before we've processed our saved data.
-      debuglog("Getting saved sync token...");
-      savedSyncPromise = client.store.getSavedSyncToken().then(tok => {
-        debuglog("Got saved sync token");
-        savedSyncToken = tok;
-        debuglog("Getting saved sync...");
-        return client.store.getSavedSync();
-      }).then(savedSync => {
-        debuglog(`Got reply from saved sync, exists? ${!!savedSync}`);
-
-        if (savedSync) {
-          return this.syncFromCache(savedSync);
-        }
-      }).catch(err => {
-        _logger.logger.error("Getting saved sync failed", err);
-      }); // Now start the first incremental sync request: this can also
-      // take a while so if we set it going now, we can wait for it
-      // to finish while we process our saved sync data.
-
-      getPushRules();
-    }
+    return this.doSync({
+      filter: filterId
+    });
   }
   /**
    * Stops the sync object from syncing.
    */
 
 
   stop() {
     debuglog("SyncApi.stop"); // It is necessary to check for the existance of
     // global.window AND global.window.removeEventListener.
     // Some platforms (e.g. React Native) register global.window,
     // but do not have global.window.removeEventListener.
 
-    if (global.window && global.window.removeEventListener) {
-      global.window.removeEventListener("online", this.onOnline, false);
-    }
-
+    global.window?.removeEventListener?.("online", this.onOnline, false);
     this.running = false;
     this.currentSyncRequest?.abort();
 
     if (this.keepAliveTimer) {
       clearTimeout(this.keepAliveTimer);
       this.keepAliveTimer = null;
     }
   }
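The rewrite above replaces the old callback chain (getPushRules -> checkLazyLoadStatus -> getFilter -> doSync) with straight-line awaits. A condensed sketch of the resulting startup flow (simplified; the saved-sync fast path, the guest short-circuit and error handling are omitted):

// Condensed sketch, not the SDK source.
async function startSync(syncApi) {
  await syncApi.getPushRules();                // 1) push rules, so incoming events can "bing"
  await syncApi.checkLazyLoadStatus();         // 2) lazy-load filter and store validity
  const { filter, filterId } = await syncApi.getFilter(); // 3) get or create the /sync filter
  if (!filter) return;                         // getFilter() already reported the failure
  syncApi.client.resetNotifTimelineSet();
  return syncApi.doSync({ filter: filterId }); // 4) enter the long-running sync loop
}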
@@ -737,17 +743,16 @@ class SyncApi {
 
   async syncFromCache(savedSync) {
     debuglog("sync(): not doing HTTP hit, instead returning stored /sync data");
     const nextSyncToken = savedSync.nextBatch; // Set sync token for future incremental syncing
 
     this.client.store.setSyncToken(nextSyncToken); // No previous sync, set old token to null
 
     const syncEventData = {
-      oldSyncToken: null,
       nextSyncToken,
       catchingUp: false,
       fromCache: true
     };
     const data = {
       next_batch: nextSyncToken,
       rooms: savedSync.roomsData,
       account_data: {
@@ -772,225 +777,218 @@ class SyncApi {
    * Invoke me to do /sync calls
    * @param {Object} syncOptions
    * @param {string} syncOptions.filterId
    * @param {boolean} syncOptions.hasSyncedBefore
    */
 
 
   async doSync(syncOptions) {
-    const client = this.client;
+    while (this.running) {
+      const syncToken = this.client.store.getSyncToken();
+      let data;
+
+      try {
+        //debuglog('Starting sync since=' + syncToken);
+        if (this.currentSyncRequest === null) {
+          this.currentSyncRequest = this.doSyncRequest(syncOptions, syncToken);
+        }
+
+        data = await this.currentSyncRequest;
+      } catch (e) {
+        const abort = await this.onSyncError(e);
+        if (abort) return;
+        continue;
+      } finally {
+        this.currentSyncRequest = null;
+      } //debuglog('Completed sync, next_batch=' + data.next_batch);
+      // set the sync token NOW *before* processing the events. We do this so
+      // if something barfs on an event we can skip it rather than constantly
+      // polling with the same token.
+
+
+      this.client.store.setSyncToken(data.next_batch); // Reset after a successful sync
+
+      this.failedSyncCount = 0;
+      await this.client.store.setSyncData(data);
+      const syncEventData = {
+        oldSyncToken: syncToken,
+        nextSyncToken: data.next_batch,
+        catchingUp: this.catchingUp
+      };
+
+      if (this.opts.crypto) {
+        // tell the crypto module we're about to process a sync
+        // response
+        await this.opts.crypto.onSyncWillProcess(syncEventData);
+      }
+
+      try {
+        await this.processSyncResponse(syncEventData, data);
+      } catch (e) {
+        // log the exception with stack if we have it, else fall back
+        // to the plain description
+        _logger.logger.error("Caught /sync error", e); // Emit the exception for client handling
+
+
+        this.client.emit(_client.ClientEvent.SyncUnexpectedError, e);
+      } // update this as it may have changed
+
+
+      syncEventData.catchingUp = this.catchingUp; // emit synced events
+
+      if (!syncOptions.hasSyncedBefore) {
+        this.updateSyncState(SyncState.Prepared, syncEventData);
+        syncOptions.hasSyncedBefore = true;
+      } // tell the crypto module to do its processing. It may block (to do a
+      // /keys/changes request).
+
+
+      if (this.opts.crypto) {
+        await this.opts.crypto.onSyncCompleted(syncEventData);
+      } // keep emitting SYNCING -> SYNCING for clients who want to do bulk updates
+
+
+      this.updateSyncState(SyncState.Syncing, syncEventData);
+
+      if (this.client.store.wantsSave()) {
+        // We always save the device list (if it's dirty) before saving the sync data:
+        // this means we know the saved device list data is at least as fresh as the
+        // stored sync data which means we don't have to worry that we may have missed
+        // device changes. We can also skip the delay since we're not calling this very
+        // frequently (and we don't really want to delay the sync for it).
+        if (this.opts.crypto) {
+          await this.opts.crypto.saveDeviceList(0);
+        } // tell databases that everything is now in a consistent state and can be saved.
+
+
+        this.client.store.save();
+      }
+    }
 
     if (!this.running) {
       debuglog("Sync no longer running: exiting.");
 
       if (this.connectionReturnedDefer) {
         this.connectionReturnedDefer.reject();
         this.connectionReturnedDefer = null;
       }
 
       this.updateSyncState(SyncState.Stopped);
-      return;
     }
-
-    const syncToken = client.store.getSyncToken();
-    let data;
-
-    try {
-      //debuglog('Starting sync since=' + syncToken);
-      if (this.currentSyncRequest === null) {
-        this.currentSyncRequest = this.doSyncRequest(syncOptions, syncToken);
-      }
-
-      data = await this.currentSyncRequest;
-    } catch (e) {
-      this.onSyncError(e, syncOptions);
-      return;
-    } finally {
-      this.currentSyncRequest = null;
-    } //debuglog('Completed sync, next_batch=' + data.next_batch);
-    // set the sync token NOW *before* processing the events. We do this so
-    // if something barfs on an event we can skip it rather than constantly
-    // polling with the same token.
-
-
-    client.store.setSyncToken(data.next_batch); // Reset after a successful sync
-
-    this.failedSyncCount = 0;
-    await client.store.setSyncData(data);
-    const syncEventData = {
-      oldSyncToken: syncToken,
-      nextSyncToken: data.next_batch,
-      catchingUp: this.catchingUp
-    };
-
-    if (this.opts.crypto) {
-      // tell the crypto module we're about to process a sync
-      // response
-      await this.opts.crypto.onSyncWillProcess(syncEventData);
-    }
-
-    try {
-      await this.processSyncResponse(syncEventData, data);
-    } catch (e) {
-      // log the exception with stack if we have it, else fall back
-      // to the plain description
-      _logger.logger.error("Caught /sync error", e); // Emit the exception for client handling
-
-
-      this.client.emit(_client.ClientEvent.SyncUnexpectedError, e);
-    } // update this as it may have changed
-
-
-    syncEventData.catchingUp = this.catchingUp; // emit synced events
-
-    if (!syncOptions.hasSyncedBefore) {
-      this.updateSyncState(SyncState.Prepared, syncEventData);
-      syncOptions.hasSyncedBefore = true;
-    } // tell the crypto module to do its processing. It may block (to do a
-    // /keys/changes request).
-
-
-    if (this.opts.crypto) {
-      await this.opts.crypto.onSyncCompleted(syncEventData);
-    } // keep emitting SYNCING -> SYNCING for clients who want to do bulk updates
-
-
-    this.updateSyncState(SyncState.Syncing, syncEventData);
-
-    if (client.store.wantsSave()) {
-      // We always save the device list (if it's dirty) before saving the sync data:
-      // this means we know the saved device list data is at least as fresh as the
-      // stored sync data which means we don't have to worry that we may have missed
-      // device changes. We can also skip the delay since we're not calling this very
-      // frequently (and we don't really want to delay the sync for it).
-      if (this.opts.crypto) {
-        await this.opts.crypto.saveDeviceList(0);
-      } // tell databases that everything is now in a consistent state and can be saved.
-
-
-      client.store.save();
-    } // Begin next sync
-
-
-    this.doSync(syncOptions);
   }
 
   doSyncRequest(syncOptions, syncToken) {
     const qps = this.getSyncParams(syncOptions, syncToken);
     return this.client.http.authedRequest(undefined, _httpApi.Method.Get, "/sync", qps, undefined, qps.timeout + BUFFER_PERIOD_MS);
   }
 
   getSyncParams(syncOptions, syncToken) {
     let pollTimeout = this.opts.pollTimeout;
 
-    if (this.getSyncState() !== 'SYNCING' || this.catchingUp) {
+    if (this.getSyncState() !== SyncState.Syncing || this.catchingUp) {
       // unless we are happily syncing already, we want the server to return
       // as quickly as possible, even if there are no events queued. This
       // serves two purposes:
       //
       // * When the connection dies, we want to know asap when it comes back,
       //   so that we can hide the error from the user. (We don't want to
       //   have to wait for an event or a timeout).
       //
       // * We want to know if the server has any to_device messages queued up
       //   for us. We do that by calling it with a zero timeout until it
       //   doesn't give us any more to_device messages.
       this.catchingUp = true;
       pollTimeout = 0;
     }
 
-    let filterId = syncOptions.filterId;
+    let filter = syncOptions.filter;
 
-    if (this.client.isGuest() && !filterId) {
-      filterId = this.getGuestFilter();
+    if (this.client.isGuest() && !filter) {
+      filter = this.getGuestFilter();
     }
 
     const qps = {
-      filter: filterId,
+      filter,
       timeout: pollTimeout
     };
 
     if (this.opts.disablePresence) {
       qps.set_presence = SetPresence.Offline;
     }
 
     if (syncToken) {
       qps.since = syncToken;
     } else {
       // use a cachebuster for initialsyncs, to make sure that
       // we don't get a stale sync
       // (https://github.com/vector-im/vector-web/issues/1354)
       qps._cacheBuster = Date.now();
     }
 
-    if (this.getSyncState() == 'ERROR' || this.getSyncState() == 'RECONNECTING') {
+    if ([SyncState.Reconnecting, SyncState.Error].includes(this.getSyncState())) {
       // we think the connection is dead. If it comes back up, we won't know
       // about it till /sync returns. If the timeout= is high, this could
       // be a long time. Set it to 0 when doing retries so we don't have to wait
       // for an event or a timeout before emiting the SYNCING event.
       qps.timeout = 0;
     }
 
     return qps;
   }
 
-  onSyncError(err, syncOptions) {
+  async onSyncError(err) {
     if (!this.running) {
       debuglog("Sync no longer running: exiting");
 
       if (this.connectionReturnedDefer) {
         this.connectionReturnedDefer.reject();
         this.connectionReturnedDefer = null;
       }
 
       this.updateSyncState(SyncState.Stopped);
-      return;
+      return true; // abort
     }
 
     _logger.logger.error("/sync error %s", err);
 
-    _logger.logger.error(err);
-
     if (this.shouldAbortSync(err)) {
-      return;
+      return true; // abort
     }
 
     this.failedSyncCount++;
 
     _logger.logger.log('Number of consecutive failed sync requests:', this.failedSyncCount);
 
     debuglog("Starting keep-alive"); // Note that we do *not* mark the sync connection as
     // lost yet: we only do this if a keepalive poke
     // fails, since long lived HTTP connections will
     // go away sometimes and we shouldn't treat this as
     // erroneous. We set the state to 'reconnecting'
     // instead, so that clients can observe this state
     // if they wish.
 
-    this.startKeepAlives().then(connDidFail => {
-      // Only emit CATCHUP if we detected a connectivity error: if we didn't,
-      // it's quite likely the sync will fail again for the same reason and we
-      // want to stay in ERROR rather than keep flip-flopping between ERROR
-      // and CATCHUP.
-      if (connDidFail && this.getSyncState() === SyncState.Error) {
-        this.updateSyncState(SyncState.Catchup, {
-          oldSyncToken: null,
-          nextSyncToken: null,
-          catchingUp: true
-        });
-      }
-
-      this.doSync(syncOptions);
-    });
+    const keepAlivePromise = this.startKeepAlives();
     this.currentSyncRequest = null; // Transition from RECONNECTING to ERROR after a given number of failed syncs
 
     this.updateSyncState(this.failedSyncCount >= FAILED_SYNC_ERROR_THRESHOLD ? SyncState.Error : SyncState.Reconnecting, {
       error: err
     });
+    const connDidFail = await keepAlivePromise; // Only emit CATCHUP if we detected a connectivity error: if we didn't,
+    // it's quite likely the sync will fail again for the same reason and we
+    // want to stay in ERROR rather than keep flip-flopping between ERROR
+    // and CATCHUP.
+
+    if (connDidFail && this.getSyncState() === SyncState.Error) {
+      this.updateSyncState(SyncState.Catchup, {
+        catchingUp: true
+      });
+    }
+
+    return false;
   }
   /**
    * Process data returned from a sync response and propagate it
    * into the model objects
    *
    * @param {Object} syncEventData Object containing sync tokens associated with this sync
    * @param {Object} data The response from /sync
    */
@@ -1037,34 +1035,34 @@ class SyncApi {
     //    }
     // }
     // TODO-arch:
     // - Each event we pass through needs to be emitted via 'event', can we
     //   do this in one place?
     // - The isBrandNewRoom boilerplate is boilerplatey.
     // handle presence events (User objects)
 
-    if (data.presence && Array.isArray(data.presence.events)) {
+    if (Array.isArray(data.presence?.events)) {
       data.presence.events.map(client.getEventMapper()).forEach(function (presenceEvent) {
         let user = client.store.getUser(presenceEvent.getSender());
 
         if (user) {
           user.setPresenceEvent(presenceEvent);
         } else {
           user = createNewUser(client, presenceEvent.getSender());
           user.setPresenceEvent(presenceEvent);
           client.store.storeUser(user);
         }
 
         client.emit(_client.ClientEvent.Event, presenceEvent);
       });
     } // handle non-room account_data
 
 
-    if (data.account_data && Array.isArray(data.account_data.events)) {
+    if (Array.isArray(data.account_data?.events)) {
       const events = data.account_data.events.map(client.getEventMapper());
       const prevEventsMap = events.reduce((m, c) => {
         m[c.getId()] = client.store.getAccountData(c.getType());
         return m;
       }, {});
       client.store.storeAccountDataEvents(events);
       events.forEach(function (accountDataEvent) {
         // Honour push rules that come down the sync stream but also
@@ -1080,17 +1078,27 @@ class SyncApi {
         client.emit(_client.ClientEvent.AccountData, accountDataEvent, prevEvent);
         return accountDataEvent;
       });
     } // handle to-device events
 
 
     if (Array.isArray(data.to_device?.events) && data.to_device.events.length > 0) {
       const cancelledKeyVerificationTxns = [];
-      data.to_device.events.map(client.getEventMapper()).map(toDeviceEvent => {
+      data.to_device.events.filter(eventJSON => {
+        if (eventJSON.type === _event.EventType.RoomMessageEncrypted && !["m.olm.v1.curve25519-aes-sha2"].includes(eventJSON.content?.algorithm)) {
+          _logger.logger.log('Ignoring invalid encrypted to-device event from ' + eventJSON.sender);
+
+          return false;
+        }
+
+        return true;
+      }).map(client.getEventMapper({
+        toDevice: true
+      })).map(toDeviceEvent => {
         // map is a cheap inline forEach
         // We want to flag m.key.verification.start events as cancelled
         // if there's an accompanying m.key.verification.cancel event, so
         // we pull out the transaction IDs from the cancellation events
         // so we can flag the verification events as cancelled in the loop
         // below.
         if (toDeviceEvent.getType() === "m.key.verification.cancel") {
           const txnId = toDeviceEvent.getContent()['transaction_id'];
@@ -1150,16 +1158,27 @@ class SyncApi {
     }
 
     this.notifEvents = []; // Handle invites
 
     await utils.promiseMapSeries(inviteRooms, async inviteObj => {
       const room = inviteObj.room;
       const stateEvents = this.mapSyncEventsFormat(inviteObj.invite_state, room);
       await this.processRoomEvents(room, stateEvents);
+      const inviter = room.currentState.getStateEvents(_event.EventType.RoomMember, client.getUserId())?.getSender();
+      const parkedHistory = await client.crypto.cryptoStore.takeParkedSharedHistory(room.roomId);
+
+      for (const parked of parkedHistory) {
+        if (parked.senderId === inviter) {
+          await this.client.crypto.olmDevice.addInboundGroupSession(room.roomId, parked.senderKey, parked.forwardingCurve25519KeyChain, parked.sessionId, parked.sessionKey, parked.keysClaimed, true, {
+            sharedHistory: true,
+            untrusted: true
+          });
+        }
+      }
 
       if (inviteObj.isBrandNewRoom) {
         room.recalculate();
         client.store.storeRoom(room);
         client.emit(_client.ClientEvent.Room, room);
       } else {
         // Update room state for invite->reject->invite cycles
         room.recalculate();
@@ -1183,17 +1202,17 @@ class SyncApi {
       const encrypted = client.isRoomEncrypted(room.roomId); // we do this first so it's correct when any of the events fire
 
       if (joinObj.unread_notifications) {
         room.setUnreadNotificationCount(_room.NotificationCountType.Total, joinObj.unread_notifications.notification_count); // We track unread notifications ourselves in encrypted rooms, so don't
         // bother setting it here. We trust our calculations better than the
         // server's for this case, and therefore will assume that our non-zero
         // count is accurate.
 
-        if (!encrypted || encrypted && room.getUnreadNotificationCount(_room.NotificationCountType.Highlight) <= 0) {
+        if (!encrypted || room.getUnreadNotificationCount(_room.NotificationCountType.Highlight) <= 0) {
           room.setUnreadNotificationCount(_room.NotificationCountType.Highlight, joinObj.unread_notifications.highlight_count);
         }
       }
 
       joinObj.timeline = joinObj.timeline || {};
 
       if (joinObj.isBrandNewRoom) {
         // set the back-pagination token. Do this *before* adding any
@@ -1237,20 +1256,25 @@ class SyncApi {
           room.resetLiveTimeline(joinObj.timeline.prev_batch, this.opts.canResetEntireTimeline(room.roomId) ? null : syncEventData.oldSyncToken); // We have to assume any gap in any timeline is
           // reason to stop incrementally tracking notifications and
           // reset the timeline.
 
           client.resetNotifTimelineSet();
         }
       }
 
-      await this.processRoomEvents(room, stateEvents, events, syncEventData.fromCache); // set summary after processing events,
+      try {
+        await this.processRoomEvents(room, stateEvents, events, syncEventData.fromCache);
+      } catch (e) {
+        _logger.logger.error(`Failed to process events on room ${room.roomId}:`, e);
+      } // set summary after processing events,
       // because it will trigger a name calculation
       // which needs the room state to be up to date
 
+
       if (joinObj.summary) {
         room.setSummary(joinObj.summary);
       } // we deliberately don't add ephemeral events to the timeline
 
 
       room.addEphemeralEvents(ephemeralEvents); // we deliberately don't add accountData to the timeline
 
       room.addAccountData(accountDataEvents);
--- a/chat/protocols/matrix/lib/matrix-sdk/utils.js
+++ b/chat/protocols/matrix/lib/matrix-sdk/utils.js
@@ -15,17 +15,16 @@ exports.deepCompare = deepCompare;
 exports.deepCopy = deepCopy;
 exports.deepSortedObjectEntries = deepSortedObjectEntries;
 exports.defer = defer;
 exports.encodeParams = encodeParams;
 exports.encodeUri = encodeUri;
 exports.ensureNoTrailingSlash = ensureNoTrailingSlash;
 exports.escapeRegExp = escapeRegExp;
 exports.getCrypto = getCrypto;
-exports.getPrivateReadReceiptField = getPrivateReadReceiptField;
 exports.globToRegexp = globToRegexp;
 exports.internaliseString = internaliseString;
 exports.isFunction = isFunction;
 exports.isNullOrUndefined = isNullOrUndefined;
 exports.isNumber = isNumber;
 exports.isSupportedReceiptType = isSupportedReceiptType;
 exports.lexicographicCompare = lexicographicCompare;
 exports.nextString = nextString;
@@ -285,46 +284,33 @@ function deepCompare(x, y) {
     }
 
     for (let i = 0; i < x.length; i++) {
       if (!deepCompare(x[i], y[i])) {
         return false;
       }
     }
   } else {
-    // disable jshint "The body of a for in should be wrapped in an if
-    // statement"
-
-    /* jshint -W089 */
     // check that all of y's direct keys are in x
-    let p;
-
-    for (p in y) {
+    for (const p in y) {
       if (y.hasOwnProperty(p) !== x.hasOwnProperty(p)) {
         return false;
       }
     } // finally, compare each of x's keys with y
 
 
-    for (p in y) {
-      // eslint-disable-line guard-for-in
-      if (y.hasOwnProperty(p) !== x.hasOwnProperty(p)) {
-        return false;
-      }
-
-      if (!deepCompare(x[p], y[p])) {
+    for (const p in x) {
+      if (y.hasOwnProperty(p) !== x.hasOwnProperty(p) || !deepCompare(x[p], y[p])) {
         return false;
       }
     }
   }
-  /* jshint +W089 */
-
 
   return true;
-} // Dev note: This returns a tuple, but jsdoc doesn't like that. https://github.com/jsdoc/jsdoc/issues/1703
+} // Dev note: This returns an array of tuples, but jsdoc doesn't like that. https://github.com/jsdoc/jsdoc/issues/1703
 
 /**
  * Creates an array of object properties/values (entries) then
  * sorts the result by key, recursively. The input object must
  * ensure it does not have loops. If the input is not an object
  * then it will be returned as-is.
  * @param {*} obj The object to get entries of
  * @returns {Array} The entries, sorted by key.
@@ -404,22 +390,22 @@ function normalize(str) {
 
 
 const removeHiddenCharsRegex = /[\u2000-\u200F\u202A-\u202F\u0300-\u036F\uFEFF\u061C\s]/g;
 
 function escapeRegExp(string) {
   return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
 }
 
-function globToRegexp(glob, extended) {
+function globToRegexp(glob, extended = false) {
   // From
   // https://github.com/matrix-org/synapse/blob/abbee6b29be80a77e05730707602f3bbfc3f38cb/synapse/push/__init__.py#L132
   // Because micromatch is about 130KB with dependencies,
   // and minimatch is not much better.
-  const replacements = [[/\\\*/g, '.*'], [/\?/g, '.'], extended !== false && [/\\\[(!|)(.*)\\]/g, (_match, neg, pat) => ['[', neg ? '^' : '', pat.replace(/\\-/, '-'), ']'].join('')]];
+  const replacements = [[/\\\*/g, '.*'], [/\?/g, '.'], !extended && [/\\\[(!|)(.*)\\]/g, (_match, neg, pat) => ['[', neg ? '^' : '', pat.replace(/\\-/, '-'), ']'].join('')]];
   return replacements.reduce( // https://github.com/microsoft/TypeScript/issues/30134
   (pat, args) => args ? pat.replace(args[0], args[1]) : pat, escapeRegExp(glob));
 }
 
 function ensureNoTrailingSlash(url) {
   if (url && url.endsWith("/")) {
     return url.slice(0, -1);
   } else {
@@ -730,17 +716,11 @@ function getContentTimestampWithFallback
  * Latest timestamp first
  */
 
 
 function sortEventsByLatestContentTimestamp(left, right) {
   return getContentTimestampWithFallback(right) - getContentTimestampWithFallback(left);
 }
 
-async function getPrivateReadReceiptField(client) {
-  if (await client.doesServerSupportUnstableFeature("org.matrix.msc2285.stable")) return _read_receipts.ReceiptType.ReadPrivate;
-  if (await client.doesServerSupportUnstableFeature("org.matrix.msc2285")) return _read_receipts.ReceiptType.UnstableReadPrivate;
-  return null;
-}
-
 function isSupportedReceiptType(receiptType) {
-  return [_read_receipts.ReceiptType.Read, _read_receipts.ReceiptType.ReadPrivate, _read_receipts.ReceiptType.UnstableReadPrivate].includes(receiptType);
+  return [_read_receipts.ReceiptType.Read, _read_receipts.ReceiptType.ReadPrivate].includes(receiptType);
 }
\ No newline at end of file
--- a/chat/protocols/matrix/lib/moz.build
+++ b/chat/protocols/matrix/lib/moz.build
@@ -86,16 +86,17 @@ EXTRA_JS_MODULES.matrix.matrix_sdk.crypt
 
 EXTRA_JS_MODULES.matrix.matrix_sdk.models += [
     "matrix-sdk/models/beacon.js",
     "matrix-sdk/models/event-context.js",
     "matrix-sdk/models/event-status.js",
     "matrix-sdk/models/event-timeline-set.js",
     "matrix-sdk/models/event-timeline.js",
     "matrix-sdk/models/event.js",
+    "matrix-sdk/models/invites-ignorer.js",
     "matrix-sdk/models/MSC3089Branch.js",
     "matrix-sdk/models/MSC3089TreeSpace.js",
     "matrix-sdk/models/related-relations.js",
     "matrix-sdk/models/relations-container.js",
     "matrix-sdk/models/relations.js",
     "matrix-sdk/models/room-member.js",
     "matrix-sdk/models/room-state.js",
     "matrix-sdk/models/room-summary.js",
@@ -112,16 +113,17 @@ EXTRA_JS_MODULES.matrix.matrix_sdk.store
     "matrix-sdk/store/indexeddb-store-worker.js",
     "matrix-sdk/store/indexeddb.js",
     "matrix-sdk/store/local-storage-events-emitter.js",
     "matrix-sdk/store/memory.js",
     "matrix-sdk/store/stub.js",
 ]
 
 EXTRA_JS_MODULES.matrix.matrix_sdk.types += [
+    "matrix-sdk/@types/auth.js",
     "matrix-sdk/@types/beacon.js",
     "matrix-sdk/@types/event.js",
     "matrix-sdk/@types/extensible_events.js",
     "matrix-sdk/@types/location.js",
     "matrix-sdk/@types/partials.js",
     "matrix-sdk/@types/PushRules.js",
     "matrix-sdk/@types/read_receipts.js",
     "matrix-sdk/@types/search.js",
--- a/chat/protocols/matrix/matrix-sdk.jsm
+++ b/chat/protocols/matrix/matrix-sdk.jsm
@@ -96,19 +96,21 @@ let loader = Loader({
     "../randomstring": matrixPath + "matrix_sdk/randomstring.js",
     "../ReEmitter": matrixPath + "matrix_sdk/ReEmitter.js",
     "../sync-accumulator": matrixPath + "matrix_sdk/sync-accumulator.js",
     "../utils": matrixPath + "matrix_sdk/utils.js",
     "../utils.js": matrixPath + "matrix_sdk/utils.js",
     "../../utils": matrixPath + "matrix_sdk/utils.js",
 
     // @types
+    "@types/auth": matrixPath + "matrix_sdk/types/auth.js",
     "@types/beacon": matrixPath + "matrix_sdk/types/beacon.js",
     "@types/event": matrixPath + "matrix_sdk/types/event.js",
     "../@types/event": matrixPath + "matrix_sdk/types/event.js",
+    "../../@types/event": matrixPath + "matrix_sdk/types/event.js",
     "@types/extensible_events":
       matrixPath + "matrix_sdk/types/extensible_events.js",
     "@types/location": matrixPath + "matrix_sdk/types/location.js",
     "@types/partials": matrixPath + "matrix_sdk/types/partials.js",
     "@types/PushRules": matrixPath + "matrix_sdk/types/PushRules.js",
     "@types/read_receipts": matrixPath + "matrix_sdk/types/read_receipts.js",
     "@types/requests": matrixPath + "empty.js",
     "@types/search": matrixPath + "matrix_sdk/types/search.js",
@@ -128,16 +130,18 @@ let loader = Loader({
     "../olmlib": matrixPath + "matrix_sdk/crypto/olmlib.js",
     "crypto/olmlib": matrixPath + "matrix_sdk/crypto/olmlib.js",
     OlmDevice: matrixPath + "matrix_sdk/crypto/OlmDevice.js",
     "../OlmDevice": matrixPath + "matrix_sdk/crypto/OlmDevice.js",
     "crypto/recoverykey": matrixPath + "matrix_sdk/crypto/recoverykey.js",
     recoverykey: matrixPath + "matrix_sdk/crypto/recoverykey.js",
     OutgoingRoomKeyRequestManager:
       matrixPath + "matrix_sdk/crypto/OutgoingRoomKeyRequestManager.js",
+    "../OutgoingRoomKeyRequestManager":
+      matrixPath + "matrix_sdk/crypto/OutgoingRoomKeyRequestManager.js",
     "crypto/RoomList": matrixPath + "matrix_sdk/crypto/RoomList.js",
     "crypto/CrossSigning": matrixPath + "matrix_sdk/crypto/CrossSigning.js",
     CrossSigning: matrixPath + "matrix_sdk/crypto/CrossSigning.js",
     EncryptionSetup: matrixPath + "matrix_sdk/crypto/EncryptionSetup.js",
     SecretStorage: matrixPath + "matrix_sdk/crypto/SecretStorage.js",
     aes: matrixPath + "matrix_sdk/crypto/aes.js",
     dehydration: matrixPath + "matrix_sdk/crypto/dehydration.js",
     "crypto/dehydration": matrixPath + "matrix_sdk/crypto/dehydration.js",