Bug 1606276 - Spocs from cache load optimization r=gvn
author Scott <scott.downe@gmail.com>
Thu, 21 May 2020 16:51:18 +0000
changeset 531493 8b1bfe0d44265063bf65677a2c37a1ebed0dc397
parent 531492 9c5b9c33920bd312f86167e9577a028e980a4678
child 531494 b55f7e60274403af65725021ef85361137846f4b
push id 37440
push user abutkovits@mozilla.com
push date Fri, 22 May 2020 09:43:16 +0000
treeherder mozilla-central@fbf71e4d2e21 [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers gvn
bugs 1606276
milestone 78.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1606276 - Spocs from cache load optimization r=gvn Differential Revision: https://phabricator.services.mozilla.com/D75128
browser/components/newtab/lib/DiscoveryStreamFeed.jsm
browser/components/newtab/test/unit/lib/DiscoveryStreamFeed.test.js
--- a/browser/components/newtab/lib/DiscoveryStreamFeed.jsm
+++ b/browser/components/newtab/lib/DiscoveryStreamFeed.jsm
@@ -435,16 +435,19 @@ this.DiscoveryStreamFeed = class Discove
       if (!newFeeds[url]) {
         // We initially stub this out so we don't fetch dupes,
         // we then fill in with the proper object inside the promise.
         newFeeds[url] = {};
         const feedPromise = this.getComponentFeed(url, isStartup);
 
         feedPromise
           .then(feed => {
+            // If we stored the result of filter in feed cache as it happened,
+            // I think we could reduce doing this for cache fetches.
+            // Bug https://bugzilla.mozilla.org/show_bug.cgi?id=1606277
             newFeeds[url] = this.filterRecommendations(feed);
             sendUpdate({
               type: at.DISCOVERY_STREAM_FEED_UPDATE,
               data: {
                 feed: newFeeds[url],
                 url,
               },
             });
@@ -596,16 +599,21 @@ this.DiscoveryStreamFeed = class Discove
       ...(flight_id ? { flight_id } : {}),
     };
   }
 
   async loadSpocs(sendUpdate, isStartup) {
     const cachedData = (await this.cache.get()) || {};
     let spocsState;
 
+    let frequencyCapped = [];
+    let blockedItems = [];
+    let belowMinScore = [];
+    let flightDupes = [];
+
     const { placements } = this.store.getState().DiscoveryStream.spocs;
 
     if (this.showSpocs) {
       spocsState = cachedData.spocs;
       if (this.isExpired({ cachedData, key: "spocs", isStartup })) {
         const endpoint = this.store.getState().DiscoveryStream.spocs
           .spocs_endpoint;
         const start = Cu.now();
@@ -631,18 +639,107 @@ this.DiscoveryStreamFeed = class Discove
           this.spocsRequestTime = Math.round(Cu.now() - start);
           spocsState = {
             lastUpdated: Date.now(),
             spocs: {
               ...spocsResponse,
             },
           };
 
+          const spocsResultPromises = this.getPlacements().map(
+            async placement => {
+              const freshSpocs = spocsState.spocs[placement.name];
+
+              if (!freshSpocs) {
+                return;
+              }
+
+              // spocs can be returned as an array, or an object with an items array.
+              // We want to normalize this so all our spocs have an items array.
+              // There can also be some meta data for title and context.
+              // This is mostly because of backwards compat.
+              const {
+                items: normalizedSpocsItems,
+                title,
+                context,
+              } = this.normalizeSpocsItems(freshSpocs);
+
+              if (!normalizedSpocsItems || !normalizedSpocsItems.length) {
+                // In the case of old data, we still want to ensure we normalize the data structure,
+                // even if it's empty. We expect the empty data to be an object with items array,
+                // and not just an empty array.
+                spocsState.spocs = {
+                  ...spocsState.spocs,
+                  [placement.name]: {
+                    title,
+                    context,
+                    items: [],
+                  },
+                };
+                return;
+              }
+
+              // Migrate flight_id
+              const { data: migratedSpocs } = this.migrateFlightId(
+                normalizedSpocsItems
+              );
+
+              const {
+                data: capResult,
+                filtered: caps,
+              } = this.frequencyCapSpocs(migratedSpocs);
+              frequencyCapped = [...frequencyCapped, ...caps];
+
+              const {
+                data: blockedResults,
+                filtered: blocks,
+              } = this.filterBlocked(capResult);
+              blockedItems = [...blockedItems, ...blocks];
+
+              // It's important that we score before removing flight dupes.
+              // This ensures we remove the lower-ranking dupes.
+              const {
+                data: scoredResults,
+                filtered: minScoreFilter,
+              } = await this.scoreItems(blockedResults);
+
+              belowMinScore = [...belowMinScore, ...minScoreFilter];
+
+              let {
+                data: dupesResult,
+                filtered: dupes,
+              } = this.removeFlightDupes(scoredResults);
+              flightDupes = [...flightDupes, ...dupes];
+
+              spocsState.spocs = {
+                ...spocsState.spocs,
+                [placement.name]: {
+                  title,
+                  context,
+                  items: dupesResult,
+                },
+              };
+            }
+          );
+          await Promise.all(spocsResultPromises);
+
           this.cleanUpFlightImpressionPref(spocsState.spocs);
-          await this.cache.set("spocs", spocsState);
+          await this.cache.set("spocs", {
+            lastUpdated: spocsState.lastUpdated,
+            spocs: spocsState.spocs,
+          });
+          this._sendSpocsFill(
+            {
+              frequency_cap: frequencyCapped,
+              blocked_by_user: blockedItems,
+              below_min_score: belowMinScore,
+              flight_duplicate: flightDupes,
+            },
+            true
+          );
         } else {
           Cu.reportError("No response for spocs_endpoint prop");
         }
       }
     }
 
     // Use good data if we have it, otherwise nothing.
     // We can have no data if spocs set to off.
@@ -651,109 +748,23 @@ this.DiscoveryStreamFeed = class Discove
     spocsState =
       spocsState && spocsState.spocs
         ? spocsState
         : {
             lastUpdated: Date.now(),
             spocs: {},
           };
 
-    let frequencyCapped = [];
-    let blockedItems = [];
-    let belowMinScore = [];
-    let flightDupes = [];
-    const spocsResultPromises = this.getPlacements().map(async placement => {
-      const freshSpocs = spocsState.spocs[placement.name];
-
-      if (!freshSpocs) {
-        return;
-      }
-
-      // spocs can be returns as an array, or an object with an items array.
-      // We want to normalize this so all our spocs have an items array.
-      // There can also be some meta data for title and context.
-      // This is mostly because of backwards compat.
-      const {
-        items: normalizedSpocsItems,
-        title,
-        context,
-        flight_id,
-      } = this.normalizeSpocsItems(freshSpocs);
-
-      if (!normalizedSpocsItems || !normalizedSpocsItems.length) {
-        // In the case of old data, we still want to ensure we normalize the data structure,
-        // even if it's empty. We expect the empty data to be an object with items array,
-        // and not just an empty array.
-        spocsState.spocs = {
-          ...spocsState.spocs,
-          [placement.name]: {
-            title,
-            context,
-            items: [],
-          },
-        };
-        return;
-      }
-
-      // Migrate flight_id
-      const { data: migratedSpocs } = this.migrateFlightId(
-        normalizedSpocsItems
-      );
-
-      const { data: capResult, filtered: caps } = this.frequencyCapSpocs(
-        migratedSpocs
-      );
-      frequencyCapped = [...frequencyCapped, ...caps];
-
-      const { data: blockedResults, filtered: blocks } = this.filterBlocked(
-        capResult
-      );
-      blockedItems = [...blockedItems, ...blocks];
-
-      let {
-        data: transformResult,
-        filtered: transformFilter,
-      } = await this.transform(blockedResults);
-      let {
-        below_min_score: minScoreFilter,
-        flight_duplicate: dupes,
-      } = transformFilter;
-      belowMinScore = [...belowMinScore, ...minScoreFilter];
-      flightDupes = [...flightDupes, ...dupes];
-
-      spocsState.spocs = {
-        ...spocsState.spocs,
-        [placement.name]: {
-          title,
-          context,
-          ...(flight_id ? { flight_id } : {}),
-          items: transformResult,
-        },
-      };
-    });
-    await Promise.all(spocsResultPromises);
-
     sendUpdate({
       type: at.DISCOVERY_STREAM_SPOCS_UPDATE,
       data: {
         lastUpdated: spocsState.lastUpdated,
         spocs: spocsState.spocs,
       },
     });
-    // TODO make sure this works in other places we use it.
-    // TODO make sure to also validate all of these that they still contain the right ites in the array.
-    this._sendSpocsFill(
-      {
-        frequency_cap: frequencyCapped,
-        blocked_by_user: blockedItems,
-        below_min_score: belowMinScore,
-        flight_duplicate: flightDupes,
-      },
-      true
-    );
   }
 
   async clearSpocs() {
     const endpoint = this.store.getState().Prefs.values[
       PREF_SPOCS_CLEAR_ENDPOINT
     ];
     if (!endpoint) {
       return;
@@ -903,56 +914,45 @@ this.DiscoveryStreamFeed = class Discove
       return {
         data: filteredItems,
         filtered,
       };
     }
     return { data, filtered };
   }
 
-  async transform(spocs) {
+  removeFlightDupes(spocs) {
     if (spocs && spocs.length) {
       const spocsPerDomain =
         this.store.getState().DiscoveryStream.spocs.spocs_per_domain || 1;
       const flightMap = {};
       const flightDuplicates = [];
 
-      // This order of operations is intended.
-      const {
-        data: items,
-        filtered: belowMinScoreItems,
-      } = await this.scoreItems(spocs);
       // This removes flight dupes.
       // We do this only after scoring and sorting because that way
       // we can keep the first item we see, and end up keeping the highest scored.
-      const newSpocs = items.filter(s => {
+      const newSpocs = spocs.filter(s => {
         if (!flightMap[s.flight_id]) {
           flightMap[s.flight_id] = 1;
           return true;
         } else if (flightMap[s.flight_id] < spocsPerDomain) {
           flightMap[s.flight_id]++;
           return true;
         }
         flightDuplicates.push(s);
         return false;
       });
       return {
         data: newSpocs,
-        filtered: {
-          below_min_score: belowMinScoreItems,
-          flight_duplicate: flightDuplicates,
-        },
+        filtered: flightDuplicates,
       };
     }
     return {
       data: spocs,
-      filtered: {
-        below_min_score: [],
-        flight_duplicate: [],
-      },
+      filtered: [],
     };
   }
 
   // For backwards compatibility, older spoc endpoint don't have flight_id,
   // but instead had campaign_id we can use
   //
   // @param {Object} data  An object that might have a SPOCS array.
   // @returns {Object} An object with a property `data` as the result.
@@ -1509,25 +1509,17 @@ this.DiscoveryStreamFeed = class Discove
   cleanUpFlightImpressionPref(data) {
     let flightIds = [];
     this.placementsForEach(placement => {
       const newSpocs = data[placement.name];
       if (!newSpocs) {
         return;
       }
 
-      // We need to do a small items migration here.
-      // In bug 1567271 we moved spoc data array into items,
-      // but we also need backwards comp here, because
-      // this is the only place where we use spocs before the migration.
-      // We however don't need to do a total migration, we *just* need the items.
-      // A total migration would involve setting the data with new values,
-      // and also ensuring metadata like context and title are there or empty strings.
-      // see #normalizeSpocsItems function.
-      const items = newSpocs.items || newSpocs;
+      const items = newSpocs.items || [];
       flightIds = [...flightIds, ...items.map(s => `${s.flight_id}`)];
     });
     if (flightIds && flightIds.length) {
       this.cleanUpImpressionPref(
         id => !flightIds.includes(id),
         PREF_SPOC_IMPRESSIONS
       );
     }
@@ -1679,35 +1671,42 @@ this.DiscoveryStreamFeed = class Discove
           this.recordFlightImpression(action.data.flightId);
 
           // Apply frequency capping to SPOCs in the redux store, only update the
           // store if the SPOCs are changed.
           const spocsState = this.store.getState().DiscoveryStream.spocs;
 
           let frequencyCapped = [];
           this.placementsForEach(placement => {
-            const freshSpocs = spocsState.data[placement.name];
-            if (!freshSpocs || !freshSpocs.items) {
+            const spocs = spocsState.data[placement.name];
+            if (!spocs || !spocs.items) {
               return;
             }
 
-            const { data: newSpocs, filtered } = this.frequencyCapSpocs(
-              freshSpocs.items
+            const { data: capResult, filtered } = this.frequencyCapSpocs(
+              spocs.items
             );
             frequencyCapped = [...frequencyCapped, ...filtered];
 
             spocsState.data = {
               ...spocsState.data,
               [placement.name]: {
-                ...freshSpocs,
-                items: newSpocs,
+                ...spocs,
+                items: capResult,
               },
             };
           });
+
           if (frequencyCapped.length) {
+            // Update cache here so we don't need to recalculate frequency caps on loads from cache.
+            await this.cache.set("spocs", {
+              lastUpdated: spocsState.lastUpdated,
+              spocs: spocsState.data,
+            });
+
             this.store.dispatch(
               ac.AlsoToPreloaded({
                 type: at.DISCOVERY_STREAM_SPOCS_UPDATE,
                 data: {
                   lastUpdated: spocsState.lastUpdated,
                   spocs: spocsState.data,
                 },
               })
@@ -1716,29 +1715,53 @@ this.DiscoveryStreamFeed = class Discove
           }
         }
         break;
       // This is fired from the browser, it has no concept of spocs, flight or pocket.
       // We match the blocked url with our available spoc urls to see if there is a match.
       // I suspect we *could* instead do this in BLOCK_URL but I'm not sure.
       case at.PLACES_LINK_BLOCKED:
         if (this.showSpocs) {
+          let blockedItems = [];
           const spocsState = this.store.getState().DiscoveryStream.spocs;
-          let spocsList = [];
+
           this.placementsForEach(placement => {
             const spocs = spocsState.data[placement.name];
             if (spocs && spocs.items && spocs.items.length) {
-              spocsList = [...spocsList, ...spocs.items];
+              const blockedResults = [];
+              const blocks = spocs.items.filter(s => {
+                const blocked = s.url === action.data.url;
+                if (!blocked) {
+                  blockedResults.push(s);
+                }
+                return blocked;
+              });
+
+              blockedItems = [...blockedItems, ...blocks];
+
+              spocsState.data = {
+                ...spocsState.data,
+                [placement.name]: {
+                  ...spocs,
+                  items: blockedResults,
+                },
+              };
             }
           });
-          const filtered = spocsList.filter(s => s.url === action.data.url);
-          if (filtered.length) {
-            this._sendSpocsFill({ blocked_by_user: filtered }, false);
 
-            // If we're blocking a spoc, we want a slightly different treatment for open tabs.
+          if (blockedItems.length) {
+            // Update cache here so we don't need to recalculate blocks on loads from cache.
+            await this.cache.set("spocs", {
+              lastUpdated: spocsState.lastUpdated,
+              spocs: spocsState.data,
+            });
+
+            this._sendSpocsFill({ blocked_by_user: blockedItems }, false);
+            // If we're blocking a spoc, we want open tabs to have
+            // a slightly different treatment from future tabs.
             // AlsoToPreloaded updates the source data and preloaded tabs with a new spoc.
             // BroadcastToContent updates open tabs with a non spoc instead of a new spoc.
             this.store.dispatch(
               ac.AlsoToPreloaded({
                 type: at.DISCOVERY_STREAM_LINK_BLOCKED,
                 data: action.data,
               })
             );
@@ -1746,16 +1769,17 @@ this.DiscoveryStreamFeed = class Discove
               ac.BroadcastToContent({
                 type: at.DISCOVERY_STREAM_SPOC_BLOCKED,
                 data: action.data,
               })
             );
             break;
           }
         }
+
         this.store.dispatch(
           ac.BroadcastToContent({
             type: at.DISCOVERY_STREAM_LINK_BLOCKED,
             data: action.data,
           })
         );
         break;
       case at.UNINIT:
--- a/browser/components/newtab/test/unit/lib/DiscoveryStreamFeed.test.js
+++ b/browser/components/newtab/test/unit/lib/DiscoveryStreamFeed.test.js
@@ -1099,112 +1099,139 @@ describe("DiscoveryStreamFeed", () => {
       const fourthCall = feed.cache.set.getCall(3);
       assert.deepEqual(firstCall.args, ["layout", {}]);
       assert.deepEqual(secondCall.args, ["feeds", {}]);
       assert.deepEqual(thirdCall.args, ["spocs", {}]);
       assert.deepEqual(fourthCall.args, ["affinities", {}]);
     });
   });
 
-  describe("#transform", () => {
+  describe("#scoreItems", () => {
     it("should return initial data if spocs are empty", async () => {
-      const { data: result } = await feed.transform({ spocs: [] });
-
-      assert.equal(result.spocs.length, 0);
+      const { data: result } = await feed.scoreItems([]);
+
+      assert.equal(result.length, 0);
     });
+
     it("should sort based on item_score", async () => {
-      const { data: result } = await feed.transform([
+      const { data: result } = await feed.scoreItems([
         { id: 2, flight_id: 2, item_score: 0.8, min_score: 0.1 },
         { id: 3, flight_id: 3, item_score: 0.7, min_score: 0.1 },
         { id: 1, flight_id: 1, item_score: 0.9, min_score: 0.1 },
       ]);
 
       assert.deepEqual(result, [
         { id: 1, flight_id: 1, item_score: 0.9, score: 0.9, min_score: 0.1 },
         { id: 2, flight_id: 2, item_score: 0.8, score: 0.8, min_score: 0.1 },
         { id: 3, flight_id: 3, item_score: 0.7, score: 0.7, min_score: 0.1 },
       ]);
     });
+
     it("should remove items with scores lower than min_score", async () => {
-      const { data: result, filtered } = await feed.transform([
+      const { data: result, filtered } = await feed.scoreItems([
         { id: 2, flight_id: 2, item_score: 0.8, min_score: 0.9 },
         { id: 3, flight_id: 3, item_score: 0.7, min_score: 0.7 },
         { id: 1, flight_id: 1, item_score: 0.9, min_score: 0.8 },
       ]);
 
       assert.deepEqual(result, [
         { id: 1, flight_id: 1, item_score: 0.9, score: 0.9, min_score: 0.8 },
         { id: 3, flight_id: 3, item_score: 0.7, score: 0.7, min_score: 0.7 },
       ]);
 
-      assert.deepEqual(filtered.below_min_score, [
+      assert.deepEqual(filtered, [
         { id: 2, flight_id: 2, item_score: 0.8, min_score: 0.9, score: 0.8 },
       ]);
     });
+
     it("should add a score prop to spocs", async () => {
-      const { data: result } = await feed.transform([
+      const { data: result } = await feed.scoreItems([
         { flight_id: 1, item_score: 0.9, min_score: 0.1 },
       ]);
 
       assert.equal(result[0].score, 0.9);
     });
-    it("should filter out duplicate flights", async () => {
-      const { data: result, filtered } = await feed.transform([
-        { id: 1, flight_id: 2, item_score: 0.8, min_score: 0.1 },
-        { id: 2, flight_id: 3, item_score: 0.6, min_score: 0.1 },
-        { id: 3, flight_id: 1, item_score: 0.9, min_score: 0.1 },
-        { id: 4, flight_id: 3, item_score: 0.7, min_score: 0.1 },
-        { id: 5, flight_id: 1, item_score: 0.9, min_score: 0.1 },
+    it("should score items using item_score and min_score", async () => {
+      const { data: result, filtered } = await feed.scoreItems([
+        { item_score: 0.8, min_score: 0.1 },
+        { item_score: 0.5, min_score: 0.6 },
+        { item_score: 0.7, min_score: 0.1 },
+        { item_score: 0.9, min_score: 0.1 },
+      ]);
+      assert.deepEqual(result, [
+        { item_score: 0.9, score: 0.9, min_score: 0.1 },
+        { item_score: 0.8, score: 0.8, min_score: 0.1 },
+        { item_score: 0.7, score: 0.7, min_score: 0.1 },
+      ]);
+      assert.deepEqual(filtered, [
+        { item_score: 0.5, min_score: 0.6, score: 0.5 },
+      ]);
+    });
+  });
+  describe("#removeFlightDupes", () => {
+    it("should no op with no params", () => {
+      const { data: result, filtered } = feed.removeFlightDupes([]);
+
+      assert.equal(result.length, 0);
+      assert.equal(filtered.length, 0);
+    });
+    it("should filter out duplicate flights", () => {
+      const { data: result, filtered } = feed.removeFlightDupes([
+        { id: 1, flight_id: 2 },
+        { id: 2, flight_id: 3 },
+        { id: 3, flight_id: 1 },
+        { id: 4, flight_id: 3 },
+        { id: 5, flight_id: 1 },
       ]);
 
       assert.deepEqual(result, [
-        { id: 3, flight_id: 1, item_score: 0.9, score: 0.9, min_score: 0.1 },
-        { id: 1, flight_id: 2, item_score: 0.8, score: 0.8, min_score: 0.1 },
-        { id: 4, flight_id: 3, item_score: 0.7, score: 0.7, min_score: 0.1 },
+        { id: 1, flight_id: 2 },
+        { id: 2, flight_id: 3 },
+        { id: 3, flight_id: 1 },
       ]);
 
-      assert.deepEqual(filtered.flight_duplicate, [
-        { id: 5, flight_id: 1, item_score: 0.9, min_score: 0.1, score: 0.9 },
-        { id: 2, flight_id: 3, item_score: 0.6, min_score: 0.1, score: 0.6 },
+      assert.deepEqual(filtered, [
+        { id: 4, flight_id: 3 },
+        { id: 5, flight_id: 1 },
       ]);
     });
     it("should filter out duplicate flight while using spocs_per_domain", async () => {
       sandbox.stub(feed.store, "getState").returns({
         DiscoveryStream: {
           spocs: { spocs_per_domain: 2 },
         },
       });
 
-      const { data: result, filtered } = await feed.transform([
-        { id: 1, flight_id: 2, item_score: 0.8, min_score: 0.1 },
-        { id: 2, flight_id: 3, item_score: 0.6, min_score: 0.1 },
-        { id: 3, flight_id: 1, item_score: 0.6, min_score: 0.1 },
-        { id: 4, flight_id: 3, item_score: 0.7, min_score: 0.1 },
-        { id: 5, flight_id: 1, item_score: 0.9, min_score: 0.1 },
-        { id: 6, flight_id: 2, item_score: 0.6, min_score: 0.1 },
-        { id: 7, flight_id: 3, item_score: 0.7, min_score: 0.1 },
-        { id: 8, flight_id: 1, item_score: 0.8, min_score: 0.1 },
-        { id: 9, flight_id: 3, item_score: 0.7, min_score: 0.1 },
-        { id: 10, flight_id: 1, item_score: 0.8, min_score: 0.1 },
+      const { data: result, filtered } = feed.removeFlightDupes([
+        { id: 1, flight_id: 2 },
+        { id: 2, flight_id: 3 },
+        { id: 3, flight_id: 1 },
+        { id: 4, flight_id: 3 },
+        { id: 5, flight_id: 1 },
+        { id: 6, flight_id: 2 },
+        { id: 7, flight_id: 3 },
+        { id: 8, flight_id: 1 },
+        { id: 9, flight_id: 3 },
+        { id: 10, flight_id: 1 },
       ]);
 
       assert.deepEqual(result, [
-        { id: 5, flight_id: 1, item_score: 0.9, score: 0.9, min_score: 0.1 },
-        { id: 1, flight_id: 2, item_score: 0.8, score: 0.8, min_score: 0.1 },
-        { id: 8, flight_id: 1, item_score: 0.8, score: 0.8, min_score: 0.1 },
-        { id: 4, flight_id: 3, item_score: 0.7, score: 0.7, min_score: 0.1 },
-        { id: 7, flight_id: 3, item_score: 0.7, score: 0.7, min_score: 0.1 },
-        { id: 6, flight_id: 2, item_score: 0.6, score: 0.6, min_score: 0.1 },
+        { id: 1, flight_id: 2 },
+        { id: 2, flight_id: 3 },
+        { id: 3, flight_id: 1 },
+        { id: 4, flight_id: 3 },
+        { id: 5, flight_id: 1 },
+        { id: 6, flight_id: 2 },
       ]);
 
-      assert.deepEqual(filtered.flight_duplicate, [
-        { id: 10, flight_id: 1, item_score: 0.8, min_score: 0.1, score: 0.8 },
-        { id: 9, flight_id: 3, item_score: 0.7, min_score: 0.1, score: 0.7 },
-        { id: 2, flight_id: 3, item_score: 0.6, min_score: 0.1, score: 0.6 },
-        { id: 3, flight_id: 1, item_score: 0.6, min_score: 0.1, score: 0.6 },
+      assert.deepEqual(filtered, [
+        { id: 7, flight_id: 3 },
+        { id: 8, flight_id: 1 },
+        { id: 9, flight_id: 3 },
+        { id: 10, flight_id: 1 },
       ]);
     });
   });
 
   describe("#filterBlocked", () => {
     it("should return initial data if spocs are empty", () => {
       const { data: result } = feed.filterBlocked([]);
 
@@ -1516,30 +1543,16 @@ describe("DiscoveryStreamFeed", () => {
       sandbox.stub(feed, "writeDataPref").returns();
 
       feed.cleanUpFlightImpressionPref(fakeSpocs);
 
       assert.calledWith(feed.writeDataPref, SPOC_IMPRESSION_TRACKING_PREF, {
         "flight-2": [-1],
       });
     });
-    it("should use old spocs data strucutre", async () => {
-      const fakeSpocs = {
-        spocs: [
-          {
-            flight_id: "flight-2",
-          },
-        ],
-      };
-      sandbox.stub(feed, "cleanUpImpressionPref").returns();
-
-      feed.cleanUpFlightImpressionPref(fakeSpocs);
-
-      assert.calledOnce(feed.cleanUpImpressionPref);
-    });
   });
 
   describe("#recordTopRecImpressions", () => {
     it("should add a rec id to the rec impression pref", () => {
       sandbox.stub(feed, "readDataPref").returns({});
       sandbox.stub(feed, "writeDataPref");
 
       feed.recordTopRecImpressions("rec");