
ref(replays): Refactor how we fetch segments to use the new batch api endpoint (#38281)

Instead of issuing potentially hundreds of requests, one per `segment_id`, we can fetch the segment data in batches, paging through the results with a cursor.
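
The paging pattern looks roughly like the sketch below (a simplified standalone version; the real hook uses `api.requestPromise`, replaced here by `fetch` for illustration):

    import parseLinkHeader, {ParsedHeader} from 'sentry/utils/parseLinkHeader';

    // Sketch: page through the batch endpoint until the server signals,
    // via the Link header, that no further results exist.
    async function fetchAllSegmentPages(rootUrl: string): Promise<unknown[]> {
      const pages: unknown[] = [];
      let next: ParsedHeader = {href: rootUrl, results: true, cursor: ''};

      while (next.results) {
        const resp = await fetch(rootUrl + '&cursor=' + next.cursor);
        pages.push(await resp.json());
        // Sentry pagination includes a `next` rel on every page; on the
        // last page it comes back with results: false, ending the loop.
        next = parseLinkHeader(resp.headers.get('Link')).next;
      }
      return pages;
    }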

Fixes #38208
Ryan Albrecht · 2 years ago · parent commit f3d31eee22

+ 0 - 1
package.json

@@ -116,7 +116,6 @@
     "mockdate": "3.0.5",
     "moment": "2.29.4",
     "moment-timezone": "0.5.34",
-    "pako": "^2.0.4",
     "papaparse": "^5.3.2",
     "pegjs": "^0.10.0",
     "pegjs-loader": "^0.5.6",

+ 2 - 1
static/app/utils/parseLinkHeader.tsx

@@ -1,4 +1,5 @@
-type Result = Record<string, {cursor: string; href: string; results: boolean | null}>;
+export type ParsedHeader = {cursor: string; href: string; results: boolean | null};
+type Result = Record<string, ParsedHeader>;
 
 export default function parseLinkHeader(header: string | null): Result {
   if (header === null || header === '') {
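
For reference, each exported `ParsedHeader` describes one `rel` entry of a pagination `Link` header. A hypothetical Sentry-style header and how it parses (URLs and cursor values are illustrative):

    import parseLinkHeader from 'sentry/utils/parseLinkHeader';

    const header =
      '<https://sentry.io/api/0/example/?cursor=0:0:1>; rel="previous"; results="false"; cursor="0:0:1", ' +
      '<https://sentry.io/api/0/example/?cursor=0:100:0>; rel="next"; results="true"; cursor="0:100:0"';

    const links = parseLinkHeader(header);
    // links.next     -> {href: '…?cursor=0:100:0', results: true, cursor: '0:100:0'}
    // links.previous -> results is false: there is no page before this one.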

+ 35 - 64
static/app/utils/replays/hooks/useReplayData.tsx

@@ -1,8 +1,7 @@
 import {useCallback, useEffect, useMemo, useState} from 'react';
 import * as Sentry from '@sentry/react';
-import {inflate} from 'pako';
 
-import type {ResponseMeta} from 'sentry/api';
+import parseLinkHeader, {ParsedHeader} from 'sentry/utils/parseLinkHeader';
 import flattenListOfObjects from 'sentry/utils/replays/flattenListOfObjects';
 import {mapResponseToReplayRecord} from 'sentry/utils/replays/replayDataUtils';
 import ReplayReader from 'sentry/utils/replays/replayReader';
@@ -13,7 +12,6 @@ import type {
   ReplayCrumb,
   ReplayError,
   ReplayRecord,
-  ReplaySegment,
   ReplaySpan,
 } from 'sentry/views/replays/types';
 
@@ -104,30 +102,6 @@ const INITIAL_STATE: State = Object.freeze({
   spans: undefined,
 });
 
-async function decompressSegmentData(
-  data: any,
-  _textStatus: string | undefined,
-  resp: ResponseMeta | undefined
-) {
-  // for non-compressed events, parse and return
-  try {
-    return mapRRWebAttachments(JSON.parse(data));
-  } catch (error) {
-    // swallow exception.. if we can't parse it, it's going to be compressed
-  }
-
-  // for non-compressed events, parse and return
-  try {
-    // for compressed events, inflate the blob and map the events
-    const responseBlob = await resp?.rawResponse.blob();
-    const responseArray = (await responseBlob?.arrayBuffer()) as Uint8Array;
-    const parsedPayload = JSON.parse(inflate(responseArray, {to: 'string'}));
-    return mapRRWebAttachments(parsedPayload);
-  } catch (error) {
-    return {};
-  }
-}
-
 /**
  * A react hook to load core replay data over the network.
  *
@@ -160,33 +134,35 @@ function useReplayData({replaySlug, orgSlug}: Options): Result {
     return response.data;
   }, [api, orgSlug, projectSlug, replayId]);
 
-  const fetchSegmentList = useCallback(async () => {
-    const response = await api.requestPromise(
-      `/projects/${orgSlug}/${projectSlug}/replays/${replayId}/recording-segments/`
-    );
-    return response.data as ReplaySegment[];
-  }, [api, orgSlug, projectSlug, replayId]);
+  const fetchAllRRwebEvents = useCallback(async () => {
+    const rootUrl = `/projects/${orgSlug}/${projectSlug}/replays/${replayId}/recording-segments/?download`;
+    let next: ParsedHeader = {
+      href: rootUrl,
+      results: true,
+      cursor: '',
+    };
+
+    const segmentRanges: any = [];
+    // TODO(replay): It would be good to load the first page of results then
+    // start to render the UI while the next N pages continue to get fetched in
+    // the background.
+    while (next.results) {
+      const url = rootUrl + '&cursor=' + next.cursor;
+
+      const [data, _textStatus, resp] = await api.requestPromise(url, {
+        includeAllArgs: true,
+      });
+      segmentRanges.push(data);
+      const links = parseLinkHeader(resp?.getResponseHeader('Link') ?? '');
+      next = links.next;
+    }
 
-  const fetchRRWebEvents = useCallback(
-    async (segmentIds: number[]) => {
-      const attachments = await Promise.all(
-        segmentIds.map(async segmentId => {
-          const response = await api.requestPromise(
-            `/projects/${orgSlug}/${projectSlug}/replays/${replayId}/recording-segments/${segmentId}/?download`,
-            {
-              includeAllArgs: true,
-            }
-          );
-
-          return decompressSegmentData(...response);
-        })
-      );
-
-      // ReplayAttachment[] => ReplayAttachment (merge each key of ReplayAttachment)
-      return flattenListOfObjects(attachments);
-    },
-    [api, replayId, orgSlug, projectSlug]
-  );
+    const rrwebEvents = segmentRanges
+      .flatMap(segment => segment)
+      .flatMap(attachments => mapRRWebAttachments(attachments));
+
+    return flattenListOfObjects(rrwebEvents);
+  }, [api, orgSlug, projectSlug, replayId]);
 
   const fetchErrors = useCallback(
     async (replayRecord: ReplayRecord) => {
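
Each page fetched by the loop above is a list of segments, and `mapRRWebAttachments` turns each segment into an object of event arrays; `flattenListOfObjects` then merges that list into a single object. A rough sketch of the merge, assuming every value is an array (the helper's real signature may differ):

    // Sketch: merge a list of objects by concatenating the array values
    // that share a key, e.g.
    // [{crumbs: [a]}, {crumbs: [b], spans: [c]}] -> {crumbs: [a, b], spans: [c]}
    function flattenListOfObjects<T>(
      list: Array<Record<string, T[]>>
    ): Record<string, T[]> {
      const merged: Record<string, T[]> = {};
      for (const obj of list) {
        for (const [key, values] of Object.entries(obj)) {
          merged[key] = (merged[key] ?? []).concat(values);
        }
      }
      return merged;
    }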
@@ -213,17 +189,12 @@ function useReplayData({replaySlug, orgSlug}: Options): Result {
     setState(INITIAL_STATE);
 
     try {
-      const [record, segments] = await Promise.all([fetchReplay(), fetchSegmentList()]);
-      const replayRecord = mapResponseToReplayRecord(record);
-
-      // TODO(replays): Something like `range(record.countSegments)` could work
-      // once we make sure that segments have sequential id's and are not dropped.
-      const segmentIds = segments.map(segment => segment.segmentId);
-
-      const [attachments, errors] = await Promise.all([
-        fetchRRWebEvents(segmentIds),
-        fetchErrors(replayRecord),
+      const [record, attachments] = await Promise.all([
+        fetchReplay(),
+        fetchAllRRwebEvents(),
       ]);
+      const replayRecord = mapResponseToReplayRecord(record);
+      const errors = await fetchErrors(replayRecord);
 
       setState(prev => ({
         ...prev,
@@ -243,7 +214,7 @@ function useReplayData({replaySlug, orgSlug}: Options): Result {
         fetching: false,
       });
     }
-  }, [fetchReplay, fetchSegmentList, fetchRRWebEvents, fetchErrors]);
+  }, [fetchReplay, fetchAllRRwebEvents, fetchErrors]);
 
   useEffect(() => {
     loadEvents();