import * as Sentry from '@sentry/react';
import memoize from 'lodash/memoize';
import {type Duration, duration} from 'moment-timezone';

import {defined} from 'sentry/utils';
import domId from 'sentry/utils/domId';
import localStorageWrapper from 'sentry/utils/localStorage';
import clamp from 'sentry/utils/number/clamp';
import extractDomNodes from 'sentry/utils/replays/extractDomNodes';
import hydrateBreadcrumbs, {
  replayInitBreadcrumb,
} from 'sentry/utils/replays/hydrateBreadcrumbs';
import hydrateErrors from 'sentry/utils/replays/hydrateErrors';
import hydrateFrames from 'sentry/utils/replays/hydrateFrames';
import {
  clipEndFrame,
  recordingEndFrame,
} from 'sentry/utils/replays/hydrateRRWebRecordingFrames';
import hydrateSpans from 'sentry/utils/replays/hydrateSpans';
import {replayTimestamps} from 'sentry/utils/replays/replayDataUtils';
import replayerStepper from 'sentry/utils/replays/replayerStepper';
import type {
  BreadcrumbFrame,
  ClipWindow,
  ErrorFrame,
  fullSnapshotEvent,
  incrementalSnapshotEvent,
  MemoryFrame,
  OptionFrame,
  RecordingFrame,
  serializedNodeWithId,
  SlowClickFrame,
  SpanFrame,
  VideoEvent,
  WebVitalFrame,
} from 'sentry/utils/replays/types';
import {
  BreadcrumbCategories,
  EventType,
  IncrementalSource,
  isCLSFrame,
  isConsoleFrame,
  isDeadClick,
  isDeadRageClick,
  isMetaFrame,
  isPaintFrame,
  isTouchEndFrame,
  isTouchStartFrame,
  isWebVitalFrame,
  NodeType,
} from 'sentry/utils/replays/types';
import type {ReplayError, ReplayRecord} from 'sentry/views/replays/types';

interface ReplayReaderParams {
  /**
   * Loaded segment data
   *
   * This is a mix of rrweb data, breadcrumbs and spans/transactions sorted by time.
   * All three types are mixed together.
   */
  attachments: unknown[] | undefined;

  /**
   * Error objects related to this replay
   *
   * Error instances could be frontend, backend, or come from the error platform
   * like performance-errors or replay-errors
   */
  errors: ReplayError[] | undefined;

  /**
   * Is replay data still fetching?
   */
  fetching: boolean;

  /**
   * The root Replay event, created at the start of the browser session.
   */
  replayRecord: ReplayRecord | undefined;

  /**
   * If provided, the replay will be clipped to this window.
   */
  clipWindow?: ClipWindow;

  /**
   * The org's feature flags
   */
  featureFlags?: string[];
}

type RequiredNotNull<T> = {
  [P in keyof T]: NonNullable<T[P]>;
};

const sortFrames = (a: {timestampMs: number}, b: {timestampMs: number}) =>
  a.timestampMs - b.timestampMs;
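
/**
 * Drop `ui.click` breadcrumbs that duplicate a `ui.slowClickDetected` crumb
 * for the same node at the same timestamp, so each click is only represented
 * once (the slow-click frame carries the extra detail).
 */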
function removeDuplicateClicks(frames: BreadcrumbFrame[]) {
  const slowClickFrames = frames.filter(
    frame => frame.category === 'ui.slowClickDetected'
  );
  const clickFrames = frames.filter(frame => frame.category === 'ui.click');
  const otherFrames = frames.filter(
    frame => !(slowClickFrames.includes(frame) || clickFrames.includes(frame))
  );
  const uniqueClickFrames: BreadcrumbFrame[] = clickFrames.filter(clickFrame => {
    return !slowClickFrames.some(
      slowClickFrame =>
        slowClickFrame.data &&
        'nodeId' in slowClickFrame.data &&
        clickFrame.data &&
        'nodeId' in clickFrame.data &&
        slowClickFrame.data.nodeId === clickFrame.data.nodeId &&
        slowClickFrame.timestampMs === clickFrame.timestampMs
    );
  });

  return uniqueClickFrames.concat(otherFrames).concat(slowClickFrames);
}

// If a `navigation` crumb and `navigation.*` span happen within this timeframe,
// we'll consider them duplicates.
const DUPLICATE_NAV_THRESHOLD_MS = 2;

/**
 * Return a list of BreadcrumbFrames, where any navigation crumb is removed if
 * there is a matching navigation.* span to replace it.
 *
 * SpanFrames are preferred because they render with more specific titles.
 */
function removeDuplicateNavCrumbs(
  crumbFrames: BreadcrumbFrame[],
  spanFrames: SpanFrame[]
) {
  const navCrumbs = crumbFrames.filter(crumb => crumb.category === 'navigation');
  const otherBreadcrumbFrames = crumbFrames.filter(
    crumb => crumb.category !== 'navigation'
  );
  const navSpans = spanFrames.filter(span => span.op.startsWith('navigation.'));
  const uniqueNavCrumbs = navCrumbs.filter(
    crumb =>
      !navSpans.some(
        span => Math.abs(crumb.offsetMs - span.offsetMs) <= DUPLICATE_NAV_THRESHOLD_MS
      )
  );

  return otherBreadcrumbFrames.concat(uniqueNavCrumbs);
}
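
/**
 * Hydrates raw replay attachments and error rows into typed, sorted frames
 * (rrweb events, breadcrumbs, spans, errors) and exposes memoized getters for
 * the replay UI. Instances are created through `ReplayReader.factory()`, which
 * returns null while any required input is still missing.
 */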
export default class ReplayReader {
  static factory({
    attachments,
    errors,
    replayRecord,
    clipWindow,
    featureFlags,
    fetching,
  }: ReplayReaderParams) {
    if (!attachments || !replayRecord || !errors) {
      return null;
    }

    try {
      return new ReplayReader({
        attachments,
        errors,
        replayRecord,
        featureFlags,
        fetching,
        clipWindow,
      });
    } catch (err) {
      Sentry.captureException(err);

      // If something happens then we don't really know if it's the attachments
      // array or errors array to blame (it's probably attachments though).
      // Either way we can use the replayRecord to show some metadata, and then
      // put an error message below it.
      return new ReplayReader({
        attachments: [],
        errors: [],
        featureFlags,
        fetching,
        replayRecord,
        clipWindow,
      });
    }
  }

  private constructor({
    attachments,
    errors,
    featureFlags,
    fetching,
    replayRecord,
    clipWindow,
  }: RequiredNotNull<ReplayReaderParams>) {
    this._cacheKey = domId('replayReader-');
    this._fetching = fetching;

    if (replayRecord.is_archived) {
      this._replayRecord = replayRecord;
      const archivedReader = new Proxy(this, {
        get(_target, prop, _receiver) {
          if (prop === 'getReplay') {
            return () => replayRecord;
          }
          return () => {};
        },
      });
      return archivedReader;
    }

    const {breadcrumbFrames, optionFrame, rrwebFrames, spanFrames, videoFrames} =
      hydrateFrames(attachments);

    if (localStorageWrapper.getItem('REPLAY-BACKEND-TIMESTAMPS') !== '1') {
      // TODO(replays): We should get correct timestamps from the backend instead
      // of having to fix them up here.
      const {startTimestampMs, endTimestampMs} = replayTimestamps(
        replayRecord,
        rrwebFrames,
        breadcrumbFrames,
        spanFrames
      );

      this.timestampDeltas = {
        startedAtDelta: startTimestampMs - replayRecord.started_at.getTime(),
        finishedAtDelta: endTimestampMs - replayRecord.finished_at.getTime(),
      };

      replayRecord.started_at = new Date(startTimestampMs);
      replayRecord.finished_at = new Date(endTimestampMs);
      replayRecord.duration = duration(
        replayRecord.finished_at.getTime() - replayRecord.started_at.getTime()
      );
    }

    // Hydrate the data we were given
    this._replayRecord = replayRecord;
    this._featureFlags = featureFlags;
    // Errors don't need to be sorted here; they will be merged with breadcrumbs
    // and spans in the getter and then sorted together.
    const {errorFrames, feedbackFrames} = hydrateErrors(replayRecord, errors);
    this._errors = errorFrames.sort(sortFrames);
    // RRWeb events are not sorted here; they are fetched in sorted order.
    this._sortedRRWebEvents = rrwebFrames;
    this._videoEvents = videoFrames;
    // Breadcrumbs must be sorted. Crumbs like `slowClick` and `multiClick` will
    // have the same timestamp as the click breadcrumb, but will be emitted a
    // few seconds later.
    this._sortedBreadcrumbFrames = hydrateBreadcrumbs(replayRecord, breadcrumbFrames)
      .concat(feedbackFrames)
      .sort(sortFrames);
    // Spans must be sorted so components like the Timeline and Network Chart
    // have an easier time rendering.
    this._sortedSpanFrames = hydrateSpans(replayRecord, spanFrames).sort(sortFrames);
    this._optionFrame = optionFrame;

    // Insert extra records to satisfy minimum requirements for the UI.
    // e.g. We have buffered events from the browser that happen *before* replay
    // recording is started; these can show up in the timeline (navigation) and
    // in the Network table.
    //
    // We fake the start time so that the timelines of these UI components and
    // the replay recording all match up.
    this._sortedBreadcrumbFrames.unshift(replayInitBreadcrumb(replayRecord));
    const startTimestampMs = replayRecord.started_at.getTime();
    const firstMeta = rrwebFrames.find(frame => frame.type === EventType.Meta);
    const firstSnapshot = rrwebFrames.find(
      frame => frame.type === EventType.FullSnapshot
    );
    if (firstMeta && firstSnapshot && firstMeta.timestamp > startTimestampMs) {
      this._sortedRRWebEvents.unshift({
        ...firstSnapshot,
        timestamp: startTimestampMs,
      });
      this._sortedRRWebEvents.unshift({
        ...firstMeta,
        timestamp: startTimestampMs,
      });
    }
    this._sortedRRWebEvents.push(recordingEndFrame(replayRecord));

    this._duration = replayRecord.duration;

    if (clipWindow) {
      this._applyClipWindow(clipWindow);
    }
  }

  public timestampDeltas = {startedAtDelta: 0, finishedAtDelta: 0};

  private _cacheKey: string;
  private _duration: Duration = duration(0);
  private _errors: ErrorFrame[] = [];
  private _featureFlags: string[] | undefined = [];
  private _fetching: boolean = true;
  private _optionFrame: undefined | OptionFrame;
  private _replayRecord: ReplayRecord;
  private _sortedBreadcrumbFrames: BreadcrumbFrame[] = [];
  private _sortedRRWebEvents: RecordingFrame[] = [];
  private _sortedSpanFrames: SpanFrame[] = [];
  private _startOffsetMs = 0;
  private _videoEvents: VideoEvent[] = [];
  private _clipWindow: ClipWindow | undefined = undefined;
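
  /**
   * Clamp the requested clip window to the replay's start/end, shrink the
   * duration to the clipped range, and trim/re-offset frames so they render
   * relative to the clip. Video replays skip the global `_startOffsetMs` and
   * hand the window to the video player instead (see comments below).
   */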
  private _applyClipWindow = (clipWindow: ClipWindow) => {
    const clipStartTimestampMs = clamp(
      clipWindow.startTimestampMs,
      this._replayRecord.started_at.getTime(),
      this._replayRecord.finished_at.getTime()
    );
    const clipEndTimestampMs = clamp(
      clipWindow.endTimestampMs,
      clipStartTimestampMs,
      this._replayRecord.finished_at.getTime()
    );

    this._duration = duration(clipEndTimestampMs - clipStartTimestampMs);

    // For video replays, we need to bypass setting the global offset (_startOffsetMs)
    // because it messes with the playback time by causing it
    // to become negative sometimes. Instead we pass a clip window directly into
    // the video player, which runs on an external timer
    if (this.isVideoReplay()) {
      this._clipWindow = {
        startTimestampMs: clipStartTimestampMs,
        endTimestampMs: clipEndTimestampMs,
      };

      // Trim error frames and update offsets so they show inside the clip window
      // Do this in here since we bypass setting the global offset
      // Eventually when we have video breadcrumbs we'll probably need to trim them here too
      const updateVideoFrameOffsets = <T extends {offsetMs: number}>(
        frames: Array<T>
      ) => {
        const offset = clipStartTimestampMs - this._replayRecord.started_at.getTime();
        return frames.map(frame => ({
          ...frame,
          offsetMs: frame.offsetMs - offset,
        }));
      };

      this._errors = updateVideoFrameOffsets(
        this._trimFramesToClipWindow(
          this._errors,
          clipStartTimestampMs,
          clipEndTimestampMs
        )
      );

      return;
    }

    // For RRWeb frames we only trim from the end because playback will
    // not work otherwise. The start offset is used to begin playback at
    // the correct time.
    this._sortedRRWebEvents = this._sortedRRWebEvents.filter(
      frame => frame.timestamp <= clipEndTimestampMs
    );
    this._sortedRRWebEvents.push(clipEndFrame(clipEndTimestampMs));
    this._startOffsetMs = clipStartTimestampMs - this._replayRecord.started_at.getTime();

    // We also only trim from the back for breadcrumbs/spans to keep
    // historical information about the replay, such as the current URL.
    this._sortedBreadcrumbFrames = this._updateFrameOffsets(
      this._trimFramesToClipWindow(
        this._sortedBreadcrumbFrames,
        this._replayRecord.started_at.getTime(),
        clipEndTimestampMs
      )
    );
    this._sortedSpanFrames = this._updateFrameOffsets(
      this._trimFramesToClipWindow(
        this._sortedSpanFrames,
        this._replayRecord.started_at.getTime(),
        clipEndTimestampMs
      )
    );

    this._errors = this._updateFrameOffsets(
      this._trimFramesToClipWindow(this._errors, clipStartTimestampMs, clipEndTimestampMs)
    );
  };

  /**
   * Filters out frames that are outside of the supplied window
   */
  _trimFramesToClipWindow = <T extends {timestampMs: number}>(
    frames: Array<T>,
    startTimestampMs: number,
    endTimestampMs: number
  ) => {
    return frames.filter(
      frame =>
        frame.timestampMs >= startTimestampMs && frame.timestampMs <= endTimestampMs
    );
  };

  /**
   * Updates the offsetMs of all frames to be relative to the start of the clip window
   */
  _updateFrameOffsets = <T extends {offsetMs: number}>(frames: Array<T>) => {
    return frames.map(frame => ({
      ...frame,
      offsetMs: frame.offsetMs - this.getStartOffsetMs(),
    }));
  };

  toJSON = () => this._cacheKey;

  processingErrors = memoize(() => {
    return [
      this.getRRWebFrames().length < 2
        ? `Replay has ${this.getRRWebFrames().length} frames`
        : null,
      !this.getRRWebFrames().some(frame => frame.type === EventType.Meta)
        ? 'Missing Meta Frame'
        : null,
    ].filter(defined);
  });

  hasProcessingErrors = () => {
    return this.processingErrors().length;
  };

  getExtractDomNodes = memoize(async () => {
    if (this._fetching) {
      return null;
    }
    const {onVisitFrame, shouldVisitFrame} = extractDomNodes;
    const results = await replayerStepper({
      frames: this.getDOMFrames(),
      rrwebEvents: this.getRRWebFrames(),
      startTimestampMs: this.getReplay().started_at.getTime() ?? 0,
      onVisitFrame,
      shouldVisitFrame,
    });
    return results;
  });

  getClipWindow = () => this._clipWindow;

  /**
   * @returns Duration of Replay (milliseconds)
   */
  getDurationMs = () => {
    return this._duration.asMilliseconds();
  };

  getStartOffsetMs = () => this._startOffsetMs;

  getStartTimestampMs = () => {
    // For video replays we bypass setting the global _startOffsetMs
    // because it messes with the player time by causing it to
    // be negative in some cases, but we still need that calculated value here.
    const start =
      this.isVideoReplay() && this._clipWindow
        ? this._clipWindow?.startTimestampMs - this._replayRecord.started_at.getTime()
        : this._startOffsetMs;

    return this._replayRecord.started_at.getTime() + start;
  };

  getReplay = () => {
    return this._replayRecord;
  };

  getRRWebFrames = () => this._sortedRRWebEvents;

  getRRWebFramesWithSnapshots = memoize(() => {
    const eventsWithSnapshots: RecordingFrame[] = [];
    const events = this._sortedRRWebEvents;
    events.forEach((e, index) => {
      // For taps, sometimes the timestamp difference between TouchStart
      // and TouchEnd is too small. This clamps the tap to a min time
      // if the difference is less, so that the rrweb tap is visible and obvious.
      if (isTouchStartFrame(e) && index < events.length - 2) {
        const nextEvent = events[index + 1];
        if (isTouchEndFrame(nextEvent)) {
          nextEvent.timestamp = Math.max(nextEvent.timestamp, e.timestamp + 500);
        }
      }
      eventsWithSnapshots.push(e);
      if (isMetaFrame(e)) {
        // Create a mock full snapshot event, in order to render rrweb gestures properly
        // Need to add one for every meta event we see
        // The hardcoded data.node.id here should match the ID of the data being sent
        // in the `positions` arrays
        eventsWithSnapshots.push({
          type: EventType.FullSnapshot,
          data: {
            node: {
              type: NodeType.Document,
              childNodes: [
                {
                  type: NodeType.DocumentType,
                  id: 1,
                  name: 'html',
                  publicId: '',
                  systemId: '',
                },
                {
                  type: NodeType.Element,
                  id: 2,
                  tagName: 'html',
                  attributes: {
                    lang: 'en',
                  },
                  childNodes: [],
                },
              ],
              id: 0,
            },
            initialOffset: {
              top: 0,
              left: 0,
            },
          },
          timestamp: e.timestamp,
        });
      }
    });
    return eventsWithSnapshots;
  });

  getRRwebTouchEvents = memoize(() =>
    this.getRRWebFramesWithSnapshots().filter(
      e => isTouchEndFrame(e) || isTouchStartFrame(e)
    )
  );

  getBreadcrumbFrames = () => this._sortedBreadcrumbFrames;

  getRRWebMutations = () =>
    this._sortedRRWebEvents.filter(
      event =>
        [EventType.IncrementalSnapshot].includes(event.type) &&
        [IncrementalSource.Mutation].includes(
          (event as incrementalSnapshotEvent).data.source
        ) // filter only for mutation events
    );

  getErrorFrames = () => this._errors;

  getConsoleFrames = memoize(() =>
    this._sortedBreadcrumbFrames.filter(frame => isConsoleFrame(frame))
  );

  getNavigationFrames = memoize(() =>
    [
      ...this._sortedBreadcrumbFrames.filter(frame => frame.category === 'replay.init'),
      ...this._sortedSpanFrames.filter(frame => frame.op.startsWith('navigation.')),
    ].sort(sortFrames)
  );

  getMobileNavigationFrames = memoize(() =>
    [
      ...this._sortedBreadcrumbFrames.filter(frame =>
        ['replay.init', 'navigation'].includes(frame.category)
      ),
    ].sort(sortFrames)
  );

  getNetworkFrames = memoize(() =>
    this._sortedSpanFrames.filter(
      frame => frame.op.startsWith('navigation.') || frame.op.startsWith('resource.')
    )
  );
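
  /**
   * Breadcrumb and span frames that reference a DOM node (`nodeId`/`nodeIds`),
   * with duplicate clicks removed and non-dead slow clicks and multi-clicks
   * filtered out. These frames feed `getExtractDomNodes()`.
   */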
  getDOMFrames = memoize(() =>
    [
      ...removeDuplicateClicks(
        this._sortedBreadcrumbFrames
          .filter(frame => 'nodeId' in (frame.data ?? {}))
          .filter(
            frame =>
              !(
                (frame.category === 'ui.slowClickDetected' &&
                  !isDeadClick(frame as SlowClickFrame)) ||
                frame.category === 'ui.multiClick'
              )
          )
      ),
      ...this._sortedSpanFrames.filter(
        frame => 'nodeId' in (frame.data ?? {}) || 'nodeIds' in (frame.data ?? {})
      ),
    ].sort(sortFrames)
  );

  getMemoryFrames = memoize(() =>
    this._sortedSpanFrames.filter((frame): frame is MemoryFrame => frame.op === 'memory')
  );

  getCustomFrames = memoize(() =>
    this._sortedBreadcrumbFrames.filter(
      frame => !BreadcrumbCategories.includes(frame.category)
    )
  );

  getChapterFrames = memoize(() =>
    this._trimFramesToClipWindow(
      [
        ...this.getPerfFrames(),
        ...this.getWebVitalFrames(),
        ...this.getCustomFrames(),
        ...this._sortedBreadcrumbFrames.filter(frame =>
          [
            'replay.hydrate-error',
            'replay.init',
            'replay.mutations',
            'feedback',
            'device.battery',
            'device.connectivity',
            'device.orientation',
            'app.foreground',
            'app.background',
          ].includes(frame.category)
        ),
        ...this._errors,
      ].sort(sortFrames),
      this.getStartTimestampMs(),
      this.getStartTimestampMs() + this.getDurationMs()
    )
  );

  getPerfFrames = memoize(() => {
    const crumbs = removeDuplicateClicks(
      this._sortedBreadcrumbFrames.filter(
        frame =>
          ['navigation', 'ui.click', 'ui.tap'].includes(frame.category) ||
          (frame.category === 'ui.slowClickDetected' &&
            (isDeadClick(frame as SlowClickFrame) ||
              isDeadRageClick(frame as SlowClickFrame)))
      )
    );
    const spans = this._sortedSpanFrames.filter(frame =>
      frame.op.startsWith('navigation.')
    );
    const uniqueCrumbs = removeDuplicateNavCrumbs(crumbs, spans);
    return [...uniqueCrumbs, ...spans].sort(sortFrames);
  });
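
  /**
   * Web vital span frames, only when the `session-replay-web-vitals` feature
   * flag is enabled. CLS frames that share a timestamp are collapsed to the
   * last one in the burst, and first-input-delay frames are excluded.
   */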
  getWebVitalFrames = memoize(() => {
    if (this._featureFlags?.includes('session-replay-web-vitals')) {
      // Sort by largest timestamp first to easily find the last CLS in a burst
      const allWebVitals = this._sortedSpanFrames.filter(isWebVitalFrame).reverse();
      let lastTimestamp = 0;
      const groupedCls: WebVitalFrame[] = [];
      for (const frame of allWebVitals) {
        if (isCLSFrame(frame)) {
          if (lastTimestamp === frame.timestampMs) {
            groupedCls.push(frame);
          } else {
            lastTimestamp = frame.timestampMs;
          }
        }
      }
      return allWebVitals
        .filter(
          frame =>
            !groupedCls.includes(frame) && frame.description !== 'first-input-delay'
        )
        .reverse();
    }
    return [];
  });

  getVideoEvents = () => this._videoEvents;

  getPaintFrames = memoize(() => this._sortedSpanFrames.filter(isPaintFrame));

  getSDKOptions = () => this._optionFrame;

  /**
   * Checks the replay to see if the user has any canvas elements in their
   * application. Needed to inform them that we now support canvas in replays.
   */
  hasCanvasElementInReplay = memoize(() => {
    return Boolean(this._sortedRRWebEvents.filter(findCanvas).length);
  });

  isVideoReplay = () => this.getVideoEvents().length > 0;

  isNetworkDetailsSetup = memoize(() => {
    const sdkOptions = this.getSDKOptions();
    if (sdkOptions) {
      return sdkOptions.networkDetailHasUrls;
    }

    // Network data was added in JS SDK 7.50.0 while sdkConfig was added in v7.51.1.
    // So even if we don't have the config object, we should still fall back and
    // look for spans with network data, as that means things are set up!
    return this.getNetworkFrames().some(
      frame =>
        // We'd need to `filter()` before calling `some()` in order for TS to be happy
        // @ts-expect-error
        Object.keys(frame?.data?.request?.headers ?? {}).length ||
        // @ts-expect-error
        Object.keys(frame?.data?.response?.headers ?? {}).length
    );
  });
}
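
/**
 * Helpers for `hasCanvasElementInReplay`: walk full snapshots and incremental
 * mutation events looking for a serialized `<canvas>` element (serialized
 * node type 2 is an Element, type 0 is a Document).
 */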
function findCanvas(event: RecordingFrame) {
  if (event.type === EventType.FullSnapshot) {
    return findCanvasInSnapshot(event);
  }
  if (event.type === EventType.IncrementalSnapshot) {
    return findCanvasInMutation(event);
  }
  return false;
}

function findCanvasInMutation(event: incrementalSnapshotEvent) {
  if (event.data.source !== IncrementalSource.Mutation) {
    return false;
  }
  return event.data.adds.find(
    add => add.node && add.node.type === 2 && add.node.tagName === 'canvas'
  );
}

function findCanvasInChildNodes(nodes: serializedNodeWithId[]) {
  return nodes.find(
    node =>
      node.type === 2 &&
      (node.tagName === 'canvas' || findCanvasInChildNodes(node.childNodes || []))
  );
}

function findCanvasInSnapshot(event: fullSnapshotEvent) {
  if (event.data.node.type !== 0) {
    return false;
  }
  return findCanvasInChildNodes(event.data.node.childNodes);
}
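
// ---------------------------------------------------------------------------
// Usage sketch (illustrative only; the surrounding variables are assumptions,
// not part of this module). Callers build a reader via the static factory,
// which returns null until attachments, errors, and the replayRecord exist,
// then read from the memoized getters:
//
//   const replay = ReplayReader.factory({
//     attachments,   // raw segment data (rrweb events, breadcrumbs, spans)
//     errors,        // ReplayError[] fetched for this replay
//     replayRecord,  // the root ReplayRecord
//     fetching: false,
//   });
//   const durationMs = replay?.getDurationMs();
//   const chapters = replay?.getChapterFrames();
// ---------------------------------------------------------------------------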