Skip to content

Commit 9cd6593

Browse files
authored
fix: DH-20255 Limit maximum depth to serialize redux log data (#2544)
Follow-up PR to #2531, which failed testing with an error from `JSON.stringify()` of `RangeError: Invalid string length`. These errors are caused by linked structures such as a Java LinkedHashMap that's included in the Redux data in various API related keys. We use the blacklist to filter out keys containing these structures, but these keys are not stable and will need to be constantly updated to prevent the log export from outright failing. To preemptively deal with this, the maximum depth to serialize to can be limited in the [safe-stable-stringify](https://www.npmjs.com/package/safe-stable-stringify) library to prevent linked structures from blowing up the log size. With no blacklist entries, the exported log size is only `1.2MB`. With an updated blacklist, exporting the logs with or without the limit both yield log files of almost the exact same size, meaning that no useful data has been lost by limiting the depth. However, I'm open to suggestions on the default depth to serialize to, or on whether it should be optional to have a limit on the depth instead of mandating a limit (`Infinity` is not accepted by the library).
1 parent a19bc11 commit 9cd6593

2 files changed

Lines changed: 48 additions & 5 deletions

File tree

packages/log/src/LogExport.test.ts

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -172,4 +172,27 @@ describe('getReduxDataString', () => {
172172
const result = getReduxDataString(reduxData, [['*']]);
173173
expect(result).toBe('{}');
174174
});
175+
176+
it('should respect maximum depth', () => {
177+
const reduxData = {
178+
key1: {
179+
key2: {
180+
key3: 'too deep',
181+
},
182+
key4: ['too deep'],
183+
},
184+
};
185+
const result = getReduxDataString(reduxData, [], 2);
186+
const expected = JSON.stringify(
187+
{
188+
key1: {
189+
key2: '[Object]',
190+
key4: '[Array]',
191+
},
192+
},
193+
null,
194+
2
195+
);
196+
expect(result).toBe(expected);
197+
});
175198
});

packages/log/src/LogExport.ts

Lines changed: 25 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import JSZip from 'jszip';
2-
import stringify from 'safe-stable-stringify';
2+
import { configure } from 'safe-stable-stringify';
33
import type LogHistory from './LogHistory';
44

55
// List of objects to blacklist
@@ -14,11 +14,19 @@ export const DEFAULT_PATH_BLACKLIST: string[][] = [
1414
['dashboardData', '*', 'openedMap'],
1515
['draftManager', 'draftStorage'],
1616
['layoutStorage'],
17+
['storage'],
18+
19+
// Below are confirmed enterprise specific keys, and will be moved in DH-20410
1720
['schemaService', 'client'],
21+
['schemaService', 'clientUtils'],
1822
['schemaService', 'schemaStorage'],
19-
['storage'],
23+
['corePlusManager', 'dheClient'],
24+
['dheClient'],
2025
];
2126

27+
// The default maximum depth to serialize to in the redux data
28+
const DEFAULT_MAXIMUM_DEPTH = 10;
29+
2230
function stringifyReplacer(blacklist: string[][]) {
2331
// modified from:
2432
// https://stackoverflow.com/questions/61681176/json-stringify-replacer-how-to-get-full-path
@@ -63,8 +71,15 @@ function isOnBlackList(currPath: string[], blacklist: string[][]): boolean {
6371

6472
export function getReduxDataString(
6573
reduxData: Record<string, unknown>,
66-
blacklist: string[][] = []
74+
blacklist: string[][] = [],
75+
maximumDepth: number = DEFAULT_MAXIMUM_DEPTH
6776
): string {
77+
// Limit the maximum depth to prevent linked structures from blowing up the log size
78+
// All objects at the maximum depth are replaced with "[Object]" or "[Array]"
79+
const stringify = configure({
80+
maximumDepth,
81+
});
82+
6883
return (
6984
// Using safe-stable-stringify which handles circular references and BigInt
7085
// All circular references are replaced with "[Circular]", and BigInt values are converted to a number
@@ -102,14 +117,16 @@ function formatDate(date: Date): string {
102117
* @param reduxData Redux data to include in the redux.json file
103118
* @param blacklist List of JSON paths to blacklist in redux data. A JSON path is a list representing the path to that value (e.g. client.data would be `['client', 'data']`). Wildcards (*) are accepted in the path.
104119
* @param fileNamePrefix The zip file name without the .zip extension. Ex: test will be saved as test.zip
120+
* @param maximumDepth The maximum depth to serialize the redux data to. Objects at the maximum depth will be replaced with "[Object]" or "[Array]".
105121
* @returns A promise that resolves successfully if the log archive is created and downloaded successfully, rejected if there's an error
106122
*/
107123
export async function exportLogs(
108124
logHistory: LogHistory,
109125
metadata?: Record<string, unknown>,
110126
reduxData?: Record<string, unknown>,
111127
blacklist: string[][] = DEFAULT_PATH_BLACKLIST,
112-
fileNamePrefix = `${formatDate(new Date())}_support_logs`
128+
fileNamePrefix = `${formatDate(new Date())}_support_logs`,
129+
maximumDepth: number = DEFAULT_MAXIMUM_DEPTH
113130
): Promise<void> {
114131
const zip = new JSZip();
115132
const folder = zip.folder(fileNamePrefix) as JSZip;
@@ -118,7 +135,10 @@ export async function exportLogs(
118135
folder.file('metadata.json', getFormattedMetadata(metadata));
119136
}
120137
if (reduxData != null) {
121-
folder.file('redux.json', getReduxDataString(reduxData, blacklist));
138+
folder.file(
139+
'redux.json',
140+
getReduxDataString(reduxData, blacklist, maximumDepth)
141+
);
122142
}
123143

124144
const blob = await zip.generateAsync({ type: 'blob' });

0 commit comments

Comments
 (0)