diff --git a/.changeset/silent-cups-fail.md b/.changeset/silent-cups-fail.md
new file mode 100644
index 00000000..7d338704
--- /dev/null
+++ b/.changeset/silent-cups-fail.md
@@ -0,0 +1,6 @@
+---
+'reassure': minor
+'@callstack/reassure-measure': minor
+---
+
+feat: add support for setup/cleanup functions during each test run
diff --git a/README.md b/README.md
index d1e73b5b..645976d1 100644
--- a/README.md
+++ b/README.md
@@ -374,6 +374,8 @@ interface MeasureRendersOptions {
   wrapper?: React.ComponentType<{ children: ReactElement }>;
   scenario?: (view?: RenderResult) => Promise<void>;
   writeFile?: boolean;
+  beforeEach?: () => Promise<void> | void;
+  afterEach?: () => Promise<void> | void;
 }
 ```
@@ -383,6 +385,8 @@ interface MeasureRendersOptions {
 - **`wrapper`**: React component, such as a `Provider`, which the `ui` will be wrapped with. Note: the render duration of the `wrapper` itself is excluded from the results; only the wrapped component is measured.
 - **`scenario`**: a custom async function, which defines user interaction within the UI by utilising RNTL or RTL functions
 - **`writeFile`**: should write output to file (default `true`)
+- **`beforeEach`**: function to execute before each test run.
+- **`afterEach`**: function to execute after each test run.
 
 #### `measureFunction` function
@@ -403,6 +407,8 @@ interface MeasureFunctionOptions {
   warmupRuns?: number;
   removeOutliers?: boolean;
   writeFile?: boolean;
+  beforeEach?: () => Promise<void> | void;
+  afterEach?: () => Promise<void> | void;
 }
 ```
@@ -410,6 +416,8 @@ interface MeasureFunctionOptions {
 - **`runs`**: number of runs per series for given test
 - **`warmupRuns`**: number of additional warmup runs that will be done and discarded before the actual runs
 - **`removeOutliers`**: should remove statistical outlier results (default: `true`)
 - **`writeFile`**: should write output to file (default `true`)
+- **`beforeEach`**: function to execute before each test run.
+- **`afterEach`**: function to execute after each test run.
 
 #### `measureAsyncFunction` function
@@ -432,6 +440,8 @@ interface MeasureAsyncFunctionOptions {
   warmupRuns?: number;
   removeOutliers?: boolean;
   writeFile?: boolean;
+  beforeEach?: () => Promise<void> | void;
+  afterEach?: () => Promise<void> | void;
 }
 ```
@@ -439,6 +449,8 @@ interface MeasureAsyncFunctionOptions {
 - **`runs`**: number of runs per series for given test
 - **`warmupRuns`**: number of additional warmup runs that will be done and discarded before the actual runs
 - **`removeOutliers`**: should remove statistical outlier results (default: `true`)
 - **`writeFile`**: should write output to file (default `true`)
+- **`beforeEach`**: function to execute before each test run.
+- **`afterEach`**: function to execute after each test run.
 
 ### Configuration
diff --git a/docusaurus/docs/api.md b/docusaurus/docs/api.md
index 4ab0e3e1..dc7f6dad 100644
--- a/docusaurus/docs/api.md
+++ b/docusaurus/docs/api.md
@@ -54,6 +54,8 @@ interface MeasureRendersOptions {
   wrapper?: React.ComponentType<{ children: ReactElement }>;
   scenario?: (view?: RenderResult) => Promise<void>;
   writeFile?: boolean;
+  beforeEach?: () => Promise<void> | void;
+  afterEach?: () => Promise<void> | void;
 }
 ```
@@ -63,6 +65,8 @@ interface MeasureRendersOptions {
 - **`runs`**: number of runs per series for given test
 - **`wrapper`**: React component, such as a `Provider`, which the `ui` will be wrapped with. Note: the render duration of the `wrapper` itself is excluded from the results, only the wrapped component is measured.
 - **`scenario`**: a custom async function, which defines user interaction within the ui by utilized RNTL functions
 - **`writeFile`**: should write output to file (default `true`)
+- **`beforeEach`**: function to execute before each test run.
+- **`afterEach`**: function to execute after each test run.
 
 ### `measureFunction` function {#measure-function}
@@ -95,6 +99,8 @@ interface MeasureFunctionOptions {
   warmupRuns?: number;
   removeOutliers?: boolean;
   writeFile?: boolean;
+  beforeEach?: () => Promise<void> | void;
+  afterEach?: () => Promise<void> | void;
 }
 ```
@@ -102,6 +108,8 @@ interface MeasureFunctionOptions {
 - **`runs`**: number of runs per series for given test
 - **`warmupRuns`**: number of additional warmup runs that will be done and discarded before the actual runs
 - **`removeOutliers`**: should remove statistical outlier results (default: `true`)
 - **`writeFile`**: should write output to file (default `true`)
+- **`beforeEach`**: function to execute before each test run.
+- **`afterEach`**: function to execute after each test run.
 
 ### `measureAsyncFunction` function {#measure-async-function}
@@ -142,6 +150,8 @@ interface MeasureAsyncFunctionOptions {
   warmupRuns?: number;
   removeOutliers?: boolean;
   writeFile?: boolean;
+  beforeEach?: () => Promise<void> | void;
+  afterEach?: () => Promise<void> | void;
 }
 ```
@@ -149,6 +159,8 @@ interface MeasureAsyncFunctionOptions {
 - **`runs`**: number of runs per series for given test
 - **`warmupRuns`**: number of additional warmup runs that will be done and discarded before the actual runs
 - **`removeOutliers`**: should remove statistical outlier results (default: `true`)
 - **`writeFile`**: (default `true`) should write output to file
+- **`beforeEach`**: function to execute before each test run.
+- **`afterEach`**: function to execute after each test run.
 
 ## Configuration
diff --git a/packages/measure/src/__tests__/measure-async-function.test.tsx b/packages/measure/src/__tests__/measure-async-function.test.tsx
index 26a77ed1..0c4722f0 100644
--- a/packages/measure/src/__tests__/measure-async-function.test.tsx
+++ b/packages/measure/src/__tests__/measure-async-function.test.tsx
@@ -45,6 +45,26 @@ test('measureAsyncFunction applies "warmupRuns" option', async () => {
   expect(results.stdevCount).toBe(0);
 });
 
+test('measureAsyncFunction executes setup and cleanup functions for each run', async () => {
+  const fn = jest.fn(() => Promise.resolve().then(() => fib(5)));
+  const beforeFn = jest.fn();
+  const afterFn = jest.fn();
+  const results = await measureAsyncFunction(fn, {
+    runs: 10,
+    warmupRuns: 1,
+    writeFile: false,
+    beforeEach: beforeFn,
+    afterEach: afterFn,
+  });
+
+  expect(beforeFn).toHaveBeenCalledTimes(11);
+  expect(fn).toHaveBeenCalledTimes(11);
+  expect(afterFn).toHaveBeenCalledTimes(11);
+  expect(results.runs).toBe(10);
+  expect(results.durations.length + (results.outlierDurations?.length ?? 0)).toBe(10);
+  expect(results.counts).toHaveLength(10);
+});
+
 const errorsToIgnore = ['❌ Measure code is running under incorrect Node.js configuration.'];
 const realConsole = jest.requireActual('console') as Console;
diff --git a/packages/measure/src/__tests__/measure-function.test.tsx b/packages/measure/src/__tests__/measure-function.test.tsx
index 7f9d7cdd..0caaecbf 100644
--- a/packages/measure/src/__tests__/measure-function.test.tsx
+++ b/packages/measure/src/__tests__/measure-function.test.tsx
@@ -57,6 +57,26 @@ test('measureFunction applies "warmupRuns" option', async () => {
   expect(results.stdevCount).toBe(0);
 });
 
+test('measureFunction executes setup and cleanup functions for each run', async () => {
+  const fn = jest.fn(() => fib(5));
+  const beforeFn = jest.fn();
+  const afterFn = jest.fn();
+  const results = await measureFunction(fn, {
+    runs: 10,
+    warmupRuns: 1,
+    writeFile: false,
+    beforeEach: beforeFn,
+    afterEach: afterFn,
+  });
+
+  expect(beforeFn).toHaveBeenCalledTimes(11);
+  expect(fn).toHaveBeenCalledTimes(11);
+  expect(afterFn).toHaveBeenCalledTimes(11);
+  expect(results.runs).toBe(10);
+  expect(results.durations.length + (results.outlierDurations?.length ?? 0)).toBe(10);
+  expect(results.counts).toHaveLength(10);
+});
+
 const errorsToIgnore = ['❌ Measure code is running under incorrect Node.js configuration.'];
 const realConsole = jest.requireActual('console') as Console;
diff --git a/packages/measure/src/__tests__/measure-renders.test.tsx b/packages/measure/src/__tests__/measure-renders.test.tsx
index 0917279c..1a75bef4 100644
--- a/packages/measure/src/__tests__/measure-renders.test.tsx
+++ b/packages/measure/src/__tests__/measure-renders.test.tsx
@@ -38,6 +38,27 @@ test('measureRenders applies "warmupRuns" option', async () => {
   expect(results.stdevCount).toBe(0);
 });
 
+test('measureRenders executes setup and cleanup functions for each run', async () => {
+  const scenario = jest.fn(() => Promise.resolve(null));
+  const beforeFn = jest.fn();
+  const afterFn = jest.fn();
+  const results = await measureRenders(, {
+    runs: 10,
+    warmupRuns: 1,
+    scenario,
+    writeFile: false,
+    beforeEach: beforeFn,
+    afterEach: afterFn,
+  });
+
+  expect(beforeFn).toHaveBeenCalledTimes(11);
+  expect(scenario).toHaveBeenCalledTimes(11);
+  expect(afterFn).toHaveBeenCalledTimes(11);
+  expect(results.runs).toBe(10);
+  expect(results.durations.length + (results.outlierDurations?.length ?? 0)).toBe(10);
+  expect(results.counts).toHaveLength(10);
+});
+
 test('measureRenders should log error when running under incorrect node flags', async () => {
   jest.spyOn(realConsole, 'error').mockImplementation((message) => {
     if (!errorsToIgnore.some((error) => message.includes(error))) {
diff --git a/packages/measure/src/measure-async-function.tsx b/packages/measure/src/measure-async-function.tsx
index 3e27dffc..4afb9fa9 100644
--- a/packages/measure/src/measure-async-function.tsx
+++ b/packages/measure/src/measure-async-function.tsx
@@ -31,10 +31,14 @@ async function measureAsyncFunctionInternal(
   const runResults: RunResult[] = [];
 
   for (let i = 0; i < runs + warmupRuns; i += 1) {
+    await options?.beforeEach?.();
+
     const timeStart = getCurrentTime();
     await fn();
     const timeEnd = getCurrentTime();
 
+    await options?.afterEach?.();
+
     const duration = timeEnd - timeStart;
     runResults.push({ duration, count: 1 });
   }
diff --git a/packages/measure/src/measure-function.tsx b/packages/measure/src/measure-function.tsx
index a9f075b1..65523cfc 100644
--- a/packages/measure/src/measure-function.tsx
+++ b/packages/measure/src/measure-function.tsx
@@ -8,10 +8,12 @@ export interface MeasureFunctionOptions {
   warmupRuns?: number;
   removeOutliers?: boolean;
   writeFile?: boolean;
+  beforeEach?: () => Promise<void> | void;
+  afterEach?: () => Promise<void> | void;
 }
 
 export async function measureFunction(fn: () => void, options?: MeasureFunctionOptions): Promise<MeasureResults> {
-  const stats = measureFunctionInternal(fn, options);
+  const stats = await measureFunctionInternal(fn, options);
 
   if (options?.writeFile !== false) {
     await writeTestStats(stats, 'function');
@@ -20,7 +22,7 @@ export async function measureFunction(fn: () => void, options?: MeasureFunctionO
   return stats;
 }
 
-function measureFunctionInternal(fn: () => void, options?: MeasureFunctionOptions): MeasureResults {
+async function measureFunctionInternal(fn: () => void, options?: MeasureFunctionOptions): Promise<MeasureResults> {
   const runs = options?.runs ?? config.runs;
   const warmupRuns = options?.warmupRuns ?? config.warmupRuns;
   const removeOutliers = options?.removeOutliers ?? config.removeOutliers;
@@ -29,10 +31,14 @@ function measureFunctionInternal(fn: () => void, options?: MeasureFunctionOption
   const runResults: RunResult[] = [];
 
   for (let i = 0; i < runs + warmupRuns; i += 1) {
+    await options?.beforeEach?.();
+
     const timeStart = getCurrentTime();
     fn();
     const timeEnd = getCurrentTime();
 
+    await options?.afterEach?.();
+
     const duration = timeEnd - timeStart;
     runResults.push({ duration, count: 1 });
   }
diff --git a/packages/measure/src/measure-renders.tsx b/packages/measure/src/measure-renders.tsx
index 521a6496..6e746d18 100644
--- a/packages/measure/src/measure-renders.tsx
+++ b/packages/measure/src/measure-renders.tsx
@@ -20,6 +20,8 @@ export interface MeasureRendersOptions {
   wrapper?: React.ComponentType<{ children: React.ReactElement }>;
   scenario?: (screen: any) => Promise<void>;
   writeFile?: boolean;
+  beforeEach?: () => Promise<void> | void;
+  afterEach?: () => Promise<void> | void;
 }
 
 export async function measureRenders(
@@ -69,6 +71,8 @@ async function measureRendersInternal(
   let initialRenderCount = 0;
 
   for (let iteration = 0; iteration < runs + warmupRuns; iteration += 1) {
+    await options?.beforeEach?.();
+
     let duration = 0;
     let count = 0;
     let renderResult: any = null;
@@ -108,6 +112,8 @@ async function measureRendersInternal(
     cleanup();
 
     global.gc?.();
 
+    await options?.afterEach?.();
+
     runResults.push({ duration, count });
   }
diff --git a/packages/reassure/README.md b/packages/reassure/README.md
index d1e73b5b..645976d1 100644
--- a/packages/reassure/README.md
+++ b/packages/reassure/README.md
@@ -374,6 +374,8 @@ interface MeasureRendersOptions {
   wrapper?: React.ComponentType<{ children: ReactElement }>;
   scenario?: (view?: RenderResult) => Promise<void>;
   writeFile?: boolean;
+  beforeEach?: () => Promise<void> | void;
+  afterEach?: () => Promise<void> | void;
 }
 ```
@@ -383,6 +385,8 @@ interface MeasureRendersOptions {
 - **`wrapper`**: React component, such as a `Provider`, which the `ui` will be wrapped with. Note: the render duration of the `wrapper` itself is excluded from the results; only the wrapped component is measured.
 - **`scenario`**: a custom async function, which defines user interaction within the UI by utilising RNTL or RTL functions
 - **`writeFile`**: should write output to file (default `true`)
+- **`beforeEach`**: function to execute before each test run.
+- **`afterEach`**: function to execute after each test run.
 
 #### `measureFunction` function
@@ -403,6 +407,8 @@ interface MeasureFunctionOptions {
   warmupRuns?: number;
   removeOutliers?: boolean;
   writeFile?: boolean;
+  beforeEach?: () => Promise<void> | void;
+  afterEach?: () => Promise<void> | void;
 }
 ```
@@ -410,6 +416,8 @@ interface MeasureFunctionOptions {
 - **`runs`**: number of runs per series for given test
 - **`warmupRuns`**: number of additional warmup runs that will be done and discarded before the actual runs
 - **`removeOutliers`**: should remove statistical outlier results (default: `true`)
 - **`writeFile`**: should write output to file (default `true`)
+- **`beforeEach`**: function to execute before each test run.
+- **`afterEach`**: function to execute after each test run.
 #### `measureAsyncFunction` function
@@ -432,6 +440,8 @@ interface MeasureAsyncFunctionOptions {
   warmupRuns?: number;
   removeOutliers?: boolean;
   writeFile?: boolean;
+  beforeEach?: () => Promise<void> | void;
+  afterEach?: () => Promise<void> | void;
 }
 ```
@@ -439,6 +449,8 @@ interface MeasureAsyncFunctionOptions {
 - **`runs`**: number of runs per series for given test
 - **`warmupRuns`**: number of additional warmup runs that will be done and discarded before the actual runs
 - **`removeOutliers`**: should remove statistical outlier results (default: `true`)
 - **`writeFile`**: should write output to file (default `true`)
+- **`beforeEach`**: function to execute before each test run.
+- **`afterEach`**: function to execute after each test run.
 
 ### Configuration
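
For illustration beyond the diff: a minimal sketch of how the new `beforeEach`/`afterEach` options could be used in a Reassure perf test. The `buildFixtures`/`resetFixtures` helpers and the measured `processItems` function are made up for this example; the hook semantics (run around every run, warmup runs included, and excluded from the measured duration) follow the implementation above.

```tsx
import { measureFunction } from 'reassure';

// Hypothetical fixture state used only in this sketch.
let items: number[] = [];

// Hypothetical setup: rebuild the fixture before every measured run (including warmup runs).
const buildFixtures = () => {
  items = Array.from({ length: 10_000 }, (_, i) => i);
};

// Hypothetical cleanup: runs after every run; not counted in the measured duration.
const resetFixtures = () => {
  items = [];
};

// Hypothetical function under test.
const processItems = () => items.map((x) => x * 2).reduce((sum, x) => sum + x, 0);

test('processItems perf with per-run setup/cleanup', async () => {
  await measureFunction(() => processItems(), {
    beforeEach: buildFixtures,
    afterEach: resetFixtures,
  });
});
```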