Merge pull request from MareStare/feat/add-debounced-cache-utility

[Part 6] Add a DebouncedCache utility for frontend
This commit is contained in:
liamwhite 2025-03-11 20:14:58 -04:00 committed by GitHub
commit bd53311e4b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 278 additions and 0 deletions

View file

@ -0,0 +1,150 @@
import { DebouncedCache } from '../debounced-cache';
// Unit tests for DebouncedCache: debouncing, result caching, abort-on-reschedule,
// and error logging for both the producer and the consumer callback.
describe('DebouncedCache', () => {
// Fake timers make the debounce threshold fully deterministic in tests.
beforeAll(() => {
vi.useFakeTimers();
});
// Spy on console so the cache's debug/error logging can be asserted on.
const consoleSpy = {
debug: vi.spyOn(console, 'debug'),
error: vi.spyOn(console, 'error'),
};
// Reset captured calls between tests so assertions don't leak across cases.
afterEach(() => {
consoleSpy.debug.mockClear();
consoleSpy.error.mockClear();
});
it('should call the function after a debounce threshold and cache the result', async () => {
const { producer, cache } = createTestCache();
const consumer = vi.fn();
cache.schedule({ a: 1, b: 2 }, consumer);
await vi.runAllTimersAsync();
expect(producer).toHaveBeenCalledWith({ a: 1, b: 2 });
expect(consumer).toHaveBeenCalledWith(3);
// Second schedule with identical params: served from cache, producer not re-run.
cache.schedule({ a: 1, b: 2 }, consumer);
await vi.runAllTimersAsync();
expect(producer).toHaveBeenCalledTimes(1);
expect(consumer).toHaveBeenCalledTimes(2);
expect(consoleSpy.debug).not.toHaveBeenCalled();
expect(consoleSpy.error).not.toHaveBeenCalled();
});
describe('should abort the last scheduled call when a new one is scheduled', () => {
test('scheduling before the debounce threshold is reached', async () => {
const { producer, cache } = createTestCache();
const consumer1 = vi.fn();
const consumer2 = vi.fn();
// Rescheduling immediately cancels consumer1's pending setTimeout outright.
cache.schedule({ a: 1, b: 2 }, consumer1);
cache.schedule({ a: 1, b: 2 }, consumer2);
await vi.runAllTimersAsync();
expect(consumer1).not.toHaveBeenCalled();
expect(consumer2).toHaveBeenCalledWith(3);
expect(producer).toHaveBeenCalledOnce();
// No logs should be emitted because the `setTimeout` call itself should have been aborted.
expect(consoleSpy.debug.mock.calls).toMatchInlineSnapshot(`[]`);
expect(consoleSpy.error.mock.calls).toMatchInlineSnapshot(`[]`);
});
test('scheduling after the debounce threshold is reached', async () => {
const threshold = 300;
const { producer, cache } = createTestCache(threshold);
const consumer1 = vi.fn();
const consumer2 = vi.fn();
cache.schedule({ a: 1, b: 2 }, consumer1);
// Let the first call pass its threshold so its producer invocation starts,
// then reschedule: the in-flight call is aborted and logged at debug level.
vi.advanceTimersByTime(threshold);
cache.schedule({ a: 1, b: 2 }, consumer2);
await vi.runAllTimersAsync();
expect(consumer1).not.toHaveBeenCalled();
expect(consumer2).toHaveBeenCalledWith(3);
expect(producer).toHaveBeenCalledOnce();
expect(consoleSpy.debug.mock.calls).toMatchInlineSnapshot(`
[
[
"A call was aborted after the debounce threshold was reached",
DOMException {},
],
]
`);
expect(consoleSpy.error.mock.calls).toMatchInlineSnapshot(`[]`);
});
});
describe('should handle errors by logging them', () => {
test('error in producer', async () => {
// A producer whose promise always rejects: the consumer must never run.
const producer = vi.fn(() => Promise.reject(new Error('producer error')));
const cache = new DebouncedCache(producer);
const consumer = vi.fn();
cache.schedule(undefined, consumer);
await vi.runAllTimersAsync();
expect(consumer).not.toHaveBeenCalled();
expect(consoleSpy.debug).not.toHaveBeenCalled();
expect(consoleSpy.error.mock.calls).toMatchInlineSnapshot(`
[
[
"An error occurred while calling 'spy'.",
[Error: producer error],
],
]
`);
});
test('error in consumer', async () => {
const { producer, cache } = createTestCache();
// A consumer that throws: the error is logged, not propagated.
const consumer = vi.fn(() => {
throw new Error('consumer error');
});
cache.schedule({ a: 1, b: 2 }, consumer);
await vi.runAllTimersAsync();
expect(producer).toHaveBeenCalledOnce();
expect(consoleSpy.debug).not.toHaveBeenCalled();
expect(consoleSpy.error.mock.calls).toMatchInlineSnapshot(`
[
[
"An error occurred while processing the result of 'producerImpl'.",
[Error: consumer error],
],
]
`);
});
});
});
/**
 * Builds a `DebouncedCache` around a spied-upon producer so tests can assert
 * on both the cache's output and how often the producer was invoked.
 */
function createTestCache(thresholdMs?: number) {
  const producer = vi.fn(producerImpl);

  return { producer, cache: new DebouncedCache(producer, { thresholdMs }) };
}
/** Arguments accepted by the test producer. */
interface ProducerParams {
  a: number;
  b: number;
}

/** Trivial async producer used by the tests: resolves to the sum of its inputs. */
async function producerImpl({ a, b }: ProducerParams): Promise<number> {
  return a + b;
}

View file

@ -0,0 +1,128 @@
export interface DebouncedCacheParams {
  /**
   * Time in milliseconds to wait before calling the function.
   */
  thresholdMs?: number;
}

/**
 * Wraps a function, caches its results and debounces calls to it.
 *
 * *Debouncing* means that when the function is called several times within the
 * `thresholdMs` window, each new call restarts the timer, and only the latest
 * call actually runs once the timer elapses. An operation that is already in
 * progress gets aborted too — its result is still written to the cache, but
 * its result-processing callback is never invoked.
 *
 * See more details about the concept of debouncing here:
 * https://lodash.com/docs/4.17.15#debounce.
 *
 * Calling with arguments that are already cached short-circuits: the cached
 * result is delivered immediately and the previously scheduled call is
 * cancelled.
 */
export class DebouncedCache<Params, R> {
  private thresholdMs: number;

  /** Producer promises keyed by the JSON-serialized params. */
  private results = new Map<string, Promise<R>>();

  private producer: (params: Params) => Promise<R>;

  /** Bookkeeping for the most recent `schedule()` call, if one exists. */
  private pending?: {
    timeout?: ReturnType<typeof setTimeout>;
    abortController: AbortController;
  };

  constructor(func: (params: Params) => Promise<R>, params?: DebouncedCacheParams) {
    this.thresholdMs = params?.thresholdMs ?? 300;
    this.producer = func;
  }

  /**
   * Schedules a call to the wrapped function, which runs only after a
   * `thresholdMs` delay and only if no newer `schedule` call arrives within
   * that window. If one does, then this scheduled call is canceled.
   */
  schedule(params: Params, onResult: (result: R) => void): void {
    this.abortLastSchedule(`[DebouncedCache] A new call to '${this.producer.name}' was scheduled`);

    const abortController = new AbortController();
    const { signal } = abortController;
    const cacheKey = JSON.stringify(params);

    const cached = this.results.get(cacheKey);
    if (cached !== undefined) {
      // Cache hit: deliver the stored result right away, no timer needed.
      this.deliver(cached, signal, onResult);
      this.pending = { abortController };
      return;
    }

    const invokeProducer = () => {
      // This can't be triggered via the public API of this class, because we cancel
      // the setTimeout call when abort is triggered, but it's here just in case
      /* v8 ignore start */
      if (this.wasAborted(signal)) {
        return;
      }
      /* v8 ignore end */

      // In theory, we could pass the abort signal to the function, but we don't
      // do that and let the function run even if it was aborted, and then cache
      // its result. This works well under the assumption that the function isn't
      // too expensive to run (like a quick GET request), so aborting it in the
      // middle wouldn't save too much resources. If needed, we can make this
      // behavior configurable in the future.
      const resultPromise = this.producer.call(null, params);

      // We don't remove an entry from the cache if the promise is rejected.
      // We expect that the underlying function will handle the errors and
      // do the retries internally if necessary.
      this.results.set(cacheKey, resultPromise);

      this.deliver(resultPromise, signal, onResult);
    };

    this.pending = {
      timeout: setTimeout(invokeProducer, this.thresholdMs),
      abortController,
    };
  }

  /** Returns whether the signal fired, logging at debug level when it did. */
  private wasAborted(signal: AbortSignal): boolean {
    if (signal.aborted) {
      console.debug(`A call was aborted after the debounce threshold was reached`, signal.reason);
    }
    return signal.aborted;
  }

  /**
   * Awaits the producer's promise and hands the value to `onResult`, unless
   * the signal aborted before or during the wait. Errors from either the
   * producer or the consumer are logged, never propagated.
   */
  private async deliver(resultPromise: Promise<R>, signal: AbortSignal, onResult: (result: R) => void): Promise<void> {
    if (this.wasAborted(signal)) {
      return;
    }

    let result: R;
    try {
      result = await resultPromise;
    } catch (error) {
      console.error(`An error occurred while calling '${this.producer.name}'.`, error);
      return;
    }

    // The abort may have happened while we were awaiting; check again.
    if (this.wasAborted(signal)) {
      return;
    }

    try {
      onResult(result);
    } catch (error) {
      console.error(`An error occurred while processing the result of '${this.producer.name}'.`, error);
    }
  }

  /** Cancels the most recent scheduled call, if any, with the given reason. */
  abortLastSchedule(reason: string): void {
    const pending = this.pending;
    if (pending === undefined) {
      return;
    }
    clearTimeout(pending.timeout);
    pending.abortController.abort(new DOMException(reason, 'AbortError'));
  }
}