refactor: move batch execute to oss (#9754)
parent e6d55ab17a
commit 9a25664a83
src/lib/util/batchExecute.test.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
import { jest } from '@jest/globals';
import { batchExecute } from './batchExecute';

jest.useFakeTimers();

describe('batchExecute', () => {
    let mockExecuteFn: jest.Mock;

    beforeEach(() => {
        mockExecuteFn = jest.fn();
    });

    afterEach(() => {
        jest.clearAllTimers();
        jest.clearAllMocks();
    });

    it('should process each item in batches of the specified size', async () => {
        const items = Array.from({ length: 25 }, (_, i) => i);
        const batchSize = 10;
        const delayMs = 1000;

        batchExecute(items, batchSize, delayMs, mockExecuteFn);

        for (let i = 0; i < 2; i++) {
            jest.advanceTimersByTime(delayMs);
            await Promise.resolve();
        }

        expect(mockExecuteFn).toHaveBeenCalledTimes(items.length);
        items.forEach((item, index) => {
            expect(mockExecuteFn).toHaveBeenNthCalledWith(index + 1, item);
        });
    });

    it('should delay between each batch', async () => {
        const items = Array.from({ length: 15 }, (_, i) => i);
        const batchSize = 5;
        const delayMs = 1000;

        batchExecute(items, batchSize, delayMs, mockExecuteFn);

        expect(mockExecuteFn).toHaveBeenCalledTimes(5);

        jest.advanceTimersByTime(delayMs);
        await Promise.resolve();
        expect(mockExecuteFn).toHaveBeenCalledTimes(10);

        jest.advanceTimersByTime(delayMs);
        await Promise.resolve();
        expect(mockExecuteFn).toHaveBeenCalledTimes(15);
    });

    it('should handle empty items array without calling executeFn', async () => {
        const items: number[] = [];
        const batchSize = 10;
        const delayMs = 1000;

        await batchExecute(items, batchSize, delayMs, mockExecuteFn);

        expect(mockExecuteFn).not.toHaveBeenCalled();
    });

    it('should handle a batch size larger than the number of items', async () => {
        const items = [1, 2, 3];
        const batchSize = 10;
        const delayMs = 1000;

        batchExecute(items, batchSize, delayMs, mockExecuteFn);

        expect(mockExecuteFn).toHaveBeenCalledTimes(items.length);
        items.forEach((item, index) => {
            expect(mockExecuteFn).toHaveBeenNthCalledWith(index + 1, item);
        });
    });
});
src/lib/util/batchExecute.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
export const batchExecute = async <T>(
    items: T[],
    batchSize: number,
    delayMs: number,
    executeFn: (item: T) => void,
) => {
    for (let i = 0; i < items.length; i += batchSize) {
        const batch = items.slice(i, i + batchSize);

        // Execute function for each item in the batch sequentially, fire-and-forget
        batch.forEach((item) => executeFn(item));

        if (i + batchSize < items.length) {
            await new Promise((resolve) => setTimeout(resolve, delayMs));
        }
    }
};
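As a usage sketch, not part of this commit: batchExecute suits large sets of fire-and-forget calls that should be rate limited, since executeFn returns void and the delay only throttles how quickly batches are dispatched, not when the individual calls complete. The sendNotification helper, item count, batch size, and delay below are illustrative assumptions, not code from the Unleash repository.

import { batchExecute } from './batchExecute';

// Hypothetical fire-and-forget side effect; any void-returning callback works here.
const sendNotification = (userId: number): void => {
    console.log(`notifying user ${userId}`);
};

const run = async () => {
    const userIds = Array.from({ length: 250 }, (_, i) => i + 1);

    // Dispatches 50 calls per batch and waits 1000 ms between batches;
    // the returned promise resolves once the final batch has been handed off.
    await batchExecute(userIds, 50, 1000, sendNotification);
};

void run();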