From 9a25664a8318bc5d12a9c76458dc2154a49924de Mon Sep 17 00:00:00 2001
From: Mateusz Kwasniewski
Date: Mon, 14 Apr 2025 15:33:02 +0200
Subject: [PATCH] refactor: move batch execute to oss (#9754)

---
 src/lib/util/batchExecute.test.ts | 76 +++++++++++++++++++++++++++++++
 src/lib/util/batchExecute.ts      | 17 +++++++
 2 files changed, 93 insertions(+)
 create mode 100644 src/lib/util/batchExecute.test.ts
 create mode 100644 src/lib/util/batchExecute.ts

diff --git a/src/lib/util/batchExecute.test.ts b/src/lib/util/batchExecute.test.ts
new file mode 100644
index 0000000000..51fb27fc67
--- /dev/null
+++ b/src/lib/util/batchExecute.test.ts
@@ -0,0 +1,76 @@
+import { jest } from '@jest/globals';
+import { batchExecute } from './batchExecute';
+
+jest.useFakeTimers();
+
+describe('batchExecute', () => {
+    let mockExecuteFn: jest.Mock;
+
+    beforeEach(() => {
+        mockExecuteFn = jest.fn();
+    });
+
+    afterEach(() => {
+        jest.clearAllTimers();
+        jest.clearAllMocks();
+    });
+
+    it('should process each item in batches of the specified size', async () => {
+        const items = Array.from({ length: 25 }, (_, i) => i);
+        const batchSize = 10;
+        const delayMs = 1000;
+
+        batchExecute(items, batchSize, delayMs, mockExecuteFn);
+
+        for (let i = 0; i < 2; i++) {
+            jest.advanceTimersByTime(delayMs);
+            await Promise.resolve();
+        }
+
+        expect(mockExecuteFn).toHaveBeenCalledTimes(items.length);
+        items.forEach((item, index) => {
+            expect(mockExecuteFn).toHaveBeenNthCalledWith(index + 1, item);
+        });
+    });
+
+    it('should delay between each batch', async () => {
+        const items = Array.from({ length: 15 }, (_, i) => i);
+        const batchSize = 5;
+        const delayMs = 1000;
+
+        batchExecute(items, batchSize, delayMs, mockExecuteFn);
+
+        expect(mockExecuteFn).toHaveBeenCalledTimes(5);
+
+        jest.advanceTimersByTime(delayMs);
+        await Promise.resolve();
+        expect(mockExecuteFn).toHaveBeenCalledTimes(10);
+
+        jest.advanceTimersByTime(delayMs);
+        await Promise.resolve();
+        expect(mockExecuteFn).toHaveBeenCalledTimes(15);
+    });
+
+    it('should handle empty items array without calling executeFn', async () => {
+        const items: number[] = [];
+        const batchSize = 10;
+        const delayMs = 1000;
+
+        await batchExecute(items, batchSize, delayMs, mockExecuteFn);
+
+        expect(mockExecuteFn).not.toHaveBeenCalled();
+    });
+
+    it('should handle a batch size larger than the number of items', async () => {
+        const items = [1, 2, 3];
+        const batchSize = 10;
+        const delayMs = 1000;
+
+        batchExecute(items, batchSize, delayMs, mockExecuteFn);
+
+        expect(mockExecuteFn).toHaveBeenCalledTimes(items.length);
+        items.forEach((item, index) => {
+            expect(mockExecuteFn).toHaveBeenNthCalledWith(index + 1, item);
+        });
+    });
+});
diff --git a/src/lib/util/batchExecute.ts b/src/lib/util/batchExecute.ts
new file mode 100644
index 0000000000..e4be99e0f5
--- /dev/null
+++ b/src/lib/util/batchExecute.ts
@@ -0,0 +1,17 @@
+export const batchExecute = async <T>(
+    items: T[],
+    batchSize: number,
+    delayMs: number,
+    executeFn: (item: T) => void,
+) => {
+    for (let i = 0; i < items.length; i += batchSize) {
+        const batch = items.slice(i, i + batchSize);
+
+        // Execute function for each item in the batch sequentially, fire-and-forget
+        batch.forEach((item) => executeFn(item));
+
+        if (i + batchSize < items.length) {
+            await new Promise((resolve) => setTimeout(resolve, delayMs));
+        }
+    }
+};
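
Usage note (not part of the applied diff): a minimal sketch of how the relocated helper could be called once it lives in OSS. The `notifyInstance` callback, the instance ids, and the batch/delay values below are illustrative assumptions rather than anything this patch introduces; the real call sites live elsewhere in the codebase. Because batchExecute invokes the callback fire-and-forget, the callback must handle its own errors.

    import { batchExecute } from './batchExecute';

    // Illustrative data: 100 hypothetical instance ids to notify.
    const instanceIds = Array.from({ length: 100 }, (_, i) => `instance-${i}`);

    // Hypothetical fire-and-forget callback; batchExecute does not await it,
    // so any rejection/throw must be dealt with inside the callback itself.
    const notifyInstance = (id: string): void => {
        console.log(`notifying ${id}`);
    };

    const run = async () => {
        // 10 items per batch, 1000 ms pause between batches to avoid
        // flooding downstream services.
        await batchExecute(instanceIds, 10, 1000, notifyInstance);
    };

    void run();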