
chore: prevent unknown flag deadlocks by sorting and batching inserts (#10348)

https://linear.app/unleash/issue/2-3692/prevent-deadlocks-by-sorting-and-batching-inserts

Tries to prevent deadlocks by sorting and batching unknown flag inserts
when flushing.
Nuno Góis, 2025-07-15 08:58:42 +01:00 (committed by GitHub)
parent 998834245c
commit 3d78fbea7f
3 changed files with 31 additions and 13 deletions
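
Why sorting and batching helps: Postgres can deadlock when two concurrent transactions upsert overlapping rows in different orders, because each ends up waiting on a row lock the other already holds. Sorting the cached flags into one stable order gives every flush the same lock-acquisition order, and capping the batch size bounds how many row locks a single statement holds at once. A minimal sketch of the idea, with illustrative type and helper names rather than the actual Unleash code:

    type UnknownFlagRow = {
        name: string;
        appName: string;
        environment: string;
        seenAt: Date;
    };

    // Composite key mirroring the (name, app_name, environment) conflict target.
    const flagKey = (f: UnknownFlagRow) => `${f.name}:${f.appName}:${f.environment}`;

    // Sort into one global order, then split into fixed-size batches.
    function toSortedBatches(flags: UnknownFlagRow[], batchSize = 100): UnknownFlagRow[][] {
        const sorted = [...flags].sort((a, b) => flagKey(a).localeCompare(flagKey(b)));
        const batches: UnknownFlagRow[][] = [];
        for (let i = 0; i < sorted.length; i += batchSize) {
            batches.push(sorted.slice(i, i + batchSize));
        }
        return batches;
    }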


@@ -205,7 +205,7 @@ export const createStores = (
         releasePlanMilestoneStrategyStore:
             new ReleasePlanMilestoneStrategyStore(db, config),
         featureLinkStore: new FeatureLinkStore(db, config),
-        unknownFlagsStore: new UnknownFlagsStore(db),
+        unknownFlagsStore: new UnknownFlagsStore(db, getLogger),
         featureLinkReadModel: new FeatureLinksReadModel(db, eventBus),
     };
 };


@@ -46,6 +46,8 @@ export class UnknownFlagsService {
         const cached = Array.from(this.unknownFlagsCache.values());
+        cached.sort((a, b) => this.getKey(a).localeCompare(this.getKey(b)));
         await this.unknownFlagsStore.insert(cached);
         this.unknownFlagsCache.clear();
     }
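
The comparator above calls a getKey helper that is not part of this hunk. A plausible shape, assuming it mirrors the store's (name, app_name, environment) conflict target so equal flags always compare the same way:

    // Hypothetical stand-in for the service's getKey helper; the real one is not shown in this diff.
    const getKey = (flag: { name: string; appName: string; environment: string }): string =>
        `${flag.name}:${flag.appName}:${flag.environment}`;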


@@ -1,6 +1,8 @@
 import type { Db } from '../../../db/db.js';
+import type { Logger, LogProvider } from '../../../logger.js';
 const TABLE = 'unknown_flags';
+const MAX_INSERT_BATCH_SIZE = 100;
 export type UnknownFlag = {
     name: string;
@@ -28,22 +30,36 @@ export interface IUnknownFlagsStore {
 export class UnknownFlagsStore implements IUnknownFlagsStore {
     private db: Db;
-    constructor(db: Db) {
+    private logger: Logger;
+    constructor(db: Db, getLogger: LogProvider) {
         this.db = db;
+        this.logger = getLogger('unknown-flags-store.ts');
     }
     async insert(flags: UnknownFlag[]): Promise<void> {
-        if (flags.length > 0) {
-            const rows = flags.map((flag) => ({
-                name: flag.name,
-                app_name: flag.appName,
-                seen_at: flag.seenAt,
-                environment: flag.environment,
-            }));
-            await this.db(TABLE)
-                .insert(rows)
-                .onConflict(['name', 'app_name', 'environment'])
-                .merge(['seen_at']);
+        if (!flags.length) return;
+        const rows = flags.map(({ name, appName, seenAt, environment }) => ({
+            name,
+            app_name: appName,
+            seen_at: seenAt,
+            environment,
+        }));
+        for (let i = 0; i < rows.length; i += MAX_INSERT_BATCH_SIZE) {
+            const chunk = rows.slice(i, i + MAX_INSERT_BATCH_SIZE);
+            try {
+                await this.db(TABLE)
+                    .insert(chunk)
+                    .onConflict(['name', 'app_name', 'environment'])
+                    .merge(['seen_at']);
+            } catch (error) {
+                this.logger.debug(
+                    `unknown_flags: batch ${i / MAX_INSERT_BATCH_SIZE + 1} failed and was skipped.`,
+                    error,
+                );
+            }
+        }
     }
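
A quick check of the batch arithmetic, using a hypothetical flush of 250 rows (only MAX_INSERT_BATCH_SIZE comes from the diff): the loop slices 100, 100 and 50 rows, numbering them 1, 2 and 3 via i / MAX_INSERT_BATCH_SIZE + 1, and a batch that fails is only logged at debug level while the remaining batches still run.

    const MAX_INSERT_BATCH_SIZE = 100;
    const rows = Array.from({ length: 250 }, (_, n) => ({ name: `flag-${n}` }));
    for (let i = 0; i < rows.length; i += MAX_INSERT_BATCH_SIZE) {
        const chunk = rows.slice(i, i + MAX_INSERT_BATCH_SIZE);
        console.log(`batch ${i / MAX_INSERT_BATCH_SIZE + 1}: ${chunk.length} rows`);
    }
    // batch 1: 100 rows
    // batch 2: 100 rows
    // batch 3: 50 rows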