
chore: integration events store (#7613)

https://linear.app/unleash/issue/2-2437/create-new-integration-event-store

Adds a new `IntegrationEventsStore`.
Nuno Góis 2024-07-18 15:20:35 +01:00 committed by GitHub
parent 242f59ba4e
commit 0ae6af13e9
7 changed files with 327 additions and 0 deletions

@@ -49,6 +49,7 @@ import { ProjectFlagCreatorsReadModel } from '../features/project/project-flag-c
import { FeatureStrategiesReadModel } from '../features/feature-toggle/feature-strategies-read-model';
import { FeatureLifecycleReadModel } from '../features/feature-lifecycle/feature-lifecycle-read-model';
import { LargestResourcesReadModel } from '../features/metrics/sizes/largest-resources-read-model';
import { IntegrationEventsStore } from '../features/integration-events/integration-events-store';

export const createStores = (
    config: IUnleashConfig,
@@ -173,6 +174,7 @@ export const createStores = (
            config.flagResolver,
        ),
        largestResourcesReadModel: new LargestResourcesReadModel(db),
        integrationEventsStore: new IntegrationEventsStore(db, { eventBus }),
    };
};

@@ -0,0 +1,45 @@
import { CRUDStore, type CrudStoreConfig } from '../../db/crud/crud-store';
import type { Db } from '../../db/db';
import type { IntegrationEventSchema } from '../../openapi/spec/integration-event-schema';

export type IntegrationEventWriteModel = Omit<
    IntegrationEventSchema,
    'id' | 'createdAt'
>;

export class IntegrationEventsStore extends CRUDStore<
    IntegrationEventSchema,
    IntegrationEventWriteModel
> {
    constructor(db: Db, config: CrudStoreConfig) {
        super('integration_events', db, config);
    }

    async getPaginatedEvents(
        id: number,
        limit: number,
        offset: number,
    ): Promise<IntegrationEventSchema[]> {
        const rows = await this.db(this.tableName)
            .where('integration_id', id)
            .limit(limit)
            .offset(offset)
            .orderBy('id', 'desc');

        return rows.map(this.fromRow) as IntegrationEventSchema[];
    }

    async cleanUpEvents(): Promise<void> {
        return this.db
            .with('latest_events', (qb) => {
                qb.select('id')
                    .from(this.tableName)
                    .whereRaw(`created_at >= now() - INTERVAL '2 hours'`)
                    .orderBy('created_at', 'desc')
                    .limit(100);
            })
            .from(this.tableName)
            .whereNotIn('id', this.db.select('id').from('latest_events'))
            .delete();
    }
}

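For context, here is a rough sketch of how the new store might be used from calling code. It only relies on what this diff shows: the `(db, { eventBus })` constructor wiring from `createStores` above and the `insert`, `getPaginatedEvents`, and `cleanUpEvents` methods exercised in the tests below. The surrounding function, its values, and the assumption that the event bus is a Node `EventEmitter` are illustrative, not part of this commit.

import type { EventEmitter } from 'events';
import type { Db } from '../../db/db';
import { IntegrationEventsStore } from './integration-events-store';

// Illustrative wiring only: in the real app the store is created once in
// `createStores` and injected, not constructed ad hoc like this.
export const recordAndReadIntegrationEvents = async (
    db: Db,
    eventBus: EventEmitter,
) => {
    const store = new IntegrationEventsStore(db, { eventBus });

    // Write one event; `id` and `createdAt` are filled in by the database,
    // which is why the write model omits them.
    await store.insert({
        integrationId: 42,
        state: 'success',
        stateDetails: 'Webhook delivered',
        event: {
            id: 7,
            type: 'feature-created',
            createdAt: new Date().toISOString(),
            createdBy: 'some-user',
        },
        details: { featureName: 'my-flag', projectName: 'default' },
    });

    // Read the most recent events for integration 42, newest first.
    const latest = await store.getPaginatedEvents(42, 10, 0);

    // Trim history: keep only events from the last 2 hours, capped at the
    // newest 100 rows (see `cleanUpEvents` above).
    await store.cleanUpEvents();

    return latest;
};
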
@@ -0,0 +1,196 @@
import dbInit, { type ITestDb } from '../../../test/e2e/helpers/database-init';
import {
    type IUnleashTest,
    setupAppWithAuth,
} from '../../../test/e2e/helpers/test-helper';
import getLogger from '../../../test/fixtures/no-logger';
import { TEST_AUDIT_USER } from '../../types';
import type {
    IntegrationEventsStore,
    IntegrationEventWriteModel,
} from './integration-events-store';

let app: IUnleashTest;
let db: ITestDb;
let integrationEventsStore: IntegrationEventsStore;
let integrationId: number;

const EVENT_SUCCESS: IntegrationEventWriteModel = {
    integrationId: 1,
    state: 'success',
    stateDetails: 'Saul Goodman',
    event: {
        id: 7,
        type: 'feature-created',
        createdAt: new Date().toISOString(),
        createdBy: 'Walter White',
    },
    details: {
        featureName: 'heisenberg',
        projectName: 'breaking-bad',
    },
};

const EVENT_FAILED: IntegrationEventWriteModel = {
    ...EVENT_SUCCESS,
    state: 'failed',
    stateDetails: 'Better Call Saul!',
};

beforeAll(async () => {
    db = await dbInit('integration_events', getLogger);
    app = await setupAppWithAuth(
        db.stores,
        {
            experimental: {
                flags: {
                    integrationEvents: true,
                },
            },
        },
        db.rawDatabase,
    );

    integrationEventsStore = db.stores.integrationEventsStore;
});

beforeEach(async () => {
    await db.reset();

    const { id } = await app.services.addonService.createAddon(
        {
            provider: 'webhook',
            enabled: true,
            parameters: {
                url: 'http://some-test-url',
            },
            events: ['feature-created'],
        },
        TEST_AUDIT_USER,
    );

    integrationId = id;
});

afterAll(async () => {
    await app.destroy();
    await db.destroy();
});

const insertPastEvent = async (
    event: IntegrationEventWriteModel,
    date: Date,
): Promise<void> => {
    const { id } = await integrationEventsStore.insert(event);

    await db.rawDatabase.raw(
        `UPDATE integration_events SET created_at = ? WHERE id = ?`,
        [date, id],
    );
};

const getTestEventSuccess = () => ({
    ...EVENT_SUCCESS,
    integrationId,
});

const getTestEventFailed = () => ({
    ...EVENT_FAILED,
    integrationId,
});

test('insert and fetch integration events', async () => {
    await integrationEventsStore.insert(getTestEventSuccess());
    await integrationEventsStore.insert(getTestEventFailed());

    const events = await integrationEventsStore.getPaginatedEvents(
        integrationId,
        10,
        0,
    );

    expect(events).toHaveLength(2);
    expect(events[0].state).toBe('failed');
    expect(events[1].state).toBe('success');
});

test('paginate to latest event', async () => {
    await integrationEventsStore.insert(getTestEventSuccess());
    await integrationEventsStore.insert(getTestEventFailed());

    const events = await integrationEventsStore.getPaginatedEvents(
        integrationId,
        1,
        0,
    );

    expect(events).toHaveLength(1);
    expect(events[0].state).toBe('failed');
});

test('paginate to second most recent event', async () => {
    await integrationEventsStore.insert(getTestEventSuccess());
    await integrationEventsStore.insert(getTestEventFailed());

    const events = await integrationEventsStore.getPaginatedEvents(
        integrationId,
        1,
        1,
    );

    expect(events).toHaveLength(1);
    expect(events[0].state).toBe('success');
});

test('paginate to non-existing event, returning empty array', async () => {
    await integrationEventsStore.insert(getTestEventSuccess());
    await integrationEventsStore.insert(getTestEventFailed());

    const events = await integrationEventsStore.getPaginatedEvents(
        integrationId,
        1,
        999,
    );

    expect(events).toHaveLength(0);
});

test('clean up events, keeping events from the last 2 hours', async () => {
    const oneHourAgo = new Date(Date.now() - 60 * 60 * 1000);
    const threeHoursAgo = new Date(Date.now() - 3 * 60 * 60 * 1000);
    const twoDaysAgo = new Date(Date.now() - 2 * 24 * 60 * 60 * 1000);
    const longTimeAgo = new Date('2000-01-01');

    await insertPastEvent(getTestEventSuccess(), threeHoursAgo);
    await insertPastEvent(getTestEventFailed(), twoDaysAgo);
    await insertPastEvent(getTestEventSuccess(), longTimeAgo);
    await insertPastEvent(getTestEventFailed(), oneHourAgo);
    await integrationEventsStore.insert(getTestEventSuccess());

    await integrationEventsStore.cleanUpEvents();

    const events = await integrationEventsStore.getPaginatedEvents(
        integrationId,
        10,
        0,
    );

    expect(events).toHaveLength(2);
    expect(events[0].state).toBe('success');
    expect(events[1].state).toBe('failed');
});

test('clean up events, keeping the last 100 events', async () => {
    for (let i = 0; i < 200; i++) {
        await integrationEventsStore.insert(getTestEventSuccess());
    }

    await integrationEventsStore.cleanUpEvents();

    const events = await integrationEventsStore.getPaginatedEvents(
        integrationId,
        200,
        0,
    );

    expect(events).toHaveLength(100);
});

@@ -111,6 +111,7 @@ export * from './import-toggles-validate-schema';
export * from './inactive-user-schema';
export * from './inactive-users-schema';
export * from './instance-admin-stats-schema';
export * from './integration-event-schema';
export * from './legal-value-schema';
export * from './login-schema';
export * from './maintenance-schema';

@@ -0,0 +1,78 @@
import type { FromSchema } from 'json-schema-to-ts';
import { eventSchema } from './event-schema';
import { tagSchema } from './tag-schema';
import { variantSchema } from './variant-schema';

export const integrationEventSchema = {
    $id: '#/components/schemas/integrationEventSchema',
    type: 'object',
    required: [
        'id',
        'integrationId',
        'createdAt',
        'state',
        'stateDetails',
        'event',
        'details',
    ],
    description: 'An object describing an integration event.',
    additionalProperties: false,
    properties: {
        id: {
            type: 'integer',
            description:
                "The integration event's ID. Integration event IDs are incrementing integers. In other words, a more recently created integration event will always have a higher ID than an older one.",
            minimum: 1,
            example: 7,
        },
        integrationId: {
            type: 'integer',
            description:
                'The ID of the integration that the integration event belongs to.',
            example: 42,
        },
        createdAt: {
            type: 'string',
            format: 'date-time',
            description:
                'The date and time of when the integration event was created. In other words, the date and time of when the integration handled the event.',
            example: '2023-12-27T13:37:00+01:00',
        },
        state: {
            type: 'string',
            enum: ['success', 'failed', 'successWithErrors'],
            description:
                'The state of the integration event. Can be one of `success`, `failed` or `successWithErrors`.',
            example: 'failed',
        },
        stateDetails: {
            type: 'string',
            description: 'Details about the state of the integration event.',
            example: 'Status code: 429 - Rate limit reached.',
        },
        event: {
            $ref: eventSchema.$id,
            description: 'The event that triggered this integration event.',
        },
        details: {
            type: 'object',
            'x-enforcer-exception-skip-codes': 'WSCH006',
            description:
                'Detailed information about the integration event. The contents vary depending on the type of integration and the specific details.',
            example: {
                message:
                    '*user@yourcompany.com* created a new *slack-app* integration configuration',
                channels: ['engineering', 'unleash-updates'],
            },
        },
    },
    components: {
        schemas: {
            eventSchema,
            tagSchema,
            variantSchema,
        },
    },
} as const;

export type IntegrationEventSchema = FromSchema<typeof integrationEventSchema>;

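For reference, an object matching this schema might look like the sketch below. The top-level values are lifted from the schema's own `example`s; the nested `event` mirrors the shape used in the store tests above, since the full `eventSchema` is defined elsewhere. It is illustrative only, not data produced by this commit.

import type { IntegrationEventSchema } from './integration-event-schema';

// Illustrative payload only, assembled from the examples in the schema above.
const exampleIntegrationEvent: IntegrationEventSchema = {
    id: 7,
    integrationId: 42,
    createdAt: '2023-12-27T13:37:00+01:00',
    state: 'failed',
    stateDetails: 'Status code: 429 - Rate limit reached.',
    event: {
        id: 7,
        type: 'feature-created',
        createdAt: '2023-12-27T13:37:00+01:00',
        createdBy: 'user@yourcompany.com',
    },
    details: {
        message:
            '*user@yourcompany.com* created a new *slack-app* integration configuration',
        channels: ['engineering', 'unleash-updates'],
    },
};
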
@@ -46,6 +46,7 @@ import { IProjectFlagCreatorsReadModel } from '../features/project/project-flag-
import { IFeatureStrategiesReadModel } from '../features/feature-toggle/types/feature-strategies-read-model-type';
import { IFeatureLifecycleReadModel } from '../features/feature-lifecycle/feature-lifecycle-read-model-type';
import { ILargestResourcesReadModel } from '../features/metrics/sizes/largest-resources-read-model-type';
import type { IntegrationEventsStore } from '../features/integration-events/integration-events-store';

export interface IUnleashStores {
    accessStore: IAccessStore;
@@ -96,6 +97,7 @@ export interface IUnleashStores {
    featureStrategiesReadModel: IFeatureStrategiesReadModel;
    featureLifecycleReadModel: IFeatureLifecycleReadModel;
    largestResourcesReadModel: ILargestResourcesReadModel;
    integrationEventsStore: IntegrationEventsStore;
}

export {
@@ -145,4 +147,5 @@ export {
    IFeatureStrategiesReadModel,
    IFeatureLifecycleReadModel,
    ILargestResourcesReadModel,
    type IntegrationEventsStore,
};

@@ -17,6 +17,7 @@ import FakeEnvironmentStore from '../../lib/features/project-environments/fake-e
import FakeStrategiesStore from './fake-strategies-store';
import type {
    IImportTogglesStore,
    IntegrationEventsStore,
    IPrivateProjectStore,
    IUnleashStores,
} from '../../lib/types';
@@ -107,6 +108,7 @@ const createStores: () => IUnleashStores = () => {
        featureStrategiesReadModel: new FakeFeatureStrategiesReadModel(),
        featureLifecycleReadModel: new FakeFeatureLifecycleReadModel(),
        largestResourcesReadModel: new FakeLargestResourcesReadModel(),
        integrationEventsStore: {} as IntegrationEventsStore,
    };
};