Mirror of https://github.com/Unleash/unleash.git (synced 2025-01-25 00:07:47 +01:00)
feat: largest projects and features metric (#7459)
This commit is contained in:
parent
e8511789fd
commit
72de574012
@@ -48,6 +48,7 @@ import { FeatureLifecycleStore } from '../features/feature-lifecycle/feature-lif
 import { ProjectFlagCreatorsReadModel } from '../features/project/project-flag-creators-read-model';
 import { FeatureStrategiesReadModel } from '../features/feature-toggle/feature-strategies-read-model';
 import { FeatureLifecycleReadModel } from '../features/feature-lifecycle/feature-lifecycle-read-model';
+import { LargestResourcesReadModel } from '../features/metrics/sizes/largest-resources-read-model';

 export const createStores = (
     config: IUnleashConfig,
@@ -166,6 +167,7 @@ export const createStores = (
             db,
             config.flagResolver,
         ),
+        largestResourcesReadModel: new LargestResourcesReadModel(db),
     };
 };
@@ -7,7 +7,7 @@ import type { IFeatureToggleStore } from '../feature-toggle/types/feature-toggle
 import type { IFlagResolver } from '../../types';

 let db: ITestDb;
-let featureLifeycycleReadModel: IFeatureLifecycleReadModel;
+let featureLifecycleReadModel: IFeatureLifecycleReadModel;
 let featureLifecycleStore: IFeatureLifecycleStore;
 let featureToggleStore: IFeatureToggleStore;
@@ -19,7 +19,7 @@ const alwaysOnFlagResolver = {

 beforeAll(async () => {
     db = await dbInit('feature_lifecycle_read_model', getLogger);
-    featureLifeycycleReadModel = new FeatureLifecycleReadModel(
+    featureLifecycleReadModel = new FeatureLifecycleReadModel(
         db.rawDatabase,
         alwaysOnFlagResolver,
     );
@@ -59,14 +59,14 @@ test('can return stage count', async () => {
         { feature: 'featureA', stage: 'pre-live' },
     ]);

-    const stageCount = await featureLifeycycleReadModel.getStageCount();
+    const stageCount = await featureLifecycleReadModel.getStageCount();
     expect(stageCount).toMatchObject([
         { stage: 'pre-live', count: 1 },
         { stage: 'initial', count: 2 },
     ]);

     const stageCountByProject =
-        await featureLifeycycleReadModel.getStageCountByProject();
+        await featureLifecycleReadModel.getStageCountByProject();
     expect(stageCountByProject).toMatchObject([
         { project: 'default', stage: 'pre-live', count: 1 },
         { project: 'default', stage: 'initial', count: 2 },
@@ -0,0 +1,16 @@
+import type { ILargestResourcesReadModel } from './largest-resources-read-model-type';
+
+export class FakeLargestResourcesReadModel
+    implements ILargestResourcesReadModel
+{
+    async getLargestProjectEnvironments(
+        limit: number,
+    ): Promise<{ project: string; environment: string; size: number }[]> {
+        return [];
+    }
+    async getLargestFeatureEnvironments(
+        limit: number,
+    ): Promise<{ feature: string; environment: string; size: number }[]> {
+        return [];
+    }
+}
@@ -0,0 +1,8 @@
+export interface ILargestResourcesReadModel {
+    getLargestProjectEnvironments(
+        limit: number,
+    ): Promise<Array<{ project: string; environment: string; size: number }>>;
+    getLargestFeatureEnvironments(
+        limit: number,
+    ): Promise<Array<{ feature: string; environment: string; size: number }>>;
+}
@@ -0,0 +1,94 @@
+import type { ILargestResourcesReadModel } from './largest-resources-read-model-type';
+import dbInit, {
+    type ITestDb,
+} from '../../../../test/e2e/helpers/database-init';
+import type { IFeatureToggleStore } from '../../feature-toggle/types/feature-toggle-store-type';
+import getLogger from '../../../../test/fixtures/no-logger';
+import type { IFeatureStrategiesStore } from '../../feature-toggle/types/feature-toggle-strategies-store-type';
+import type { IFeatureStrategy } from '../../../types';
+
+let db: ITestDb;
+let largestResourcesReadModel: ILargestResourcesReadModel;
+let featureToggleStore: IFeatureToggleStore;
+let featureStrategiesStore: IFeatureStrategiesStore;
+
+beforeAll(async () => {
+    db = await dbInit('largest_resources_read_model', getLogger);
+    featureToggleStore = db.stores.featureToggleStore;
+    featureStrategiesStore = db.stores.featureStrategiesStore;
+    largestResourcesReadModel = db.stores.largestResourcesReadModel;
+});
+
+afterAll(async () => {
+    if (db) {
+        await db.destroy();
+    }
+});
+
+beforeEach(async () => {
+    await featureToggleStore.deleteAll();
+});
+
+type FeatureConfig = Pick<
+    IFeatureStrategy,
+    'featureName' | 'constraints' | 'parameters' | 'variants'
+>;
+const createFeature = async (config: FeatureConfig) => {
+    await featureToggleStore.create('default', {
+        name: config.featureName,
+        createdByUserId: 9999,
+    });
+    await featureStrategiesStore.createStrategyFeatureEnv({
+        strategyName: 'flexibleRollout',
+        projectId: 'default',
+        environment: 'default',
+        featureName: config.featureName,
+        constraints: config.constraints,
+        parameters: config.parameters,
+        variants: config.variants,
+    });
+};
+
+test('can calculate resource size', async () => {
+    await createFeature({
+        featureName: 'featureA',
+        parameters: {
+            groupId: 'flag_init_test_1',
+            rollout: '25',
+            stickiness: 'default',
+        },
+        constraints: [
+            {
+                contextName: 'clientId',
+                operator: 'IN',
+                values: ['1', '2', '3', '4', '5', '6'],
+                caseInsensitive: false,
+                inverted: false,
+            },
+        ],
+        variants: [
+            {
+                name: 'a',
+                weight: 1000,
+                weightType: 'fix',
+                stickiness: 'default',
+            },
+        ],
+    });
+
+    await createFeature({
+        featureName: 'featureB',
+        parameters: {},
+        constraints: [],
+        variants: [],
+    });
+
+    const [project] =
+        await largestResourcesReadModel.getLargestProjectEnvironments(1);
+    const [feature1, feature2] =
+        await largestResourcesReadModel.getLargestFeatureEnvironments(2);
+
+    expect(project.size).toBeGreaterThan(400);
+    expect(project.size).toBe(feature1.size + feature2.size);
+    expect(feature1.size).toBeGreaterThan(feature2.size);
+});
@@ -0,0 +1,76 @@
+import type { Db } from '../../../db/db';
+import type { ILargestResourcesReadModel } from './largest-resources-read-model-type';
+
+export class LargestResourcesReadModel implements ILargestResourcesReadModel {
+    private db: Db;
+
+    constructor(db: Db) {
+        this.db = db;
+    }
+
+    async getLargestProjectEnvironments(
+        limit: number,
+    ): Promise<Array<{ project: string; environment: string; size: number }>> {
+        const { rows } = await this.db.raw(`
+            WITH ProjectSizes AS (
+                SELECT
+                    project_name,
+                    environment,
+                    SUM(pg_column_size(constraints) + pg_column_size(variants) + pg_column_size(parameters)) AS total_size
+                FROM
+                    feature_strategies
+                GROUP BY
+                    project_name,
+                    environment
+            )
+            SELECT
+                project_name,
+                environment,
+                total_size
+            FROM
+                ProjectSizes
+            ORDER BY
+                total_size DESC
+            LIMIT ${limit}
+        `);
+
+        return rows.map((row) => ({
+            project: row.project_name,
+            environment: row.environment,
+            size: Number(row.total_size),
+        }));
+    }
+
+    async getLargestFeatureEnvironments(
+        limit: number,
+    ): Promise<Array<{ feature: string; environment: string; size: number }>> {
+        const { rows } = await this.db.raw(`
+            WITH FeatureSizes AS (
+                SELECT
+                    feature_name,
+                    environment,
+                    SUM(pg_column_size(constraints) + pg_column_size(variants) + pg_column_size(parameters)) AS total_size
+                FROM
+                    feature_strategies
+                GROUP BY
+                    feature_name,
+                    environment
+            )
+            SELECT
+                feature_name,
+                environment,
+                total_size
+            FROM
+                FeatureSizes
+            ORDER BY
+                total_size DESC
+            LIMIT ${limit}
+        `);
+
+        return rows.map((row) => ({
+            feature: row.feature_name,
+            environment: row.environment,
+            size: Number(row.total_size),
+        }));
+    }
+}
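Note: both queries size a project or feature environment by summing PostgreSQL's pg_column_size() over the constraints, variants, and parameters columns of feature_strategies and taking the top rows. The limit is interpolated into the SQL string; since the metrics collector below always passes a hard-coded 1, that is harmless, but for reference here is a minimal sketch of the same project-level query with a bound parameter instead. It assumes a knex-style raw(sql, bindings) on the Db handle (which the template-literal raw() call above implies) and drops the CTE, since Postgres can order by the output-column alias directly; the name getLargestProjectEnvironmentsBound is illustrative only and not part of this commit.

// Sketch only (not in this commit): same aggregation with the LIMIT passed
// as a binding rather than interpolated into the SQL string.
type ProjectSizeRow = {
    project_name: string;
    environment: string;
    total_size: string; // node-postgres returns the bigint SUM as a string, hence Number() below
};

async function getLargestProjectEnvironmentsBound(
    db: { raw(sql: string, bindings: unknown[]): Promise<{ rows: ProjectSizeRow[] }> },
    limit: number,
): Promise<Array<{ project: string; environment: string; size: number }>> {
    const { rows } = await db.raw(
        `SELECT
             project_name,
             environment,
             SUM(
                 pg_column_size(constraints) +
                 pg_column_size(variants) +
                 pg_column_size(parameters)
             ) AS total_size
         FROM feature_strategies
         GROUP BY project_name, environment
         ORDER BY total_size DESC
         LIMIT ?`,
        [limit],
    );
    return rows.map((row) => ({
        project: row.project_name,
        environment: row.environment,
        size: Number(row.total_size),
    }));
}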
@@ -129,6 +129,16 @@ export default class MetricsMonitor {
             help: 'Maximum number of constraints used on a single strategy',
             labelNames: ['feature', 'environment'],
         });
+        const largestProjectEnvironment = createGauge({
+            name: 'largest_project_environment_size',
+            help: 'The largest project environment size (bytes) based on strategies, constraints, variants and parameters',
+            labelNames: ['project', 'environment'],
+        });
+        const largestFeatureEnvironment = createGauge({
+            name: 'largest_feature_environment_size',
+            help: 'The largest feature environment size (bytes) based on strategies, constraints, variants and parameters',
+            labelNames: ['feature', 'environment'],
+        });

         const featureTogglesArchivedTotal = createGauge({
             name: 'feature_toggles_archived_total',
@@ -313,6 +323,8 @@ export default class MetricsMonitor {
                 maxConstraintsPerStrategyResult,
                 stageCountByProjectResult,
                 stageDurationByProject,
+                largestProjectEnvironments,
+                largestFeatureEnvironments,
             ] = await Promise.all([
                 stores.featureStrategiesReadModel.getMaxFeatureStrategies(),
                 stores.featureStrategiesReadModel.getMaxFeatureEnvironmentStrategies(),
@@ -320,6 +332,12 @@
                 stores.featureStrategiesReadModel.getMaxConstraintsPerStrategy(),
                 stores.featureLifecycleReadModel.getStageCountByProject(),
                 stores.featureLifecycleReadModel.getAllWithStageDuration(),
+                stores.largestResourcesReadModel.getLargestProjectEnvironments(
+                    1,
+                ),
+                stores.largestResourcesReadModel.getLargestFeatureEnvironments(
+                    1,
+                ),
             ]);

             featureFlagsTotal.reset();
@@ -403,6 +421,28 @@
                     .set(maxConstraintsPerStrategyResult.count);
             }

+            if (largestProjectEnvironments.length > 0) {
+                const projectEnvironment = largestProjectEnvironments[0];
+                largestProjectEnvironment.reset();
+                largestProjectEnvironment
+                    .labels({
+                        project: projectEnvironment.project,
+                        environment: projectEnvironment.environment,
+                    })
+                    .set(projectEnvironment.size);
+            }
+
+            if (largestFeatureEnvironments.length > 0) {
+                const featureEnvironment = largestFeatureEnvironments[0];
+                largestFeatureEnvironment.reset();
+                largestFeatureEnvironment
+                    .labels({
+                        feature: featureEnvironment.feature,
+                        environment: featureEnvironment.environment,
+                    })
+                    .set(featureEnvironment.size);
+            }
+
             enabledMetricsBucketsPreviousDay.reset();
             enabledMetricsBucketsPreviousDay.set(
                 stats.previousDayMetricsBucketsCount.enabledCount,
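Note: the two gauge updates above follow the same shape — skip if the read model returned nothing, reset the gauge, then set the labelled value from the first (largest) row. If that pattern grows, it could be folded into a small helper along the lines of the sketch below; setLargestResourceGauge and the narrowed LabelledGauge type are hypothetical and not part of this commit.

// Hypothetical helper (not in this commit) capturing the repeated
// "reset, then record the single largest row" pattern used for both gauges.
type LabelledGauge = {
    reset(): void;
    labels(labels: Record<string, string>): { set(value: number): void };
};

function setLargestResourceGauge<T extends { size: number }>(
    gauge: LabelledGauge,
    rows: T[],
    toLabels: (row: T) => Record<string, string>,
): void {
    if (rows.length === 0) return; // nothing measured yet
    const [largest] = rows; // the read model returns rows ordered by size DESC
    gauge.reset();
    gauge.labels(toLabels(largest)).set(largest.size);
}

// Usage, mirroring the two blocks above:
// setLargestResourceGauge(largestProjectEnvironment, largestProjectEnvironments,
//     ({ project, environment }) => ({ project, environment }));
// setLargestResourceGauge(largestFeatureEnvironment, largestFeatureEnvironments,
//     ({ feature, environment }) => ({ feature, environment }));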
@@ -45,6 +45,7 @@ import { IFeatureLifecycleStore } from '../features/feature-lifecycle/feature-li
 import { IProjectFlagCreatorsReadModel } from '../features/project/project-flag-creators-read-model.type';
 import { IFeatureStrategiesReadModel } from '../features/feature-toggle/types/feature-strategies-read-model-type';
 import { IFeatureLifecycleReadModel } from '../features/feature-lifecycle/feature-lifecycle-read-model-type';
+import { ILargestResourcesReadModel } from '../features/metrics/sizes/largest-resources-read-model-type';

 export interface IUnleashStores {
     accessStore: IAccessStore;
@@ -94,6 +95,7 @@ export interface IUnleashStores {
     featureLifecycleStore: IFeatureLifecycleStore;
     featureStrategiesReadModel: IFeatureStrategiesReadModel;
     featureLifecycleReadModel: IFeatureLifecycleReadModel;
+    largestResourcesReadModel: ILargestResourcesReadModel;
 }

 export {
@@ -142,4 +144,5 @@ export {
     IProjectFlagCreatorsReadModel,
     IFeatureStrategiesReadModel,
     IFeatureLifecycleReadModel,
+    ILargestResourcesReadModel,
 };
src/test/fixtures/store.ts (vendored)
@@ -48,6 +48,7 @@ import { FakeFeatureLifecycleStore } from '../../lib/features/feature-lifecycle/
 import { FakeProjectFlagCreatorsReadModel } from '../../lib/features/project/fake-project-flag-creators-read-model';
 import { FakeFeatureStrategiesReadModel } from '../../lib/features/feature-toggle/fake-feature-strategies-read-model';
 import { FakeFeatureLifecycleReadModel } from '../../lib/features/feature-lifecycle/fake-feature-lifecycle-read-model';
+import { FakeLargestResourcesReadModel } from '../../lib/features/metrics/sizes/fake-largest-resources-read-model';

 const db = {
     select: () => ({
@@ -105,6 +106,7 @@ const createStores: () => IUnleashStores = () => {
         featureLifecycleStore: new FakeFeatureLifecycleStore(),
         featureStrategiesReadModel: new FakeFeatureStrategiesReadModel(),
         featureLifecycleReadModel: new FakeFeatureLifecycleReadModel(),
+        largestResourcesReadModel: new FakeLargestResourcesReadModel(),
     };
 };