
feat: using histogram metrics (#10695)

Mateusz Kwasniewski 2025-09-24 17:02:29 +02:00 committed by GitHub
parent 59bf19bf96
commit 413847374e
5 changed files with 116 additions and 7 deletions


@@ -16,6 +16,7 @@ export class BatchHistogram {
    private help: string;
    private registry: Registry;
    public labelNames: string[] = [];
    public bucketBoundaries: Set<number | '+Inf'> = new Set();

    // Store accumulated data for each label combination
    private store: Map<
@@ -63,6 +64,7 @@
        for (const bucket of data.buckets) {
            const current = entry.buckets.get(bucket.le) || 0;
            entry.buckets.set(bucket.le, current + bucket.count);
            this.bucketBoundaries.add(bucket.le);
        }
    }
@@ -73,6 +75,7 @@
    reset(): void {
        this.store.clear();
        this.bucketBoundaries.clear();
    }

    get() {

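The new `bucketBoundaries` set records every bucket boundary the histogram has accumulated, so the translator can later tell whether an incoming batch uses boundaries the metric was not created with. Below is a minimal standalone sketch of that bookkeeping, in simplified TypeScript; it is not the actual Unleash class, just an illustration of the idea.

```typescript
// Minimal sketch (not the actual Unleash BatchHistogram): accumulate per-bucket
// counts and remember every boundary ever recorded, so callers can compare an
// incoming batch against the boundaries this histogram already knows about.
type Boundary = number | '+Inf';

class MiniBatchHistogram {
    public bucketBoundaries: Set<Boundary> = new Set();
    private buckets: Map<Boundary, number> = new Map();

    recordBatch(buckets: Array<{ le: Boundary; count: number }>): void {
        for (const bucket of buckets) {
            const current = this.buckets.get(bucket.le) || 0;
            this.buckets.set(bucket.le, current + bucket.count);
            this.bucketBoundaries.add(bucket.le); // track which boundaries have been seen
        }
    }

    reset(): void {
        this.buckets.clear();
        this.bucketBoundaries.clear(); // boundaries are forgotten on reset, as in the diff above
    }
}

const h = new MiniBatchHistogram();
h.recordBatch([{ le: 1, count: 3 }, { le: '+Inf', count: 5 }]);
console.log([...h.bucketBoundaries]); // [1, '+Inf']
```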

@@ -5,6 +5,8 @@ export const CLIENT_ERROR_COUNT = 'client_error_count';
export const SERVER_ERROR_COUNT = 'server_error_count';
export const REQUEST_COUNT = 'request_count';
export const HEAP_MEMORY_TOTAL = 'heap_memory_total';
export const REQUEST_TIME_MS = 'request_time_ms';
export const SCHEDULER_JOB_TIME_SECONDS = 'scheduler_job_time_seconds';

export const defineImpactMetrics = (flagResolver: IFlagResolver) => {
    flagResolver.impactMetrics?.defineCounter(
@@ -27,4 +29,14 @@ export const defineImpactMetrics = (flagResolver: IFlagResolver) => {
        HEAP_MEMORY_TOTAL,
        'Total heap memory used by the application process',
    );
    flagResolver.impactMetrics?.defineHistogram(
        REQUEST_TIME_MS,
        'Request handling time',
        [10, 25, 50, 100, 200, 500, 1000, 2000, 5000],
    );
    flagResolver.impactMetrics?.defineHistogram(
        SCHEDULER_JOB_TIME_SECONDS,
        'Job execution time',
        [1, 2, 3, 4, 5, 7, 10, 15, 20, 30, 45, 60, 120],
    );
};

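The bucket arrays follow Prometheus histogram semantics: each `le` boundary counts observations less than or equal to it, cumulatively, with `+Inf` covering every observation. A small illustrative sketch of how a few request times would fall into the `REQUEST_TIME_MS` boundaries defined above; the observation values are made up and this is not Unleash code.

```typescript
// Sketch of Prometheus cumulative-bucket semantics for the REQUEST_TIME_MS
// boundaries defined above; illustrative only.
const boundaries = [10, 25, 50, 100, 200, 500, 1000, 2000, 5000];
const observations = [8, 42, 180, 7500]; // request times in ms (made-up values)

const bucketCounts = new Map<number | '+Inf', number>();
for (const le of [...boundaries, '+Inf' as const]) {
    bucketCounts.set(le, 0);
}

for (const value of observations) {
    // a value counts toward every bucket whose upper bound is >= the value
    for (const le of boundaries) {
        if (value <= le) {
            bucketCounts.set(le, (bucketCounts.get(le) || 0) + 1);
        }
    }
    bucketCounts.set('+Inf', (bucketCounts.get('+Inf') || 0) + 1);
}

console.log(bucketCounts.get(10)); // 1  (only the 8 ms request)
console.log(bucketCounts.get(200)); // 3  (8, 42, 180)
console.log(bucketCounts.get('+Inf')); // 4  (every observation, including 7500 ms)
```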

@@ -279,4 +279,68 @@ describe('MetricsTranslator', () => {
            'unleash_histogram_histogram_with_labels_count{unleash_origin="sdk",unleash_service="api"} 5',
        );
    });

    it('should handle histogram bucket changes', async () => {
        const registry = new Registry();
        const translator = new MetricsTranslator(registry);

        // Initial histogram with 2 buckets
        const metrics1 = [
            {
                name: 'test_histogram',
                help: 'test histogram',
                type: 'histogram' as const,
                samples: [
                    {
                        count: 5,
                        sum: 2.5,
                        buckets: [
                            { le: 1, count: 3 },
                            { le: '+Inf' as const, count: 5 },
                        ],
                    },
                ],
            },
        ];

        const result1 = await translator.translateAndSerializeMetrics(metrics1);
        expect(result1).toContain(
            'unleash_histogram_test_histogram_bucket{unleash_origin="sdk",le="1"} 3',
        );
        expect(result1).toContain(
            'unleash_histogram_test_histogram_count{unleash_origin="sdk"} 5',
        );

        // Same histogram with different bucket (0.5 instead of 1)
        const metrics2 = [
            {
                name: 'test_histogram',
                help: 'test histogram',
                type: 'histogram' as const,
                samples: [
                    {
                        count: 7,
                        sum: 3.5,
                        buckets: [
                            { le: 0.5, count: 4 }, // Different bucket boundary
                            { le: '+Inf' as const, count: 7 },
                        ],
                    },
                ],
            },
        ];

        const result2 = await translator.translateAndSerializeMetrics(metrics2);
        expect(result2).toContain(
            'unleash_histogram_test_histogram_bucket{unleash_origin="sdk",le="0.5"} 4',
        );
        expect(result2).toContain(
            'unleash_histogram_test_histogram_count{unleash_origin="sdk"} 7',
        );
        expect(result2).not.toContain(
            'unleash_histogram_test_histogram_count{unleash_origin="sdk"} 5',
        );
    });
});


@@ -53,6 +53,25 @@ export class MetricsTranslator {
        );
    }

    private hasNewBuckets(
        existingHistogram: BatchHistogram,
        newBuckets: Array<{ le: number | '+Inf'; count: number }>,
    ): boolean {
        const existingBoundaries = existingHistogram.bucketBoundaries;

        if (existingBoundaries.size !== newBuckets.length) {
            return true;
        }

        for (const bucket of newBuckets) {
            if (!existingBoundaries.has(bucket.le)) {
                return true;
            }
        }

        return false;
    }

    private transformLabels(
        labels: Record<string, string | number>,
    ): Record<string, string | number> {
@@ -167,7 +186,13 @@
        let histogram: BatchHistogram;
        if (existingMetric && existingMetric instanceof BatchHistogram) {
            if (this.hasNewLabels(existingMetric, labelNames)) {
            const firstSample = metric.samples[0]; // all samples should have same buckets
            const needsRecreation =
                !firstSample?.buckets ||
                this.hasNewLabels(existingMetric, labelNames) ||
                this.hasNewBuckets(existingMetric, firstSample.buckets);

            if (needsRecreation) {
                this.registry.removeSingleMetric(prefixedName);
                histogram = new BatchHistogram({

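Because prom-client histograms fix their buckets at construction time, the translator removes and re-registers the metric whenever a sample arrives with unknown boundaries, missing buckets, or new labels. Below is a standalone sketch of that decision, using a hypothetical `needsRecreation` helper that mirrors the `hasNewLabels`/`hasNewBuckets` checks above; the real `MetricsTranslator` differs in detail.

```typescript
// Standalone sketch (hypothetical helper, not the real MetricsTranslator) of
// deciding whether an incoming sample forces the histogram to be rebuilt.
type Boundary = number | '+Inf';

interface ExistingHistogram {
    labelNames: string[];
    bucketBoundaries: Set<Boundary>;
}

function needsRecreation(
    existing: ExistingHistogram,
    labelNames: string[],
    buckets: Array<{ le: Boundary; count: number }> | undefined,
): boolean {
    // No buckets at all: recreate rather than guess boundaries.
    if (!buckets) return true;
    // Any label the existing metric was not registered with forces a rebuild.
    if (labelNames.some((name) => !existing.labelNames.includes(name))) return true;
    // A different number of boundaries, or an unseen boundary, also forces it.
    if (existing.bucketBoundaries.size !== buckets.length) return true;
    return buckets.some((bucket) => !existing.bucketBoundaries.has(bucket.le));
}

const existing: ExistingHistogram = {
    labelNames: ['unleash_origin'],
    bucketBoundaries: new Set<Boundary>([1, '+Inf']),
};

console.log(
    needsRecreation(existing, ['unleash_origin'], [
        { le: 0.5, count: 4 },
        { le: '+Inf', count: 7 },
    ]),
); // true: 0.5 is a boundary the existing histogram has never seen
```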

@@ -41,10 +41,7 @@ import {
import type { SchedulerService } from './services/index.js';
import type { IClientMetricsEnv } from './features/metrics/client-metrics/client-metrics-store-v2-type.js';
import { DbMetricsMonitor } from './metrics-gauge.js';
import {
    HEAP_MEMORY_TOTAL,
    REQUEST_COUNT,
} from './features/metrics/impact/define-impact-metrics.js';
import * as impactMetrics from './features/metrics/impact/define-impact-metrics.js';

export function registerPrometheusPostgresMetrics(
    db: Knex,
@@ -798,15 +795,23 @@
                })
                .observe(time);

            config.flagResolver.impactMetrics?.incrementCounter(
                REQUEST_COUNT,
                impactMetrics.REQUEST_COUNT,
                1,
                { flagNames: ['consumptionModel'], context: {} },
            );
            config.flagResolver.impactMetrics?.observeHistogram(
                impactMetrics.REQUEST_TIME_MS,
                time,
            );
        },
    );

    eventBus.on(events.SCHEDULER_JOB_TIME, ({ jobId, time }) => {
        schedulerDuration.labels(jobId).observe(time);
        config.flagResolver.impactMetrics?.observeHistogram(
            impactMetrics.SCHEDULER_JOB_TIME_SECONDS,
            time,
        );
    });

    eventBus.on(events.FUNCTION_TIME, ({ functionName, className, time }) => {
@@ -1152,7 +1157,7 @@
        collectStaticCounters: async () => {
            try {
                config.flagResolver.impactMetrics?.updateGauge(
                    HEAP_MEMORY_TOTAL,
                    impactMetrics.HEAP_MEMORY_TOTAL,
                    process.memoryUsage().heapUsed,
                    { flagNames: ['consumptionModel'], context: {} },
                );
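The histograms are fed from the event bus: the `SCHEDULER_JOB_TIME` listener above receives `{ jobId, time }` and forwards the duration to both the Prometheus `schedulerDuration` histogram and the `SCHEDULER_JOB_TIME_SECONDS` impact metric. A hedged sketch of what the producer side could look like, using a plain Node `EventEmitter` and a made-up event name rather than Unleash's actual `SchedulerService`:

```typescript
// Hedged sketch of the producer side: the listener registered in the diff above
// receives { jobId, time }, so a scheduler would emit that payload after timing
// a job. Plain EventEmitter and a hypothetical event name, not Unleash internals.
import { EventEmitter } from 'node:events';

const SCHEDULER_JOB_TIME = 'scheduler-job-time'; // hypothetical event name
const eventBus = new EventEmitter();

eventBus.on(SCHEDULER_JOB_TIME, ({ jobId, time }: { jobId: string; time: number }) => {
    // In Unleash this is where schedulerDuration.labels(jobId).observe(time)
    // and observeHistogram(SCHEDULER_JOB_TIME_SECONDS, time) are called.
    console.log(`job ${jobId} took ${time.toFixed(3)}s`);
});

async function runTimedJob(jobId: string, job: () => Promise<void>): Promise<void> {
    const start = process.hrtime.bigint();
    try {
        await job();
    } finally {
        const seconds = Number(process.hrtime.bigint() - start) / 1e9;
        eventBus.emit(SCHEDULER_JOB_TIME, { jobId, time: seconds });
    }
}

void runTimedJob('example-job', async () => {
    await new Promise((resolve) => setTimeout(resolve, 50));
});
```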