blakeblackshear.frigate/web/__test__/handlers.js
Nicolas Mowen c3b313a70d
Audio events (#6848)
* Initial audio classification model implementation
* fix mypy
* Keep audio labelmap local
* Cleanup
* Start adding config for audio
* Add the detector
* Add audio detection process keypoints
* Build out base config
* Load labelmap correctly
* Fix config bugs
* Start audio process
* Fix startup issues
* Try to cleanup restarting
* Add ffmpeg input args
* Get audio detection working
* Save event to db
* End events if not heard for 30 seconds
* Use not heard config
* Stop ffmpeg when shutting down
* Fixes
* End events correctly
* Use api instead of event queue to save audio events
* Get events working
* Close threads when stop event is sent
* remove unused
* Only start audio process if at least one camera is enabled
* Add const for float
* Cleanup labelmap
* Add audio icon in frontend
* Add ability to toggle audio with mqtt
* Set initial audio value
* Fix audio enabling
* Close logpipe
* Isort
* Formatting
* Fix web tests
* Fix web tests
* Handle cases where args are a string
* Remove log
* Cleanup process close
* Use correct field
* Simplify if statement
* Use var for localhost
* Add audio detectors docs
* Add restream docs to mention audio detection
* Add full config docs
* Fix links to other docs

---------

Co-authored-by: Jason Hunter <hunterjm@gmail.com>
2023-07-01 08:18:33 -05:00


import { rest } from 'msw';
import { API_HOST } from '../src/env';
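
// msw request handlers that mock the Frigate HTTP API for the web UI tests.
// Each handler returns canned JSON so components can render without a running
// backend; the per-camera `audio` block mirrors the config field added by the
// audio events feature ("Fix web tests" above).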
export const handlers = [
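  // GET /api/config: static config with two cameras (front, side); audio and
  // record carry the `enabled` / `enabled_in_config` pairs the UI toggles read.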
  rest.get(`${API_HOST}api/config`, (req, res, ctx) => {
    return res(
      ctx.status(200),
      ctx.json({
        mqtt: {
          stats_interval: 60,
        },
        service: {
          version: '0.8.3',
        },
        cameras: {
          front: {
            name: 'front',
            objects: { track: ['taco', 'cat', 'dog'] },
            audio: { enabled: false, enabled_in_config: false },
            record: { enabled: true, enabled_in_config: true },
            detect: { width: 1280, height: 720 },
            snapshots: {},
            restream: { enabled: true, jsmpeg: { height: 720 } },
            ui: { dashboard: true, order: 0 },
          },
          side: {
            name: 'side',
            objects: { track: ['taco', 'cat', 'dog'] },
            audio: { enabled: false, enabled_in_config: false },
            record: { enabled: false, enabled_in_config: true },
            detect: { width: 1280, height: 720 },
            snapshots: {},
            restream: { enabled: true, jsmpeg: { height: 720 } },
            ui: { dashboard: true, order: 1 },
          },
        },
      })
    );
  }),
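  // GET /api/stats: per-process CPU/memory usage keyed by pid, detector stats,
  // and per-camera fps counters.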
  rest.get(`${API_HOST}api/stats`, (req, res, ctx) => {
    return res(
      ctx.status(200),
      ctx.json({
        cpu_usages: {
          74: { cpu: 6, mem: 6 },
          64: { cpu: 5, mem: 5 },
          54: { cpu: 4, mem: 4 },
          71: { cpu: 3, mem: 3 },
          60: { cpu: 2, mem: 2 },
          72: { cpu: 1, mem: 1 },
        },
        detection_fps: 0.0,
        detectors: { coral: { detection_start: 0.0, inference_speed: 8.94, pid: 52 } },
        front: {
          camera_fps: 5.0,
          capture_pid: 64,
          detection_fps: 0.0,
          pid: 54,
          process_fps: 0.0,
          skipped_fps: 0.0,
          ffmpeg_pid: 72,
        },
        side: {
          camera_fps: 6.9,
          capture_pid: 71,
          detection_fps: 0.0,
          pid: 60,
          process_fps: 0.0,
          skipped_fps: 0.0,
          ffmpeg_pid: 74,
        },
        service: { uptime: 34812, version: '0.8.1-d376f6b' },
      })
    );
  }),
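  // GET /api/events: twelve synthetic `person` events with staggered start/end
  // times and a truncated base64 thumbnail.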
  rest.get(`${API_HOST}api/events`, (req, res, ctx) => {
    return res(
      ctx.status(200),
      ctx.json(
        new Array(12).fill(null).map((v, i) => ({
          end_time: 1613257337 + i,
          has_clip: true,
          has_snapshot: true,
          id: i,
          label: 'person',
          start_time: 1613257326 + i,
          top_score: Math.random(),
          zones: ['front_patio'],
          thumbnail: '/9j/4aa...',
          camera: 'camera_name',
        }))
      )
    );
  }),
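  // GET /api/sub_labels: a fixed list of sub labels.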
  rest.get(`${API_HOST}api/sub_labels`, (req, res, ctx) => {
    return res(
      ctx.status(200),
      ctx.json([
        'one',
        'two',
      ])
    );
  }),
];
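
// A minimal sketch of how these handlers are typically consumed, using msw's
// standard node setup; the exact bootstrap file and options used by this
// project's test runner may differ.
//
//   import { setupServer } from 'msw/node';
//   import { handlers } from './handlers';
//
//   const server = setupServer(...handlers);
//   beforeAll(() => server.listen({ onUnhandledRequest: 'error' }));
//   afterEach(() => server.resetHandlers());
//   afterAll(() => server.close());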