Mirror of https://github.com/blakeblackshear/frigate.git
Load labels dynamically for event filters (#6896)
* Load labels dynamically to include custom events and audio, do not include attribute labels
* Formatting
* Fix sorting
* Also filter tracked object list on camera page
* isort
* Don't fail before load
Parent: 3d40ed5d47
Commit: ece070fee1
Shared attribute-label constants are added to frigate.const:

@@ -12,6 +12,16 @@ PLUS_ENV_VAR = "PLUS_API_KEY"
 PLUS_API_HOST = "https://api.frigate.video"
 BTBN_PATH = "/usr/lib/btbn-ffmpeg"
 
+# Attributes
+
+ATTRIBUTE_LABEL_MAP = {
+    "person": ["face", "amazon"],
+    "car": ["ups", "fedex", "amazon", "license_plate"],
+}
+ALL_ATTRIBUTE_LABELS = [
+    item for sublist in ATTRIBUTE_LABEL_MAP.values() for item in sublist
+]
+
 # Regex Consts
 
 REGEX_CAMERA_NAME = r"^[a-zA-Z0-9_-]+$"
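As a quick standalone check (not part of the commit), the comprehension flattens the map's values in insertion order and keeps duplicates such as "amazon":

# Quick check: what ALL_ATTRIBUTE_LABELS evaluates to.
ATTRIBUTE_LABEL_MAP = {
    "person": ["face", "amazon"],
    "car": ["ups", "fedex", "amazon", "license_plate"],
}
ALL_ATTRIBUTE_LABELS = [
    item for sublist in ATTRIBUTE_LABEL_MAP.values() for item in sublist
]
print(ALL_ATTRIBUTE_LABELS)
# ['face', 'amazon', 'ups', 'fedex', 'amazon', 'license_plate']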
A new /labels endpoint returns the distinct labels stored in the events table, optionally filtered by camera:

@@ -410,6 +410,24 @@ def set_sub_label(id):
     )
 
 
+@bp.route("/labels")
+def get_labels():
+    camera = request.args.get("camera", type=str, default="")
+
+    try:
+        if camera:
+            events = Event.select(Event.label).where(Event.camera == camera).distinct()
+        else:
+            events = Event.select(Event.label).distinct()
+    except Exception as e:
+        return jsonify(
+            {"success": False, "message": f"Failed to get labels: {e}"}, "404"
+        )
+
+    labels = sorted([e.label for e in events])
+    return jsonify(labels)
+
+
 @bp.route("/sub_labels")
 def get_sub_labels():
     split_joined = request.args.get("split_joined", type=int)
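A minimal sketch of calling the new endpoint, assuming a Frigate instance serving its API at http://localhost:5000/api (the base URL and camera name below are placeholders, not part of the commit):

import requests

API = "http://localhost:5000/api"  # assumed Frigate API base URL

# All distinct labels recorded in the events table.
all_labels = requests.get(f"{API}/labels").json()

# Labels restricted to a single camera via the optional ?camera= parameter.
front_labels = requests.get(f"{API}/labels", params={"camera": "front_door"}).json()

print(all_labels, front_labels)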
The video processing code imports the shared constants from frigate.const instead of defining them locally:

@@ -15,7 +15,7 @@ import numpy as np
 from setproctitle import setproctitle
 
 from frigate.config import CameraConfig, DetectConfig
-from frigate.const import CACHE_DIR
+from frigate.const import ALL_ATTRIBUTE_LABELS, ATTRIBUTE_LABEL_MAP, CACHE_DIR
 from frigate.detectors.detector_config import PixelFormatEnum
 from frigate.log import LogPipe
 from frigate.motion import MotionDetector
The locally defined attribute label map is removed from process_frames:

@@ -723,14 +723,6 @@ def process_frames(
     stop_event,
     exit_on_empty: bool = False,
 ):
-    # attribute labels are not tracked and are not assigned regions
-    attribute_label_map = {
-        "person": ["face", "amazon"],
-        "car": ["ups", "fedex", "amazon", "license_plate"],
-    }
-    all_attribute_labels = [
-        item for sublist in attribute_label_map.values() for item in sublist
-    ]
     fps = process_info["process_fps"]
     detection_fps = process_info["detection_fps"]
     current_frame_time = process_info["detection_frame"]
Filtering of tracked detections now uses the shared ALL_ATTRIBUTE_LABELS constant:

@@ -906,7 +898,7 @@ def process_frames(
             tracked_detections = [
                 d
                 for d in consolidated_detections
-                if d[0] not in all_attribute_labels
+                if d[0] not in ALL_ATTRIBUTE_LABELS
             ]
             # now that we have refined our detections, we need to track objects
             object_tracker.match_and_update(frame_time, tracked_detections)
Grouping of attribute detections now uses the shared ATTRIBUTE_LABEL_MAP constant:

@@ -916,7 +908,7 @@ def process_frames(
 
             # group the attribute detections based on what label they apply to
             attribute_detections = {}
-            for label, attribute_labels in attribute_label_map.items():
+            for label, attribute_labels in ATTRIBUTE_LABEL_MAP.items():
                 attribute_detections[label] = [
                     d for d in consolidated_detections if d[0] in attribute_labels
                 ]
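To illustrate the two hunks above, here is a standalone sketch of the split between tracked and attribute detections; the detection tuples are simplified to (label, score) and the constants are inlined rather than imported from frigate.const:

# Inlined copies of the constants added in this commit (normally imported
# from frigate.const); detection tuples are simplified to (label, score).
ATTRIBUTE_LABEL_MAP = {
    "person": ["face", "amazon"],
    "car": ["ups", "fedex", "amazon", "license_plate"],
}
ALL_ATTRIBUTE_LABELS = [
    item for sublist in ATTRIBUTE_LABEL_MAP.values() for item in sublist
]

consolidated_detections = [
    ("person", 0.92),
    ("face", 0.88),
    ("car", 0.81),
    ("license_plate", 0.77),
]

# Attribute labels are not tracked, so they are filtered out before tracking.
tracked_detections = [
    d for d in consolidated_detections if d[0] not in ALL_ATTRIBUTE_LABELS
]
# -> [("person", 0.92), ("car", 0.81)]

# Attribute detections are grouped by the object label they can attach to.
attribute_detections = {
    label: [d for d in consolidated_detections if d[0] in attribute_labels]
    for label, attribute_labels in ATTRIBUTE_LABEL_MAP.items()
}
# -> {"person": [("face", 0.88)], "car": [("license_plate", 0.77)]}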
The camera page fetches tracked labels for the current camera from the new endpoint:

@@ -22,6 +22,7 @@ const emptyObject = Object.freeze({});
 
 export default function Camera({ camera }) {
   const { data: config } = useSWR('config');
+  const { data: trackedLabels } = useSWR(['labels', { camera }]);
   const apiHost = useApiHost();
   const [showSettings, setShowSettings] = useState(false);
   const [viewMode, setViewMode] = useState('live');
Formatting-only change to the video-stream src attribute on the camera page:

@@ -121,7 +122,9 @@ export default function Camera({ camera }) {
       <div className="max-w-5xl">
         <video-stream
           mode="mse"
-          src={new URL(`${baseUrl.replace(/^http/, 'ws')}live/webrtc/api/ws?src=${cameraConfig.live.stream_name}`)}
+          src={
+            new URL(`${baseUrl.replace(/^http/, 'ws')}live/webrtc/api/ws?src=${cameraConfig.live.stream_name}`)
+          }
         />
       </div>
     </Fragment>
The tracked objects list on the camera page is built from the fetched labels instead of the camera's configured track list:

@@ -203,7 +206,7 @@ export default function Camera({ camera }) {
       <div className="space-y-4">
         <Heading size="sm">Tracked objects</Heading>
         <div className="flex flex-wrap justify-start">
-          {cameraConfig.objects.track.map((objectType) => (
+          {(trackedLabels || []).map((objectType) => (
             <Card
               className="mb-4 mr-4"
               key={objectType}
The events page fetches all labels from the new endpoint:

@@ -106,6 +106,7 @@ export default function Events({ path, ...props }) {
 
   const { data: config } = useSWR('config');
 
+  const { data: allLabels } = useSWR(['labels']);
   const { data: allSubLabels } = useSWR(['sub_labels', { split_joined: 1 }]);
 
   const filterValues = useMemo(
The label filter options now come from the fetched labels instead of being derived from the camera config:

@@ -120,15 +121,10 @@ export default function Events({ path, ...props }) {
           .filter((value, i, self) => self.indexOf(value) === i),
         'None',
       ],
-      labels: Object.values(config?.cameras || {})
-        .reduce((memo, camera) => {
-          memo = memo.concat(camera?.objects?.track || []);
-          return memo;
-        }, config?.objects?.track || [])
-        .filter((value, i, self) => self.indexOf(value) === i),
+      labels: Object.values(allLabels || {}),
       sub_labels: (allSubLabels || []).length > 0 ? [...Object.values(allSubLabels), 'None'] : [],
     }),
-    [config, allSubLabels]
+    [config, allLabels, allSubLabels]
   );
 
   const onSave = async (e, eventId, save) => {