2020-11-04 13:31:25 +01:00
|
|
|
import datetime
|
|
|
|
import logging
|
2020-07-09 13:57:16 +02:00
|
|
|
import os
|
2020-11-04 13:31:25 +01:00
|
|
|
import queue
|
2020-07-09 13:57:16 +02:00
|
|
|
import threading
|
2020-11-24 14:27:51 +01:00
|
|
|
from pathlib import Path
|
2020-11-04 13:31:25 +01:00
|
|
|
|
2020-11-24 15:09:16 +01:00
|
|
|
from peewee import fn
|
|
|
|
|
2023-04-23 17:45:19 +02:00
|
|
|
from frigate.config import EventsConfig, FrigateConfig
|
2021-09-11 15:34:27 +02:00
|
|
|
from frigate.const import CLIPS_DIR
|
|
|
|
from frigate.models import Event
|
2023-04-23 17:45:19 +02:00
|
|
|
from frigate.timeline import TimelineSourceEnum
|
2022-11-19 14:16:33 +01:00
|
|
|
from frigate.types import CameraMetricsTypes
|
2023-04-24 14:24:28 +02:00
|
|
|
from frigate.util import to_relative_box
|
2022-11-19 14:16:33 +01:00
|
|
|
|
|
|
|
from multiprocessing.queues import Queue
|
|
|
|
from multiprocessing.synchronize import Event as MpEvent
|
|
|
|
from typing import Dict
|
2021-09-11 15:34:27 +02:00
|
|
|
|
2020-11-04 04:26:39 +01:00
|
|
|
# Module-level logger shared by the event processor/cleanup threads below.
logger = logging.getLogger(__name__)
|
|
|
|
|
2022-04-16 15:42:44 +02:00
|
|
|
|
2022-11-19 14:16:33 +01:00
|
|
|
def should_update_db(prev_event: Event, current_event: Event) -> bool:
    """If current_event has updated fields and (clip or snapshot)."""
    # nothing to persist until the event has media attached
    if not (current_event["has_clip"] or current_event["has_snapshot"]):
        return False

    # first time either media flag turned true -> always write
    if not (prev_event["has_clip"] or prev_event["has_snapshot"]):
        return True

    # otherwise only write when one of the tracked fields changed
    watched_fields = ("top_score", "entered_zones", "thumbnail", "end_time")
    return any(
        prev_event[field] != current_event[field] for field in watched_fields
    )
|
2022-02-08 14:12:00 +01:00
|
|
|
|
|
|
|
|
2020-07-09 13:57:16 +02:00
|
|
|
class EventProcessor(threading.Thread):
    """Persist tracked-object events to the database.

    Consumes ``(event_type, camera, event_data)`` tuples from ``event_queue``,
    mirrors every message onto ``timeline_queue``, and upserts an ``Event``
    row whenever a message carries changes worth writing (per
    ``should_update_db``).
    """

    def __init__(
        self,
        config: FrigateConfig,
        camera_processes: dict[str, CameraMetricsTypes],
        event_queue: Queue,
        event_processed_queue: Queue,
        timeline_queue: Queue,
        stop_event: MpEvent,
    ):
        threading.Thread.__init__(self)
        self.name = "event_processor"
        self.config = config
        self.camera_processes = camera_processes
        self.event_queue = event_queue
        self.event_processed_queue = event_processed_queue
        self.timeline_queue = timeline_queue
        # last seen copy of each in-progress event keyed by event id,
        # used to diff incoming updates against what was last stored
        self.events_in_process: dict[str, Event] = {}
        self.stop_event = stop_event

    def run(self) -> None:
        """Process queued event messages until the stop event is set."""
        # set an end_time on events without an end_time on startup
        # (rows left open by a previous unclean shutdown)
        Event.update(end_time=Event.start_time + 30).where(
            Event.end_time == None  # peewee expression; `is None` would not build SQL
        ).execute()

        while not self.stop_event.is_set():
            try:
                event_type, camera, event_data = self.event_queue.get(timeout=1)
            except queue.Empty:
                continue

            logger.debug(f"Event received: {event_type} {camera} {event_data['id']}")

            # forward every message to the timeline, including the previously
            # stored copy (if any) so the consumer can compute deltas
            self.timeline_queue.put(
                (
                    camera,
                    TimelineSourceEnum.tracked_object,
                    event_type,
                    self.events_in_process.get(event_data["id"]),
                    event_data,
                )
            )

            # if this is the first message, just store it and continue,
            # it's not time to insert it in the db
            if event_type == "start":
                self.events_in_process[event_data["id"]] = event_data
                continue

            if should_update_db(self.events_in_process[event_data["id"]], event_data):
                camera_config = self.config.cameras[camera]
                event_config: EventsConfig = camera_config.record.events
                width = camera_config.detect.width
                height = camera_config.detect.height
                first_detector = list(self.config.detectors.values())[0]

                # pad the stored window with the configured pre/post capture
                start_time = event_data["start_time"] - event_config.pre_capture
                end_time = (
                    None
                    if event_data["end_time"] is None
                    else event_data["end_time"] + event_config.post_capture
                )
                # score of the snapshot
                score = (
                    None
                    if event_data["snapshot"] is None
                    else event_data["snapshot"]["score"]
                )
                # detection region in the snapshot
                region = (
                    None
                    if event_data["snapshot"] is None
                    else to_relative_box(
                        width,
                        height,
                        event_data["snapshot"]["region"],
                    )
                )
                # bounding box for the snapshot
                box = (
                    None
                    if event_data["snapshot"] is None
                    else to_relative_box(
                        width,
                        height,
                        event_data["snapshot"]["box"],
                    )
                )

                # keep these from being set back to false because the event
                # may have started while recordings and snapshots were enabled
                # this would be an issue for long running events
                if self.events_in_process[event_data["id"]]["has_clip"]:
                    event_data["has_clip"] = True
                if self.events_in_process[event_data["id"]]["has_snapshot"]:
                    event_data["has_snapshot"] = True

                event = {
                    Event.id: event_data["id"],
                    Event.label: event_data["label"],
                    Event.camera: camera,
                    Event.start_time: start_time,
                    Event.end_time: end_time,
                    Event.top_score: event_data["top_score"],
                    Event.score: score,
                    Event.zones: list(event_data["entered_zones"]),
                    Event.thumbnail: event_data["thumbnail"],
                    Event.region: region,
                    Event.box: box,
                    Event.has_clip: event_data["has_clip"],
                    Event.has_snapshot: event_data["has_snapshot"],
                    Event.model_hash: first_detector.model.model_hash,
                    Event.model_type: first_detector.model.model_type,
                    Event.detector_type: first_detector.type,
                }

                # upsert: insert the row, or update it in place on id conflict
                (
                    Event.insert(event)
                    .on_conflict(
                        conflict_target=[Event.id],
                        update=event,
                    )
                    .execute()
                )

                # update the stored copy for comparison on future update messages
                self.events_in_process[event_data["id"]] = event_data

            if event_type == "end":
                del self.events_in_process[event_data["id"]]
                self.event_processed_queue.put((event_data["id"], camera))

        # set an end_time on events without an end_time before exiting
        Event.update(end_time=datetime.datetime.now().timestamp()).where(
            Event.end_time == None  # peewee expression
        ).execute()
        logger.info("Exiting event processor...")
|
|
|
|
|
2020-07-09 13:57:16 +02:00
|
|
|
|
2020-11-24 14:27:51 +01:00
|
|
|
class EventCleanup(threading.Thread):
    """Periodically expire events and their on-disk media per retention config,
    remove duplicate events, and drop rows with no remaining media."""

    def __init__(self, config: FrigateConfig, stop_event: MpEvent):
        threading.Thread.__init__(self)
        self.name = "event_cleanup"
        self.config = config
        self.stop_event = stop_event
        # cameras currently present in the config; events for any other
        # camera name fall back to the global retention settings
        self.camera_keys = list(self.config.cameras.keys())

    def _delete_media(self, event: Event, file_extension: str) -> None:
        """Remove an event's media file from disk; for snapshots, also remove
        the companion ``-clean.png`` image. Missing files are ignored."""
        media_name = f"{event.camera}-{event.id}"
        media_path = Path(
            f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
        )
        media_path.unlink(missing_ok=True)
        if file_extension == "jpg":
            media_path = Path(
                f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
            )
            media_path.unlink(missing_ok=True)

    def expire(self, media_type: str) -> None:
        """Expire media of the given type ("clips" or "snapshots") and clear
        the corresponding has_clip/has_snapshot flag on expired rows."""
        # TODO: Refactor media_type to enum
        ## Expire events from unlisted cameras based on the global config
        if media_type == "clips":
            retain_config = self.config.record.events.retain
            file_extension = "mp4"
            update_params = {"has_clip": False}
        else:
            retain_config = self.config.snapshots.retain
            file_extension = "jpg"
            update_params = {"has_snapshot": False}

        distinct_labels = (
            Event.select(Event.label)
            .where(Event.camera.not_in(self.camera_keys))
            .distinct()
        )

        # loop over object types in db
        for label_row in distinct_labels:
            # get expiration time for this label
            expire_days = retain_config.objects.get(
                label_row.label, retain_config.default
            )
            expire_after = (
                datetime.datetime.now() - datetime.timedelta(days=expire_days)
            ).timestamp()
            # grab all events after specific time
            expired_events = Event.select().where(
                Event.camera.not_in(self.camera_keys),
                Event.start_time < expire_after,
                Event.label == label_row.label,
                Event.retain_indefinitely == False,  # peewee expression
            )
            # delete the media from disk
            for event in expired_events:
                self._delete_media(event, file_extension)

            # update the clips attribute for the db entry
            update_query = Event.update(update_params).where(
                Event.camera.not_in(self.camera_keys),
                Event.start_time < expire_after,
                Event.label == label_row.label,
                Event.retain_indefinitely == False,
            )
            update_query.execute()

        ## Expire events from cameras based on the camera config
        for name, camera in self.config.cameras.items():
            if media_type == "clips":
                retain_config = camera.record.events.retain
            else:
                retain_config = camera.snapshots.retain
            # get distinct objects in database for this camera
            distinct_labels = (
                Event.select(Event.label).where(Event.camera == name).distinct()
            )

            # loop over object types in db
            for label_row in distinct_labels:
                # get expiration time for this label
                expire_days = retain_config.objects.get(
                    label_row.label, retain_config.default
                )
                expire_after = (
                    datetime.datetime.now() - datetime.timedelta(days=expire_days)
                ).timestamp()
                # grab all events after specific time
                expired_events = Event.select().where(
                    Event.camera == name,
                    Event.start_time < expire_after,
                    Event.label == label_row.label,
                    Event.retain_indefinitely == False,
                )
                # delete the grabbed clips from disk
                for event in expired_events:
                    self._delete_media(event, file_extension)
                # update the clips attribute for the db entry
                update_query = Event.update(update_params).where(
                    Event.camera == name,
                    Event.start_time < expire_after,
                    Event.label == label_row.label,
                    Event.retain_indefinitely == False,
                )
                update_query.execute()

    def purge_duplicates(self) -> None:
        """Delete events that duplicate a longer event with the same label and
        camera in the same ~5 second window, keeping the longest one."""
        duplicate_query = """with grouped_events as (
          select id,
            label,
            camera,
            has_snapshot,
            has_clip,
            row_number() over (
              partition by label, camera, round(start_time/5,0)*5
              order by end_time-start_time desc
            ) as copy_number
          from event
        )

        select distinct id, camera, has_snapshot, has_clip from grouped_events
        where copy_number > 1;"""

        duplicate_events = Event.raw(duplicate_query)
        for event in duplicate_events:
            logger.debug(f"Removing duplicate: {event.id}")
            # remove all media this duplicate may have, regardless of flags
            media_name = f"{event.camera}-{event.id}"
            media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
            media_path.unlink(missing_ok=True)
            media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.png")
            media_path.unlink(missing_ok=True)
            media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4")
            media_path.unlink(missing_ok=True)

        (
            Event.delete()
            .where(Event.id << [event.id for event in duplicate_events])
            .execute()
        )

    def run(self) -> None:
        """Run the cleanup cycle every 5 minutes until stopped, then drop rows
        that no longer reference any media."""
        # only expire events every 5 minutes
        while not self.stop_event.wait(300):
            self.expire("clips")
            self.expire("snapshots")
            self.purge_duplicates()

        # drop events from db where has_clip and has_snapshot are false
        delete_query = Event.delete().where(
            Event.has_clip == False, Event.has_snapshot == False  # peewee expressions
        )
        delete_query.execute()

        logger.info("Exiting event cleanup...")
|