"""Cleanup events based on configured retention."""

import datetime
import logging
import os
import threading
from enum import Enum
from multiprocessing.synchronize import Event as MpEvent
from pathlib import Path
from typing import Any, Optional

from frigate.config import FrigateConfig
from frigate.const import CLIPS_DIR
from frigate.db.sqlitevecq import SqliteVecQueueDatabase
from frigate.models import Event, Timeline

logger = logging.getLogger(__name__)


class EventCleanupType(str, Enum):
    clips = "clips"
    snapshots = "snapshots"
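

# CHUNK_SIZE caps how many event ids are bound into a single UPDATE/DELETE.
# Small batches keep each statement well under SQLite's bound-parameter limit
# and keep write transactions short; the choice of 50 specifically appears to
# be pragmatic rather than documented (assumption).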
CHUNK_SIZE = 50


class EventCleanup(threading.Thread):
    def __init__(
        self, config: FrigateConfig, stop_event: MpEvent, db: SqliteVecQueueDatabase
    ):
        super().__init__(name="event_cleanup")
        self.config = config
        self.stop_event = stop_event
        self.db = db
        self.camera_keys = list(self.config.cameras.keys())
        self.removed_camera_labels: Optional[list[str]] = None
        self.camera_labels: dict[str, dict[str, Any]] = {}

    def get_removed_camera_labels(self) -> list[Event]:
        """Get a list of distinct labels for removed cameras."""
        if self.removed_camera_labels is None:
            self.removed_camera_labels = list(
                Event.select(Event.label)
                .where(Event.camera.not_in(self.camera_keys))
                .distinct()
                .execute()
            )

        return self.removed_camera_labels

    def get_camera_labels(self, camera: str) -> list[Event]:
        """Get a list of distinct labels for each camera, updating once a day."""
        if (
            self.camera_labels.get(camera) is None
            or self.camera_labels[camera]["last_update"]
            < (datetime.datetime.now() - datetime.timedelta(days=1)).timestamp()
        ):
            self.camera_labels[camera] = {
                "last_update": datetime.datetime.now().timestamp(),
                "labels": list(
                    Event.select(Event.label)
                    .where(Event.camera == camera)
                    .distinct()
                    .execute()
                ),
            }

        return self.camera_labels[camera]["labels"]

    def expire(self, media_type: EventCleanupType) -> list[str]:
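        """Expire media of the given type based on the configured retention.

        Two passes: events from cameras that are no longer in the config are
        expired using the global retention settings, then events from each
        configured camera are expired using that camera's retention settings.
        Returns the ids of events from configured cameras whose media expired;
        run() uses them to prune related timeline entries.
        """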
        ## Expire events from unlisted cameras based on the global config
        if media_type == EventCleanupType.clips:
            expire_days = max(
                self.config.record.alerts.retain.days,
                self.config.record.detections.retain.days,
            )
            file_extension = None  # mp4 clips are no longer stored in /clips
            update_params = {"has_clip": False}
        else:
            retain_config = self.config.snapshots.retain
            file_extension = "jpg"
            update_params = {"has_snapshot": False}

        distinct_labels = self.get_removed_camera_labels()

        ## Expire events from cameras no longer in the config
        # loop over object types in db
        for event in distinct_labels:
            # get expiration time for this label
            if media_type == EventCleanupType.snapshots:
                expire_days = retain_config.objects.get(
                    event.label, retain_config.default
                )

            expire_after = (
                datetime.datetime.now() - datetime.timedelta(days=expire_days)
            ).timestamp()
            # grab all events after specific time
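            # materialized with list() so the count can be logged and the rows
            # walked afterwards; a bare query iterator has no len()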
            expired_events: list[Event] = list(
                Event.select(
                    Event.id,
                    Event.camera,
                )
                .where(
                    Event.camera.not_in(self.camera_keys),
                    Event.start_time < expire_after,
                    Event.label == event.label,
                    Event.retain_indefinitely == False,
                )
                .namedtuples()
                .iterator()
            )
            logger.debug(f"{len(expired_events)} events can be expired")
            # delete the media from disk
            for expired in expired_events:
                media_name = f"{expired.camera}-{expired.id}"
                media_path = Path(
                    f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
                )
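
                # snapshots keep a "-clean.png" companion (the un-annotated
                # copy, as I read the naming), removed alongside the jpg below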
                try:
                    media_path.unlink(missing_ok=True)
                    if file_extension == "jpg":
                        media_path = Path(
                            f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
                        )
                        media_path.unlink(missing_ok=True)
                except OSError as e:
                    logger.warning(f"Unable to delete event images: {e}")

            # update the clips attribute for the db entry
            query = Event.select(Event.id).where(
                Event.camera.not_in(self.camera_keys),
                Event.start_time < expire_after,
                Event.label == event.label,
                Event.retain_indefinitely == False,
            )

            events_to_update = []
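
            # flush updates in CHUNK_SIZE batches so a label with many expired
            # events never produces one oversized IN clause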
            for expired in query.iterator():
                events_to_update.append(expired.id)

                if len(events_to_update) >= CHUNK_SIZE:
                    logger.debug(
                        f"Updating {update_params} for {len(events_to_update)} events"
                    )
                    Event.update(update_params).where(
                        Event.id << events_to_update
                    ).execute()
                    events_to_update = []

            # Update any remaining events
            if events_to_update:
                logger.debug(
                    f"Updating clips/snapshots attribute for {len(events_to_update)} events"
                )
                Event.update(update_params).where(
                    Event.id << events_to_update
                ).execute()

        events_to_update = []

        ## Expire events from cameras based on the camera config
        for name, camera in self.config.cameras.items():
            if media_type == EventCleanupType.clips:
                expire_days = max(
                    camera.record.alerts.retain.days,
                    camera.record.detections.retain.days,
                )
            else:
                retain_config = camera.snapshots.retain

            # get distinct objects in database for this camera
            distinct_labels = self.get_camera_labels(name)
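            # refreshed at most once a day by get_camera_labels, so this
            # lookup rarely touches the database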

            # loop over object types in db
            for event in distinct_labels:
                # get expiration time for this label
                if media_type == EventCleanupType.snapshots:
                    expire_days = retain_config.objects.get(
                        event.label, retain_config.default
                    )

                expire_after = (
                    datetime.datetime.now() - datetime.timedelta(days=expire_days)
                ).timestamp()
                # grab all events after specific time
                expired_events = (
                    Event.select(
                        Event.id,
                        Event.camera,
                    )
                    .where(
                        Event.camera == name,
                        Event.start_time < expire_after,
                        Event.label == event.label,
                        Event.retain_indefinitely == False,
                    )
                    .namedtuples()
                    .iterator()
                )

                # delete the grabbed clips from disk
                # only snapshots are stored in /clips
                # so no need to delete mp4 files
                for expired in expired_events:
                    events_to_update.append(expired.id)

                    if media_type == EventCleanupType.snapshots:
                        try:
                            media_name = f"{expired.camera}-{expired.id}"
                            media_path = Path(
                                f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
                            )
                            media_path.unlink(missing_ok=True)
                            media_path = Path(
                                f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
                            )
                            media_path.unlink(missing_ok=True)
                        except OSError as e:
                            logger.warning(f"Unable to delete event images: {e}")

        # update the clips attribute for the db entry
        for i in range(0, len(events_to_update), CHUNK_SIZE):
            batch = events_to_update[i : i + CHUNK_SIZE]
            logger.debug(f"Updating {update_params} for {len(batch)} events")
            Event.update(update_params).where(Event.id << batch).execute()

        return events_to_update

    def run(self) -> None:
        # only expire events every 5 minutes
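        # (stop_event.wait returns True once shutdown is signalled, which ends
        # the loop without waiting out the full interval)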
        while not self.stop_event.wait(300):
            events_with_expired_clips = self.expire(EventCleanupType.clips)

            # delete timeline entries for events that have expired recordings
            # delete up to 100,000 at a time
            max_deletes = 100000
            deleted_events_list = list(events_with_expired_clips)
            for i in range(0, len(deleted_events_list), max_deletes):
                Timeline.delete().where(
                    Timeline.source_id << deleted_events_list[i : i + max_deletes]
                ).execute()

            self.expire(EventCleanupType.snapshots)

            # drop events from db where has_clip and has_snapshot are false
            events = (
                Event.select()
                .where(Event.has_clip == False, Event.has_snapshot == False)
                .iterator()
            )
            events_to_delete = [e.id for e in events]
            logger.debug(f"Found {len(events_to_delete)} events that can be expired")
            if len(events_to_delete) > 0:
                for i in range(0, len(events_to_delete), CHUNK_SIZE):
                    chunk = events_to_delete[i : i + CHUNK_SIZE]
                    logger.debug(f"Deleting {len(chunk)} events from the database")
                    Event.delete().where(Event.id << chunk).execute()

                    if self.config.semantic_search.enabled:
                        self.db.delete_embeddings_description(event_ids=chunk)
                        self.db.delete_embeddings_thumbnail(event_ids=chunk)
                        logger.debug(f"Deleted {len(chunk)} embeddings")

        logger.info("Exiting event cleanup...")
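

# Usage sketch (hypothetical wiring; the real setup lives in Frigate's
# application startup, and the names below are illustrative only):
#
#   stop_event = multiprocessing.Event()
#   cleanup = EventCleanup(config, stop_event, db)
#   cleanup.start()      # runs the cleanup loop every 5 minutes
#   ...
#   stop_event.set()     # signal shutdown
#   cleanup.join()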