2023-04-26 15:25:26 +02:00
|
|
|
"""Cleanup recordings that are expired based on retention config."""
|
|
|
|
|
|
|
|
import datetime
|
|
|
|
import itertools
|
|
|
|
import logging
|
2024-04-07 22:37:12 +02:00
|
|
|
import os
|
2023-04-26 15:25:26 +02:00
|
|
|
import threading
|
2023-05-29 12:31:17 +02:00
|
|
|
from multiprocessing.synchronize import Event as MpEvent
|
2023-04-26 15:25:26 +02:00
|
|
|
from pathlib import Path
|
|
|
|
|
2024-04-07 22:37:12 +02:00
|
|
|
from playhouse.sqlite_ext import SqliteExtDatabase
|
|
|
|
|
2023-12-03 15:16:01 +01:00
|
|
|
from frigate.config import CameraConfig, FrigateConfig, RetainModeEnum
|
2024-06-09 20:45:26 +02:00
|
|
|
from frigate.const import CACHE_DIR, CLIPS_DIR, MAX_WAL_SIZE, RECORD_DIR
|
2024-09-02 15:22:53 +02:00
|
|
|
from frigate.models import Previews, Recordings, ReviewSegment
|
2023-11-04 03:21:29 +01:00
|
|
|
from frigate.record.util import remove_empty_directories, sync_recordings
|
2023-11-08 00:21:43 +01:00
|
|
|
from frigate.util.builtin import clear_and_unlink, get_tomorrow_at_time
|
2023-04-26 15:25:26 +02:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
class RecordingCleanup(threading.Thread):
    """Cleanup existing recordings based on retention config."""

    def __init__(self, config: FrigateConfig, stop_event: MpEvent) -> None:
        """Store the config and the shared shutdown event for the thread.

        Args:
            config: full Frigate config (retention settings are read from it).
            stop_event: multiprocessing event used to signal shutdown.
        """
        super().__init__(name="recording_cleanup")
        self.config = config
        self.stop_event = stop_event
|
|
|
|
|
2024-06-09 20:45:26 +02:00
|
|
|
def clean_tmp_previews(self) -> None:
|
|
|
|
"""delete any previews in the cache that are more than 1 hour old."""
|
|
|
|
for p in Path(CACHE_DIR).rglob("preview_*.mp4"):
|
|
|
|
logger.debug(f"Checking preview {p}.")
|
|
|
|
if p.stat().st_mtime < (datetime.datetime.now().timestamp() - 60 * 60):
|
|
|
|
logger.debug("Deleting preview.")
|
|
|
|
clear_and_unlink(p)
|
|
|
|
|
2023-04-26 15:25:26 +02:00
|
|
|
def clean_tmp_clips(self) -> None:
|
2024-06-09 20:45:26 +02:00
|
|
|
"""delete any clips in the cache that are more than 1 hour old."""
|
|
|
|
for p in Path(os.path.join(CLIPS_DIR, "cache")).rglob("clip_*.mp4"):
|
2023-04-26 15:25:26 +02:00
|
|
|
logger.debug(f"Checking tmp clip {p}.")
|
2024-06-09 20:45:26 +02:00
|
|
|
if p.stat().st_mtime < (datetime.datetime.now().timestamp() - 60 * 60):
|
2023-04-26 15:25:26 +02:00
|
|
|
logger.debug("Deleting tmp clip.")
|
2023-11-08 00:21:43 +01:00
|
|
|
clear_and_unlink(p)
|
2023-04-26 15:25:26 +02:00
|
|
|
|
2024-04-07 22:37:12 +02:00
|
|
|
def truncate_wal(self) -> None:
|
|
|
|
"""check if the WAL needs to be manually truncated."""
|
|
|
|
|
|
|
|
# by default the WAL should be check-pointed automatically
|
|
|
|
# however, high levels of activity can prevent an opportunity
|
|
|
|
# for the checkpoint to be finished which means the WAL will grow
|
|
|
|
# without bound
|
|
|
|
|
|
|
|
# with auto checkpoint most users should never hit this
|
|
|
|
|
|
|
|
if (
|
|
|
|
os.stat(f"{self.config.database.path}-wal").st_size / (1024 * 1024)
|
|
|
|
) > MAX_WAL_SIZE:
|
|
|
|
db = SqliteExtDatabase(self.config.database.path)
|
|
|
|
db.execute_sql("PRAGMA wal_checkpoint(TRUNCATE);")
|
|
|
|
db.close()
|
|
|
|
|
2024-09-02 15:22:53 +02:00
|
|
|
def expire_review_segments(self, config: CameraConfig, now: datetime) -> None:
|
|
|
|
"""Delete review segments that are expired"""
|
|
|
|
alert_expire_date = (
|
|
|
|
now - datetime.timedelta(days=config.record.alerts.retain.days)
|
|
|
|
).timestamp()
|
|
|
|
detection_expire_date = (
|
|
|
|
now - datetime.timedelta(days=config.record.detections.retain.days)
|
|
|
|
).timestamp()
|
|
|
|
expired_reviews: ReviewSegment = (
|
|
|
|
ReviewSegment.select(ReviewSegment.id)
|
2024-09-03 18:14:30 +02:00
|
|
|
.where(ReviewSegment.camera == config.name)
|
2024-09-02 15:22:53 +02:00
|
|
|
.where(
|
2024-09-03 18:14:30 +02:00
|
|
|
(
|
|
|
|
ReviewSegment.severity
|
|
|
|
== "alert" & ReviewSegment.end_time
|
|
|
|
< alert_expire_date
|
|
|
|
)
|
|
|
|
| (
|
|
|
|
ReviewSegment.severity
|
|
|
|
== "detection" & ReviewSegment.end_time
|
|
|
|
< detection_expire_date
|
2024-09-02 15:22:53 +02:00
|
|
|
)
|
|
|
|
)
|
|
|
|
.namedtuples()
|
|
|
|
)
|
|
|
|
|
|
|
|
max_deletes = 100000
|
|
|
|
deleted_reviews_list = list(map(lambda x: x[0], expired_reviews))
|
|
|
|
for i in range(0, len(deleted_reviews_list), max_deletes):
|
|
|
|
ReviewSegment.delete().where(
|
|
|
|
ReviewSegment.id << deleted_reviews_list[i : i + max_deletes]
|
|
|
|
).execute()
|
|
|
|
|
2023-12-03 15:16:01 +01:00
|
|
|
    def expire_existing_camera_recordings(
        self, expire_date: float, config: CameraConfig, reviews: ReviewSegment
    ) -> None:
        """Delete recordings for existing camera based on retention config.

        Args:
            expire_date: epoch timestamp; only recordings/previews ending
                before this are candidates for deletion.
            config: the camera's config (supplies the per-severity retain modes).
            reviews: this camera's review segments sorted by start_time;
                overlap with a review can keep a recording segment.
                NOTE(review): must support len() and indexing — presumably a
                non-iterator peewee select; confirm against caller.
        """
        # Get recordings to check for expiration, oldest first so the
        # two-pointer sweep against the sorted reviews below works.
        recordings: Recordings = (
            Recordings.select(
                Recordings.id,
                Recordings.start_time,
                Recordings.end_time,
                Recordings.path,
                Recordings.objects,
                Recordings.motion,
            )
            .where(
                Recordings.camera == config.name,
                Recordings.end_time < expire_date,
            )
            .order_by(Recordings.start_time)
            .namedtuples()
            .iterator()
        )

        # loop over recordings and see if they overlap with any non-expired reviews
        # TODO: expire segments based on segment stats according to config
        review_start = 0  # skip-pointer: reviews before this index ended too early
        deleted_recordings = set()
        kept_recordings: list[tuple[float, float]] = []
        for recording in recordings:
            keep = False
            mode = None  # retain mode of the overlapping review, if any
            # Now look for a reason to keep this recording segment
            for idx in range(review_start, len(reviews)):
                review: ReviewSegment = reviews[idx]

                # if the review starts in the future, stop checking reviews
                # and let this recording segment expire
                if review.start_time > recording.end_time:
                    keep = False
                    break

                # if the review is in progress (end_time is None) or ends after
                # the recording starts, keep it and stop looking at reviews
                if review.end_time is None or review.end_time >= recording.start_time:
                    keep = True
                    mode = (
                        config.record.alerts.retain.mode
                        if review.severity == "alert"
                        else config.record.detections.retain.mode
                    )
                    break

                # if the review ends before this recording segment starts, skip
                # this review and check the next review for an overlap.
                # since the review and recordings are sorted, we can skip review
                # that end before the previous recording segment started on future segments
                if review.end_time < recording.start_time:
                    review_start = idx

            # Delete recordings outside of the retention window or based on the
            # retention mode (motion/active_objects require activity to retain)
            if (
                not keep
                or (mode == RetainModeEnum.motion and recording.motion == 0)
                or (mode == RetainModeEnum.active_objects and recording.objects == 0)
            ):
                # remove the file first; DB rows are batch-deleted below
                Path(recording.path).unlink(missing_ok=True)
                deleted_recordings.add(recording.id)
            else:
                # kept spans feed the preview sweep below
                kept_recordings.append((recording.start_time, recording.end_time))

        # expire recordings
        logger.debug(f"Expiring {len(deleted_recordings)} recordings")
        # delete up to 100,000 at a time
        max_deletes = 100000
        deleted_recordings_list = list(deleted_recordings)
        for i in range(0, len(deleted_recordings_list), max_deletes):
            Recordings.delete().where(
                Recordings.id << deleted_recordings_list[i : i + max_deletes]
            ).execute()

        # Previews older than the cutoff are kept only if they overlap a
        # recording segment that survived the pass above.
        previews: Previews = (
            Previews.select(
                Previews.id,
                Previews.start_time,
                Previews.end_time,
                Previews.path,
            )
            .where(
                Previews.camera == config.name,
                Previews.end_time < expire_date,
            )
            .order_by(Previews.start_time)
            .namedtuples()
            .iterator()
        )

        # expire previews
        recording_start = 0  # skip-pointer into kept_recordings, same scheme as above
        deleted_previews = set()
        for preview in previews:
            keep = False
            # look for a reason to keep this preview
            for idx in range(recording_start, len(kept_recordings)):
                start_time, end_time = kept_recordings[idx]

                # if the recording starts in the future, stop checking recordings
                # and let this preview expire
                if start_time > preview.end_time:
                    keep = False
                    break

                # if the recording ends after the preview starts, keep it
                # and stop looking at recordings
                if end_time >= preview.start_time:
                    keep = True
                    break

                # if the recording ends before this preview starts, skip
                # this recording and check the next recording for an overlap.
                # since the kept recordings and previews are sorted, we can skip recordings
                # that end before the current preview started
                if end_time < preview.start_time:
                    recording_start = idx

            # Delete previews without any relevant recordings
            if not keep:
                Path(preview.path).unlink(missing_ok=True)
                deleted_previews.add(preview.id)

        # expire previews
        logger.debug(f"Expiring {len(deleted_previews)} previews")
        # delete up to 100,000 at a time
        max_deletes = 100000
        deleted_previews_list = list(deleted_previews)
        for i in range(0, len(deleted_previews_list), max_deletes):
            Previews.delete().where(
                Previews.id << deleted_previews_list[i : i + max_deletes]
            ).execute()
|
|
|
|
|
2023-04-26 15:25:26 +02:00
|
|
|
def expire_recordings(self) -> None:
|
2023-06-30 14:13:00 +02:00
|
|
|
"""Delete recordings based on retention config."""
|
|
|
|
logger.debug("Start expire recordings.")
|
2023-04-26 15:25:26 +02:00
|
|
|
logger.debug("Start deleted cameras.")
|
2023-12-03 15:16:01 +01:00
|
|
|
|
2023-04-26 15:25:26 +02:00
|
|
|
# Handle deleted cameras
|
|
|
|
expire_days = self.config.record.retain.days
|
|
|
|
expire_before = (
|
|
|
|
datetime.datetime.now() - datetime.timedelta(days=expire_days)
|
|
|
|
).timestamp()
|
2023-10-21 17:53:33 +02:00
|
|
|
no_camera_recordings: Recordings = (
|
|
|
|
Recordings.select(
|
|
|
|
Recordings.id,
|
|
|
|
Recordings.path,
|
|
|
|
)
|
|
|
|
.where(
|
|
|
|
Recordings.camera.not_in(list(self.config.cameras.keys())),
|
|
|
|
Recordings.end_time < expire_before,
|
|
|
|
)
|
|
|
|
.namedtuples()
|
|
|
|
.iterator()
|
2023-04-26 15:25:26 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
deleted_recordings = set()
|
|
|
|
for recording in no_camera_recordings:
|
|
|
|
Path(recording.path).unlink(missing_ok=True)
|
|
|
|
deleted_recordings.add(recording.id)
|
|
|
|
|
|
|
|
logger.debug(f"Expiring {len(deleted_recordings)} recordings")
|
2023-07-28 12:49:52 +02:00
|
|
|
# delete up to 100,000 at a time
|
|
|
|
max_deletes = 100000
|
|
|
|
deleted_recordings_list = list(deleted_recordings)
|
|
|
|
for i in range(0, len(deleted_recordings_list), max_deletes):
|
|
|
|
Recordings.delete().where(
|
|
|
|
Recordings.id << deleted_recordings_list[i : i + max_deletes]
|
|
|
|
).execute()
|
2023-04-26 15:25:26 +02:00
|
|
|
logger.debug("End deleted cameras.")
|
|
|
|
|
|
|
|
logger.debug("Start all cameras.")
|
|
|
|
for camera, config in self.config.cameras.items():
|
|
|
|
logger.debug(f"Start camera: {camera}.")
|
2024-09-02 15:22:53 +02:00
|
|
|
now = datetime.datetime.now()
|
|
|
|
|
|
|
|
self.expire_review_segments(config, now)
|
2023-12-03 15:16:01 +01:00
|
|
|
|
2023-04-26 15:25:26 +02:00
|
|
|
expire_days = config.record.retain.days
|
2024-09-02 15:22:53 +02:00
|
|
|
expire_date = (now - datetime.timedelta(days=expire_days)).timestamp()
|
|
|
|
|
|
|
|
# Get all the reviews to check against
|
|
|
|
reviews: ReviewSegment = (
|
|
|
|
ReviewSegment.select(
|
|
|
|
ReviewSegment.start_time,
|
|
|
|
ReviewSegment.end_time,
|
|
|
|
ReviewSegment.severity,
|
2023-09-12 00:07:04 +02:00
|
|
|
)
|
2023-04-26 15:25:26 +02:00
|
|
|
.where(
|
2024-09-02 15:22:53 +02:00
|
|
|
ReviewSegment.camera == camera,
|
|
|
|
# need to ensure segments for all reviews starting
|
2023-04-26 15:25:26 +02:00
|
|
|
# before the expire date are included
|
2024-09-02 15:22:53 +02:00
|
|
|
ReviewSegment.start_time < expire_date,
|
2023-04-26 15:25:26 +02:00
|
|
|
)
|
2024-09-02 15:22:53 +02:00
|
|
|
.order_by(ReviewSegment.start_time)
|
2023-10-21 17:53:33 +02:00
|
|
|
.namedtuples()
|
2023-04-26 15:25:26 +02:00
|
|
|
)
|
|
|
|
|
2024-09-02 15:22:53 +02:00
|
|
|
self.expire_existing_camera_recordings(expire_date, config, reviews)
|
2023-04-26 15:25:26 +02:00
|
|
|
logger.debug(f"End camera: {camera}.")
|
|
|
|
|
|
|
|
logger.debug("End all cameras.")
|
2023-06-30 14:13:00 +02:00
|
|
|
logger.debug("End expire recordings.")
|
2023-04-26 15:25:26 +02:00
|
|
|
|
|
|
|
def run(self) -> None:
|
2023-07-15 15:38:21 +02:00
|
|
|
# on startup sync recordings with disk if enabled
|
2023-11-18 13:06:00 +01:00
|
|
|
if self.config.record.sync_recordings:
|
2023-11-04 03:21:29 +01:00
|
|
|
sync_recordings(limited=False)
|
2023-11-18 13:06:00 +01:00
|
|
|
next_sync = get_tomorrow_at_time(3)
|
2023-04-26 15:25:26 +02:00
|
|
|
|
|
|
|
# Expire tmp clips every minute, recordings and clean directories every hour.
|
|
|
|
for counter in itertools.cycle(range(self.config.record.expire_interval)):
|
|
|
|
if self.stop_event.wait(60):
|
2023-05-29 12:31:17 +02:00
|
|
|
logger.info("Exiting recording cleanup...")
|
2023-04-26 15:25:26 +02:00
|
|
|
break
|
2023-11-04 03:21:29 +01:00
|
|
|
|
2024-06-09 20:45:26 +02:00
|
|
|
self.clean_tmp_previews()
|
2023-04-26 15:25:26 +02:00
|
|
|
|
2023-11-18 13:06:00 +01:00
|
|
|
if (
|
|
|
|
self.config.record.sync_recordings
|
|
|
|
and datetime.datetime.now().astimezone(datetime.timezone.utc)
|
|
|
|
> next_sync
|
|
|
|
):
|
2023-11-04 03:21:29 +01:00
|
|
|
sync_recordings(limited=True)
|
|
|
|
next_sync = get_tomorrow_at_time(3)
|
|
|
|
|
2023-04-26 15:25:26 +02:00
|
|
|
if counter == 0:
|
2024-06-09 20:45:26 +02:00
|
|
|
self.clean_tmp_clips()
|
2023-04-26 15:25:26 +02:00
|
|
|
self.expire_recordings()
|
|
|
|
remove_empty_directories(RECORD_DIR)
|
2024-04-07 22:37:12 +02:00
|
|
|
self.truncate_wal()
|