2020-11-30 04:31:02 +01:00
import datetime
2021-05-21 17:39:14 +02:00
import itertools
2020-11-30 04:31:02 +01:00
import logging
2021-12-11 05:56:29 +01:00
import multiprocessing as mp
2020-11-30 04:31:02 +01:00
import os
2021-12-11 05:56:29 +01:00
import queue
2021-06-07 03:24:36 +02:00
import random
import string
2020-11-30 04:31:02 +01:00
import subprocess as sp
import threading
2021-10-23 23:18:13 +02:00
from collections import defaultdict
2020-11-30 04:31:02 +01:00
from pathlib import Path
import psutil
2021-08-30 13:58:50 +02:00
from peewee import JOIN , DoesNotExist
2021-07-09 22:14:16 +02:00
2021-12-11 20:11:39 +01:00
from frigate . config import RetainModeEnum , FrigateConfig
2022-12-18 00:53:34 +01:00
from frigate . const import CACHE_DIR , MAX_SEGMENT_DURATION , RECORD_DIR
2021-07-09 22:14:16 +02:00
from frigate . models import Event , Recordings
2021-12-11 20:11:39 +01:00
from frigate . util import area
2020-11-30 04:31:02 +01:00
logger = logging . getLogger ( __name__ )
SECONDS_IN_DAY = 60 * 60 * 24
2021-02-17 14:23:32 +01:00
2020-12-01 04:08:47 +01:00
def remove_empty_directories(directory):
    """Recursively delete all empty subdirectories of *directory*.

    Directories are visited deepest-first (sorted by path length, longest
    first) so a parent that becomes empty after its children are removed is
    itself deleted in the same pass. *directory* itself is never removed.

    Bug fix: the original walked the hard-coded RECORD_DIR and compared
    against RECORD_DIR, silently ignoring the ``directory`` argument. The
    only caller passes RECORD_DIR, so behavior there is unchanged.
    """
    # list all directories recursively and sort them by path,
    # longest first so children are checked before their parents
    paths = sorted(
        [x[0] for x in os.walk(directory)],
        key=lambda p: len(str(p)),
        reverse=True,
    )
    for path in paths:
        # don't delete the root directory itself
        if path == directory:
            continue
        if len(os.listdir(path)) == 0:
            os.rmdir(path)
2020-12-01 04:08:47 +01:00
2020-11-30 04:31:02 +01:00
class RecordingMaintainer(threading.Thread):
    """Background thread that moves finished recording segments from the
    cache directory into long-term recording storage.

    Each cycle it drains per-frame detection info from a queue, then decides
    for every cached mp4 segment whether to store it (via ffmpeg copy with
    faststart) and register it in the Recordings table, or to discard it.
    """

    def __init__(
        self, config: FrigateConfig, recordings_info_queue: mp.Queue, stop_event
    ):
        threading.Thread.__init__(self)
        self.name = "recording_maint"
        self.config = config
        # queue of (camera, frame_time, tracked_objects, motion_boxes, regions)
        # tuples produced elsewhere; drained in run()
        self.recordings_info_queue = recordings_info_queue
        self.stop_event = stop_event
        # per-camera list of frame info tuples, ordered by frame_time
        self.recordings_info = defaultdict(list)
        # cache_path -> (end_time, duration); avoids re-probing segments
        # that are re-examined on a later pass
        self.end_time_cache = {}

    def move_files(self):
        """Examine all cached segments and store or discard each one.

        A segment is stored when recording is enabled and either it overlaps
        an event (events retain mode) or it falls within the camera's retain
        window (record retain mode); otherwise it is unlinked.
        """
        # completed segments only: clip_* files and non-mp4 are excluded
        cache_files = sorted(
            [
                d
                for d in os.listdir(CACHE_DIR)
                if os.path.isfile(os.path.join(CACHE_DIR, d))
                and d.endswith(".mp4")
                and not d.startswith("clip_")
            ]
        )

        # find all cache files that ffmpeg still has open, so in-progress
        # segments are not moved mid-write
        files_in_use = []
        for process in psutil.process_iter():
            try:
                if process.name() != "ffmpeg":
                    continue
                flist = process.open_files()
                if flist:
                    for nt in flist:
                        if nt.path.startswith(CACHE_DIR):
                            files_in_use.append(nt.path.split("/")[-1])
            except:
                # processes can disappear between iteration and inspection;
                # skip any that raise
                continue

        # group recordings by camera
        grouped_recordings = defaultdict(list)
        for f in cache_files:
            # Skip files currently in use
            if f in files_in_use:
                continue

            cache_path = os.path.join(CACHE_DIR, f)
            # file names are "<camera>-<YYYYmmddHHMMSS>.mp4"; camera names
            # may themselves contain "-", hence rsplit with maxsplit=1
            basename = os.path.splitext(f)[0]
            camera, date = basename.rsplit("-", maxsplit=1)
            start_time = datetime.datetime.strptime(date, "%Y%m%d%H%M%S")
            grouped_recordings[camera].append(
                {
                    "cache_path": cache_path,
                    "start_time": start_time,
                }
            )

        # delete all cached files past the most recent 5
        keep_count = 5
        for camera in grouped_recordings.keys():
            segment_count = len(grouped_recordings[camera])
            if segment_count > keep_count:
                logger.warning(
                    f"Unable to keep up with recording segments in cache for {camera}. Keeping the {keep_count} most recent segments out of {segment_count} and discarding the rest..."
                )
                to_remove = grouped_recordings[camera][:-keep_count]
                for f in to_remove:
                    cache_path = f["cache_path"]
                    Path(cache_path).unlink(missing_ok=True)
                    self.end_time_cache.pop(cache_path, None)
                grouped_recordings[camera] = grouped_recordings[camera][-keep_count:]

        for camera, recordings in grouped_recordings.items():
            # clear out all the recording info for frames older than the
            # oldest remaining cached segment for this camera
            while (
                len(self.recordings_info[camera]) > 0
                and self.recordings_info[camera][0][0]
                < recordings[0]["start_time"].timestamp()
            ):
                self.recordings_info[camera].pop(0)

            # get all events with the end time after the start of the oldest cache file
            # or with end_time None
            events: Event = (
                Event.select()
                .where(
                    Event.camera == camera,
                    (Event.end_time == None)
                    | (Event.end_time >= recordings[0]["start_time"].timestamp()),
                    Event.has_clip,
                )
                .order_by(Event.start_time)
            )
            for r in recordings:
                cache_path = r["cache_path"]
                start_time = r["start_time"]

                # Just delete files if recordings are turned off
                if (
                    not camera in self.config.cameras
                    or not self.config.cameras[camera].record.enabled
                ):
                    Path(cache_path).unlink(missing_ok=True)
                    self.end_time_cache.pop(cache_path, None)
                    continue

                if cache_path in self.end_time_cache:
                    # already probed on a previous pass
                    end_time, duration = self.end_time_cache[cache_path]
                else:
                    # probe the segment duration with ffprobe
                    ffprobe_cmd = [
                        "ffprobe",
                        "-v",
                        "error",
                        "-show_entries",
                        "format=duration",
                        "-of",
                        "default=noprint_wrappers=1:nokey=1",
                        f"{cache_path}",
                    ]
                    p = sp.run(ffprobe_cmd, capture_output=True)
                    if p.returncode == 0 and p.stdout.decode():
                        duration = float(p.stdout.decode().strip())
                    else:
                        # -1 marks a failed probe (corrupt segment)
                        duration = -1

                    # ensure duration is within expected length
                    if 0 < duration < MAX_SEGMENT_DURATION:
                        end_time = start_time + datetime.timedelta(seconds=duration)
                        self.end_time_cache[cache_path] = (end_time, duration)
                    else:
                        if duration == -1:
                            logger.warning(
                                f"Failed to probe corrupt segment {cache_path}: {p.returncode} - {p.stderr}"
                            )
                        logger.warning(
                            f"Discarding a corrupt recording segment: {cache_path}"
                        )
                        Path(cache_path).unlink(missing_ok=True)
                        continue

                # if cached file's start_time is earlier than the retain days for the camera
                if start_time <= (
                    (
                        datetime.datetime.now()
                        - datetime.timedelta(
                            days=self.config.cameras[camera].record.retain.days
                        )
                    )
                ):
                    # if the cached segment overlaps with the events:
                    overlaps = False
                    for event in events:
                        # if the event starts in the future, stop checking events
                        # and remove this segment
                        if event.start_time > end_time.timestamp():
                            overlaps = False
                            Path(cache_path).unlink(missing_ok=True)
                            self.end_time_cache.pop(cache_path, None)
                            break

                        # if the event is in progress or ends after the recording starts, keep it
                        # and stop looking at events
                        if (
                            event.end_time is None
                            or event.end_time >= start_time.timestamp()
                        ):
                            overlaps = True
                            break

                    if overlaps:
                        record_mode = self.config.cameras[
                            camera
                        ].record.events.retain.mode
                        # move from cache to recordings immediately
                        self.store_segment(
                            camera,
                            start_time,
                            end_time,
                            duration,
                            cache_path,
                            record_mode,
                        )
                    # if it doesn't overlap with an event, go ahead and drop the segment
                    # if it ends more than the configured pre_capture for the camera
                    else:
                        pre_capture = self.config.cameras[
                            camera
                        ].record.events.pre_capture
                        most_recently_processed_frame_time = self.recordings_info[
                            camera
                        ][-1][0]
                        retain_cutoff = most_recently_processed_frame_time - pre_capture
                        if end_time.timestamp() < retain_cutoff:
                            Path(cache_path).unlink(missing_ok=True)
                            self.end_time_cache.pop(cache_path, None)
                # else retain days includes this segment
                else:
                    record_mode = self.config.cameras[camera].record.retain.mode
                    self.store_segment(
                        camera, start_time, end_time, duration, cache_path, record_mode
                    )

    def segment_stats(self, camera, start_time, end_time):
        """Return (motion_count, active_count) for the frames of *camera*
        whose timestamps fall within [start_time, end_time].

        motion_count sums the areas of all motion boxes; active_count counts
        tracked objects that are neither false positives nor motionless.
        """
        active_count = 0
        motion_count = 0
        for frame in self.recordings_info[camera]:
            # frame is after end time of segment; recordings_info is ordered
            # by frame time, so no later frame can match either
            if frame[0] > end_time.timestamp():
                break
            # frame is before start time of segment
            if frame[0] < start_time.timestamp():
                continue
            active_count += len(
                [
                    o
                    for o in frame[1]
                    if not o["false_positive"] and o["motionless_count"] == 0
                ]
            )
            motion_count += sum([area(box) for box in frame[2]])
        return (motion_count, active_count)

    def store_segment(
        self,
        camera,
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        duration,
        cache_path,
        store_mode: RetainModeEnum,
    ):
        """Move one cached segment into RECORD_DIR and insert a Recordings row.

        The segment is dropped instead when *store_mode* requires motion or
        active objects and the segment has none. The cache file is always
        removed (either stored+deleted or unlinked), and its end_time cache
        entry is cleared.
        """
        motion_count, active_count = self.segment_stats(camera, start_time, end_time)

        # check if the segment shouldn't be stored
        if (store_mode == RetainModeEnum.motion and motion_count == 0) or (
            store_mode == RetainModeEnum.active_objects and active_count == 0
        ):
            Path(cache_path).unlink(missing_ok=True)
            self.end_time_cache.pop(cache_path, None)
            return

        # recordings are laid out as RECORD_DIR/YYYY-mm-dd/HH/<camera>/MM.SS.mp4
        # with the date/hour path rendered in UTC
        directory = os.path.join(
            RECORD_DIR,
            start_time.astimezone(tz=datetime.timezone.utc).strftime("%Y-%m-%d/%H"),
            camera,
        )

        if not os.path.exists(directory):
            os.makedirs(directory)

        file_name = (
            f"{start_time.replace(tzinfo=datetime.timezone.utc).strftime('%M.%S.mp4')}"
        )
        file_path = os.path.join(directory, file_name)

        try:
            if not os.path.exists(file_path):
                start_frame = datetime.datetime.now().timestamp()

                # add faststart to kept segments to improve metadata reading
                ffmpeg_cmd = [
                    "ffmpeg",
                    "-y",
                    "-i",
                    cache_path,
                    "-c",
                    "copy",
                    "-movflags",
                    "+faststart",
                    file_path,
                ]
                p = sp.run(
                    ffmpeg_cmd,
                    encoding="ascii",
                    capture_output=True,
                )

                if p.returncode != 0:
                    logger.error(f"Unable to convert {cache_path} to {file_path}")
                    logger.error(p.stderr)
                    return
                else:
                    logger.debug(
                        f"Copied {file_path} in {datetime.datetime.now().timestamp() - start_frame} seconds."
                    )

                try:
                    # get the segment size of the cache file
                    # file without faststart is same size
                    segment_size = round(
                        float(os.path.getsize(cache_path)) / 1000000, 1
                    )
                except OSError:
                    segment_size = 0

                os.remove(cache_path)

                # random suffix keeps ids unique even when two segments share
                # the same start timestamp
                rand_id = "".join(
                    random.choices(string.ascii_lowercase + string.digits, k=6)
                )
                Recordings.create(
                    id=f"{start_time.timestamp()}-{rand_id}",
                    camera=camera,
                    path=file_path,
                    start_time=start_time.timestamp(),
                    end_time=end_time.timestamp(),
                    duration=duration,
                    motion=motion_count,
                    # TODO: update this to store list of active objects at some point
                    objects=active_count,
                    segment_size=segment_size,
                )
        except Exception as e:
            logger.error(f"Unable to store recording segment {cache_path}")
            Path(cache_path).unlink(missing_ok=True)
            logger.error(e)

        # clear end_time cache
        self.end_time_cache.pop(cache_path, None)

    def run(self):
        """Main loop: drain the recordings info queue and move cached files,
        roughly every 5 seconds (adjusted for how long a pass takes)."""
        # Check for new files every 5 seconds
        wait_time = 5
        while not self.stop_event.wait(wait_time):
            run_start = datetime.datetime.now().timestamp()

            # empty the recordings info queue
            while True:
                try:
                    (
                        camera,
                        frame_time,
                        current_tracked_objects,
                        motion_boxes,
                        regions,
                    ) = self.recordings_info_queue.get(False)

                    # only retain frame info for cameras with recording enabled
                    if self.config.cameras[camera].record.enabled:
                        self.recordings_info[camera].append(
                            (
                                frame_time,
                                current_tracked_objects,
                                motion_boxes,
                                regions,
                            )
                        )
                except queue.Empty:
                    break

            try:
                self.move_files()
            except Exception as e:
                logger.error(
                    "Error occurred when attempting to maintain recording cache"
                )
                logger.error(e)

            # subtract the pass duration so cycles stay ~5s apart
            duration = datetime.datetime.now().timestamp() - run_start
            wait_time = max(0, 5 - duration)

        logger.info(f"Exiting recording maintenance...")
class RecordingCleanup(threading.Thread):
    """Background thread that expires old recordings.

    Every minute it purges stale tmp clips; on a configurable interval it
    expires recordings from the database/disk and prunes empty directories.
    """

    def __init__(self, config: FrigateConfig, stop_event):
        threading.Thread.__init__(self)
        self.name = "recording_cleanup"
        self.config = config
        self.stop_event = stop_event

    def clean_tmp_clips(self):
        """Delete cached clip_* files more than 1 minute old."""
        # NOTE: the comparison below is against 60 * 1 seconds (1 minute)
        for p in Path("/tmp/cache").rglob("clip_*.mp4"):
            logger.debug(f"Checking tmp clip {p}.")
            if p.stat().st_mtime < (datetime.datetime.now().timestamp() - 60 * 1):
                logger.debug("Deleting tmp clip.")

                # empty contents of file before unlinking https://github.com/blakeblackshear/frigate/issues/4769
                with open(p, "w"):
                    pass
                p.unlink(missing_ok=True)

    def expire_recordings(self):
        """Expire DB-tracked recordings: first those belonging to deleted
        cameras, then per-camera segments outside the retain window that do
        not overlap a retained event (or fail the retention mode check)."""
        logger.debug("Start expire recordings (new).")

        logger.debug("Start deleted cameras.")
        # Handle deleted cameras
        expire_days = self.config.record.retain.days
        expire_before = (
            datetime.datetime.now() - datetime.timedelta(days=expire_days)
        ).timestamp()
        no_camera_recordings: Recordings = Recordings.select().where(
            Recordings.camera.not_in(list(self.config.cameras.keys())),
            Recordings.end_time < expire_before,
        )

        deleted_recordings = set()
        for recording in no_camera_recordings:
            Path(recording.path).unlink(missing_ok=True)
            deleted_recordings.add(recording.id)

        logger.debug(f"Expiring {len(deleted_recordings)} recordings")
        Recordings.delete().where(Recordings.id << deleted_recordings).execute()
        logger.debug("End deleted cameras.")

        logger.debug("Start all cameras.")
        for camera, config in self.config.cameras.items():
            logger.debug(f"Start camera: {camera}.")

            # Get the timestamp for cutoff of retained days
            expire_days = config.record.retain.days
            expire_date = (
                datetime.datetime.now() - datetime.timedelta(days=expire_days)
            ).timestamp()

            # Get recordings to check for expiration
            recordings: Recordings = (
                Recordings.select()
                .where(
                    Recordings.camera == camera,
                    Recordings.end_time < expire_date,
                )
                .order_by(Recordings.start_time)
            )

            # Get all the events to check against
            events: Event = (
                Event.select()
                .where(
                    Event.camera == camera,
                    # need to ensure segments for all events starting
                    # before the expire date are included
                    Event.start_time < expire_date,
                    Event.has_clip,
                )
                .order_by(Event.start_time)
                .objects()
            )

            # loop over recordings and see if they overlap with any non-expired events
            # TODO: expire segments based on segment stats according to config
            # event_start is a skip-pointer: both lists are sorted by start
            # time, so events ending before an earlier segment started can be
            # skipped for all later segments too
            event_start = 0
            deleted_recordings = set()
            for recording in recordings.objects().iterator():
                keep = False
                # Now look for a reason to keep this recording segment
                for idx in range(event_start, len(events)):
                    event = events[idx]

                    # if the event starts in the future, stop checking events
                    # and let this recording segment expire
                    if event.start_time > recording.end_time:
                        keep = False
                        break

                    # if the event is in progress or ends after the recording starts, keep it
                    # and stop looking at events
                    if event.end_time is None or event.end_time >= recording.start_time:
                        keep = True
                        break

                    # if the event ends before this recording segment starts, skip
                    # this event and check the next event for an overlap.
                    # since the events and recordings are sorted, we can skip events
                    # that end before the previous recording segment started on future segments
                    if event.end_time < recording.start_time:
                        event_start = idx

                # Delete recordings outside of the retention window or based on the retention mode
                if (
                    not keep
                    or (
                        config.record.events.retain.mode == RetainModeEnum.motion
                        and recording.motion == 0
                    )
                    or (
                        config.record.events.retain.mode
                        == RetainModeEnum.active_objects
                        and recording.objects == 0
                    )
                ):
                    Path(recording.path).unlink(missing_ok=True)
                    deleted_recordings.add(recording.id)

            logger.debug(f"Expiring {len(deleted_recordings)} recordings")
            # delete up to 100,000 at a time
            max_deletes = 100000
            deleted_recordings_list = list(deleted_recordings)
            for i in range(0, len(deleted_recordings_list), max_deletes):
                Recordings.delete().where(
                    Recordings.id << deleted_recordings_list[i : i + max_deletes]
                ).execute()

            logger.debug(f"End camera: {camera}.")

        logger.debug("End all cameras.")
        logger.debug("End expire recordings (new).")

    def expire_files(self):
        """Legacy filesystem-based expiry: delete on-disk recording files
        older than the oldest DB row, honoring per-camera retain days.

        Uses the file's parent directory name as the camera name when
        choosing the cutoff, falling back to the global retain setting.
        """
        logger.debug("Start expire files (legacy).")

        default_expire = (
            datetime.datetime.now().timestamp()
            - SECONDS_IN_DAY * self.config.record.retain.days
        )
        delete_before = {}

        for name, camera in self.config.cameras.items():
            delete_before[name] = (
                datetime.datetime.now().timestamp()
                - SECONDS_IN_DAY * camera.record.retain.days
            )

        # find all the recordings older than the oldest recording in the db
        try:
            oldest_recording = Recordings.select().order_by(Recordings.start_time).get()

            p = Path(oldest_recording.path)
            # subtract 1 so the oldest file itself is included by -newermt
            oldest_timestamp = p.stat().st_mtime - 1
        except DoesNotExist:
            # empty table: only consider files older than "now"
            oldest_timestamp = datetime.datetime.now().timestamp()
        except FileNotFoundError:
            # DB row points at a missing file: drop the row and retry next pass
            logger.warning(f"Unable to find file from recordings database: {p}")
            Recordings.delete().where(Recordings.id == oldest_recording.id).execute()
            return

        logger.debug(f"Oldest recording in the db: {oldest_timestamp}")
        process = sp.run(
            ["find", RECORD_DIR, "-type", "f", "!", "-newermt", f"@{oldest_timestamp}"],
            capture_output=True,
            text=True,
        )
        files_to_check = process.stdout.splitlines()

        for f in files_to_check:
            p = Path(f)
            try:
                if p.stat().st_mtime < delete_before.get(p.parent.name, default_expire):
                    p.unlink(missing_ok=True)
            except FileNotFoundError:
                # file vanished between find and stat; nothing to do
                logger.warning(f"Attempted to expire missing file: {f}")

        logger.debug("End expire files (legacy).")

    def sync_recordings(self):
        """Delete Recordings rows whose files no longer exist on disk."""
        logger.debug("Start sync recordings.")

        # get all recordings in the db
        recordings: Recordings = Recordings.select()

        # get all recordings files on disk
        process = sp.run(
            ["find", RECORD_DIR, "-type", "f"],
            capture_output=True,
            text=True,
        )
        files_on_disk = process.stdout.splitlines()

        recordings_to_delete = []
        for recording in recordings.objects().iterator():
            if not recording.path in files_on_disk:
                recordings_to_delete.append(recording.id)

        logger.debug(
            f"Deleting {len(recordings_to_delete)} recordings with missing files"
        )
        # delete up to 100,000 at a time
        max_deletes = 100000
        for i in range(0, len(recordings_to_delete), max_deletes):
            Recordings.delete().where(
                Recordings.id << recordings_to_delete[i : i + max_deletes]
            ).execute()

        logger.debug("End sync recordings.")

    def run(self):
        """Main loop: tmp-clip cleanup every minute; full expiry plus empty
        directory pruning once every ``expire_interval`` minutes."""
        # on startup sync recordings with disk (disabled due to too much CPU usage)
        # self.sync_recordings()

        # Expire tmp clips every minute, recordings and clean directories every hour.
        for counter in itertools.cycle(range(self.config.record.expire_interval)):
            if self.stop_event.wait(60):
                logger.info(f"Exiting recording cleanup...")
                break
            self.clean_tmp_clips()

            if counter == 0:
                self.expire_recordings()
                self.expire_files()
                remove_empty_directories(RECORD_DIR)