blakeblackshear.frigate/frigate/events.py

import datetime
import json
import logging
import os
import queue
import subprocess as sp
import threading
import time
from collections import defaultdict
from pathlib import Path

import psutil
import shutil

from frigate.config import FrigateConfig
from frigate.const import RECORD_DIR, CLIPS_DIR, CACHE_DIR
from frigate.models import Event
from peewee import fn

logger = logging.getLogger(__name__)


class EventProcessor(threading.Thread):
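    """Consume event start/end messages from the event queue, keep the ffmpeg
    cache segment index fresh, assemble clips for finished events, and persist
    them to the Event table.

    Illustrative wiring only (the queue and stop-event names below are
    placeholders/assumptions, not the exact frigate startup code; `config` and
    `camera_processes` come from the main process):

        import multiprocessing as mp

        event_queue = mp.Queue()
        event_processed_queue = mp.Queue()
        stop_event = mp.Event()
        EventProcessor(
            config, camera_processes, event_queue, event_processed_queue, stop_event
        ).start()
    """
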
    def __init__(
        self, config, camera_processes, event_queue, event_processed_queue, stop_event
    ):
        threading.Thread.__init__(self)
        self.name = "event_processor"
        self.config = config
        self.camera_processes = camera_processes
        self.cached_clips = {}
        self.event_queue = event_queue
        self.event_processed_queue = event_processed_queue
        self.events_in_process = {}
        self.stop_event = stop_event

    def should_create_clip(self, camera, event_data):
        if event_data["false_positive"]:
            return False

        # if there are required zones and there is no overlap
        required_zones = self.config.cameras[camera].clips.required_zones
        if len(required_zones) > 0 and not set(event_data["entered_zones"]) & set(
            required_zones
        ):
            logger.debug(
                f"Not creating clip for {event_data['id']} because it did not enter required zones"
            )
            return False

        return True

    def refresh_cache(self):
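        """Index the ffmpeg cache segments in CACHE_DIR (probing durations with
        ffprobe) and evict segments that can no longer be needed by an
        in-progress event or that would overflow the cache volume."""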
        cached_files = os.listdir(CACHE_DIR)

        files_in_use = []
        for process in psutil.process_iter():
            try:
                if process.name() != "ffmpeg":
                    continue
                flist = process.open_files()
                if flist:
                    for nt in flist:
                        if nt.path.startswith(CACHE_DIR):
                            files_in_use.append(nt.path.split("/")[-1])
            except:
                continue

        for f in cached_files:
            if f in files_in_use or f in self.cached_clips:
                continue

            basename = os.path.splitext(f)[0]
            camera, date = basename.rsplit("-", maxsplit=1)
            start_time = datetime.datetime.strptime(date, "%Y%m%d%H%M%S")

            ffprobe_cmd = [
                "ffprobe",
                "-v",
                "error",
                "-show_entries",
                "format=duration",
                "-of",
                "default=noprint_wrappers=1:nokey=1",
                f"{os.path.join(CACHE_DIR, f)}",
            ]
            p = sp.run(ffprobe_cmd, capture_output=True)
            if p.returncode == 0:
                duration = float(p.stdout.decode().strip())
            else:
                logger.info(f"bad file: {f}")
                os.remove(os.path.join(CACHE_DIR, f))
                continue

            self.cached_clips[f] = {
                "path": f,
                "camera": camera,
                "start_time": start_time.timestamp(),
                "duration": duration,
            }

        if len(self.events_in_process) > 0:
            earliest_event = min(
                self.events_in_process.values(), key=lambda x: x["start_time"]
            )["start_time"]
        else:
            earliest_event = datetime.datetime.now().timestamp()

        # if the earliest event is more than max_seconds ago, cap it
        max_seconds = self.config.clips.max_seconds
        earliest_event = max(
            earliest_event,
            datetime.datetime.now().timestamp() - max_seconds,
        )

        for f, data in list(self.cached_clips.items()):
            if earliest_event - 90 > data["start_time"] + data["duration"]:
                del self.cached_clips[f]
                logger.debug(f"Cleaning up cached file {f}")
                os.remove(os.path.join(CACHE_DIR, f))

        # if we are still using more than 90% of the cache, proactively cleanup
        cache_usage = shutil.disk_usage("/tmp/cache")
        if (
            cache_usage.used / cache_usage.total > 0.9
            and cache_usage.free < 200000000
            and len(self.cached_clips) > 0
        ):
            logger.warning("More than 90% of the cache is used.")
            logger.warning(
                "Consider increasing space available at /tmp/cache or reducing max_seconds in your clips config."
            )
            logger.warning("Proactively cleaning up the cache...")
            while cache_usage.used / cache_usage.total > 0.9:
                oldest_clip = min(
                    self.cached_clips.values(), key=lambda x: x["start_time"]
                )
                del self.cached_clips[oldest_clip["path"]]
                os.remove(os.path.join(CACHE_DIR, oldest_clip["path"]))
                cache_usage = shutil.disk_usage("/tmp/cache")

    def create_clip(self, camera, event_data, pre_capture, post_capture):
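        """Stitch the cached segments covering the event (plus pre/post capture)
        into a single mp4 in CLIPS_DIR using ffmpeg's concat demuxer.

        Returns True on success, False if the needed cache files never showed up
        or ffmpeg failed."""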
        # get all clips from the camera with the event sorted
        sorted_clips = sorted(
            [c for c in self.cached_clips.values() if c["camera"] == camera],
            key=lambda i: i["start_time"],
        )

        # if there are no clips in the cache or we are still waiting on a needed file check every 5 seconds
        wait_count = 0
        while (
            len(sorted_clips) == 0
            or sorted_clips[-1]["start_time"] + sorted_clips[-1]["duration"]
            < event_data["end_time"] + post_capture
        ):
            if wait_count > 4:
                logger.warning(
                    f"Unable to create clip for {camera} and event {event_data['id']}. There were no cache files for this event."
                )
                return False
            logger.debug(f"No cache clips for {camera}. Waiting...")
            time.sleep(5)
            self.refresh_cache()
            # get all clips from the camera with the event sorted
            sorted_clips = sorted(
                [c for c in self.cached_clips.values() if c["camera"] == camera],
                key=lambda i: i["start_time"],
            )
            wait_count += 1

        playlist_start = event_data["start_time"] - pre_capture
        playlist_end = event_data["end_time"] + post_capture
        playlist_lines = []
        for clip in sorted_clips:
            # clip ends before playlist start time, skip
            if clip["start_time"] + clip["duration"] < playlist_start:
                continue
            # clip starts after playlist ends, finish
            if clip["start_time"] > playlist_end:
                break
            playlist_lines.append(f"file '{os.path.join(CACHE_DIR, clip['path'])}'")
            # if this is the starting clip, add an inpoint
            if clip["start_time"] < playlist_start:
                playlist_lines.append(
                    f"inpoint {int(playlist_start - clip['start_time'])}"
                )
            # if this is the ending clip, add an outpoint
            if clip["start_time"] + clip["duration"] > playlist_end:
                playlist_lines.append(
                    f"outpoint {int(playlist_end - clip['start_time'])}"
                )

        clip_name = f"{camera}-{event_data['id']}"
        ffmpeg_cmd = [
            "ffmpeg",
            "-y",
            "-protocol_whitelist",
            "pipe,file",
            "-f",
            "concat",
            "-safe",
            "0",
            "-i",
            "-",
            "-c",
            "copy",
            "-movflags",
            "+faststart",
            f"{os.path.join(CLIPS_DIR, clip_name)}.mp4",
        ]

        p = sp.run(
            ffmpeg_cmd,
            input="\n".join(playlist_lines),
            encoding="ascii",
            capture_output=True,
        )
        if p.returncode != 0:
            logger.error(p.stderr)
            return False
        return True

    def run(self):
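        """Pull events off the queue until stopped; track in-progress events on
        'start' and, on 'end', optionally create a clip and write the event to
        the database."""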
        while not self.stop_event.is_set():
            try:
                event_type, camera, event_data = self.event_queue.get(timeout=10)
            except queue.Empty:
                if not self.stop_event.is_set():
                    self.refresh_cache()
                continue

            logger.debug(f"Event received: {event_type} {camera} {event_data['id']}")
            self.refresh_cache()

            if event_type == "start":
                self.events_in_process[event_data["id"]] = event_data

            if event_type == "end":
                clips_config = self.config.cameras[camera].clips

                clip_created = False
                if self.should_create_clip(camera, event_data):
                    if clips_config.enabled and (
                        clips_config.objects is None
                        or event_data["label"] in clips_config.objects
                    ):
                        clip_created = self.create_clip(
                            camera,
                            event_data,
                            clips_config.pre_capture,
                            clips_config.post_capture,
                        )

                if clip_created or event_data["has_snapshot"]:
                    Event.create(
                        id=event_data["id"],
                        label=event_data["label"],
                        camera=camera,
                        start_time=event_data["start_time"],
                        end_time=event_data["end_time"],
                        top_score=event_data["top_score"],
                        false_positive=event_data["false_positive"],
                        zones=list(event_data["entered_zones"]),
                        thumbnail=event_data["thumbnail"],
                        has_clip=clip_created,
                        has_snapshot=event_data["has_snapshot"],
                    )
                del self.events_in_process[event_data["id"]]
                self.event_processed_queue.put((event_data["id"], camera, clip_created))

        logger.info("Exiting event processor...")


class EventCleanup(threading.Thread):
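    """Background thread that periodically expires clips and snapshots according
    to the retention settings, purges duplicate events, and deletes events that
    no longer have any media."""
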
    def __init__(self, config: FrigateConfig, stop_event):
        threading.Thread.__init__(self)
        self.name = "event_cleanup"
        self.config = config
        self.stop_event = stop_event
        self.camera_keys = list(self.config.cameras.keys())

    def expire(self, media):
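        """Expire either clips or snapshots (selected by `media`) that are older
        than their retention window, first for cameras that are no longer in the
        config and then per configured camera."""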
        ## Expire events from unlisted cameras based on the global config
        if media == "clips":
            retain_config = self.config.clips.retain
            file_extension = "mp4"
            update_params = {"has_clip": False}
        else:
            retain_config = self.config.snapshots.retain
            file_extension = "jpg"
            update_params = {"has_snapshot": False}

        distinct_labels = (
            Event.select(Event.label)
            .where(Event.camera.not_in(self.camera_keys))
            .distinct()
        )

        # loop over object types in db
        for l in distinct_labels:
            # get expiration time for this label
            expire_days = retain_config.objects.get(l.label, retain_config.default)
            expire_after = (
                datetime.datetime.now() - datetime.timedelta(days=expire_days)
            ).timestamp()
            # grab all events older than the expiration cutoff
            expired_events = Event.select().where(
                Event.camera.not_in(self.camera_keys),
                Event.start_time < expire_after,
                Event.label == l.label,
            )
            # delete the media from disk
            for event in expired_events:
                media_name = f"{event.camera}-{event.id}"
                media_path = Path(
                    f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
                )
                media_path.unlink(missing_ok=True)
            # update the has_clip/has_snapshot attribute for the db entry
            update_query = Event.update(update_params).where(
                Event.camera.not_in(self.camera_keys),
                Event.start_time < expire_after,
                Event.label == l.label,
            )
            update_query.execute()

        ## Expire events from cameras based on the camera config
        for name, camera in self.config.cameras.items():
            if media == "clips":
                retain_config = camera.clips.retain
            else:
                retain_config = camera.snapshots.retain
            # get distinct objects in database for this camera
            distinct_labels = (
                Event.select(Event.label).where(Event.camera == name).distinct()
            )

            # loop over object types in db
            for l in distinct_labels:
                # get expiration time for this label
                expire_days = retain_config.objects.get(l.label, retain_config.default)
                expire_after = (
                    datetime.datetime.now() - datetime.timedelta(days=expire_days)
                ).timestamp()
                # grab all events older than the expiration cutoff
                expired_events = Event.select().where(
                    Event.camera == name,
                    Event.start_time < expire_after,
                    Event.label == l.label,
                )
                # delete the expired media from disk
                for event in expired_events:
                    media_name = f"{event.camera}-{event.id}"
                    media_path = Path(
                        f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
                    )
                    media_path.unlink(missing_ok=True)
                # update the has_clip/has_snapshot attribute for the db entry
                update_query = Event.update(update_params).where(
                    Event.camera == name,
                    Event.start_time < expire_after,
                    Event.label == l.label,
                )
                update_query.execute()

    def purge_duplicates(self):
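        """Delete events that were recorded more than once for the same label,
        camera, and approximate start time, keeping only the longest copy."""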
        duplicate_query = """with grouped_events as (
          select id,
            label,
            camera,
            has_snapshot,
            has_clip,
            row_number() over (
              partition by label, camera, round(start_time/5,0)*5
              order by end_time-start_time desc
            ) as copy_number
          from event
        )

        select distinct id, camera, has_snapshot, has_clip from grouped_events
        where copy_number > 1;"""

        duplicate_events = Event.raw(duplicate_query)
        for event in duplicate_events:
            logger.debug(f"Removing duplicate: {event.id}")
            media_name = f"{event.camera}-{event.id}"
            if event.has_snapshot:
                media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
                media.unlink(missing_ok=True)
            if event.has_clip:
                media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4")
                media.unlink(missing_ok=True)

        (
            Event.delete()
            .where(Event.id << [event.id for event in duplicate_events])
            .execute()
        )

    def run(self):
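        """Every 5 minutes, expire old media, purge duplicate events, and drop
        events that have neither a clip nor a snapshot."""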
        # only expire events every 5 minutes
        while not self.stop_event.wait(300):
            self.expire("clips")
            self.expire("snapshots")
            self.purge_duplicates()

            # drop events from db where has_clip and has_snapshot are false
            delete_query = Event.delete().where(
                Event.has_clip == False, Event.has_snapshot == False
            )
            delete_query.execute()

        logger.info("Exiting event cleanup...")