initial implementation of recordings table

Jason Hunter 2021-06-06 21:24:36 -04:00 committed by Blake Blackshear
parent 8dfff83447
commit 055bd22138
8 changed files with 235 additions and 50 deletions

frigate/app.py

@@ -20,7 +20,7 @@ from frigate.edgetpu import EdgeTPUProcess
from frigate.events import EventProcessor, EventCleanup
from frigate.http import create_app
from frigate.log import log_process, root_configurer
from frigate.models import Event
from frigate.models import Event, Recordings
from frigate.mqtt import create_mqtt_client
from frigate.object_processing import TrackedObjectProcessor
from frigate.record import RecordingMaintainer
@@ -134,6 +134,14 @@ class FrigateApp:
)
def init_database(self):
# Migrate DB location
old_db_path = os.path.join(CLIPS_DIR, "frigate.db")
if not os.path.isfile(self.config.database.path) and os.path.isfile(
old_db_path
):
os.rename(old_db_path, self.config.database.path)
# Migrate DB schema
migrate_db = SqliteExtDatabase(self.config.database.path)
# Run migrations
@@ -144,7 +152,7 @@ class FrigateApp:
migrate_db.close()
self.db = SqliteQueueDatabase(self.config.database.path)
models = [Event]
models = [Event, Recordings]
self.db.bind(models)
def init_stats(self):
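The init_database() changes above do three things: relocate the SQLite file from the clips folder to the media root (renaming an existing database the first time the new path is missing), run the schema migrations, and bind the new Recordings model alongside Event. The "# Run migrations" step is elided in this hunk; a minimal sketch of the usual peewee_migrate pattern it relies on, assuming the default migrations directory:

from peewee_migrate import Router
from playhouse.sqlite_ext import SqliteExtDatabase

migrate_db = SqliteExtDatabase("/media/frigate/frigate.db")  # the new default path
router = Router(migrate_db, migrate_dir="migrations")
router.run()  # applies any pending scripts, e.g. 003_create_recordings_table
migrate_db.close()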

frigate/config.py

@@ -13,7 +13,7 @@ import numpy as np
import voluptuous as vol
import yaml
from frigate.const import RECORD_DIR, CLIPS_DIR, CACHE_DIR
from frigate.const import BASE_DIR, RECORD_DIR, CLIPS_DIR, CACHE_DIR
from frigate.util import create_mask
logger = logging.getLogger(__name__)
@@ -873,7 +873,7 @@ class CameraConfig:
FRIGATE_CONFIG_SCHEMA = vol.Schema(
{
vol.Optional("database", default={}): {
vol.Optional("path", default=os.path.join(CLIPS_DIR, "frigate.db")): str
vol.Optional("path", default=os.path.join(BASE_DIR, "frigate.db")): str
},
vol.Optional("model", default={"width": 320, "height": 320}): {
vol.Required("width"): int,

frigate/const.py

@@ -1,3 +1,4 @@
CLIPS_DIR = "/media/frigate/clips"
RECORD_DIR = "/media/frigate/recordings"
BASE_DIR = "/media/frigate"
CLIPS_DIR = f"{BASE_DIR}/clips"
RECORD_DIR = f"{BASE_DIR}/recordings"
CACHE_DIR = "/tmp/cache"
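With BASE_DIR in place, the constants here and the new config default line up with the database move in app.py; checking the resulting paths (values follow directly from the constants above):

import os

BASE_DIR = "/media/frigate"
CLIPS_DIR = f"{BASE_DIR}/clips"

os.path.join(BASE_DIR, "frigate.db")   # "/media/frigate/frigate.db" (new default)
os.path.join(CLIPS_DIR, "frigate.db")  # "/media/frigate/clips/frigate.db" (old
                                       # location that init_database renames away)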

frigate/http.py

@@ -27,7 +27,7 @@ from peewee import SqliteDatabase, operator, fn, DoesNotExist, Value
from playhouse.shortcuts import model_to_dict
from frigate.const import CLIPS_DIR, RECORD_DIR
from frigate.models import Event
from frigate.models import Event, Recordings
from frigate.stats import stats_snapshot
from frigate.util import calculate_region
from frigate.version import VERSION
@@ -453,26 +453,45 @@ def latest_frame(camera_name):
@bp.route("/<camera_name>/recordings")
def recordings(camera_name):
files = glob.glob(f"{RECORD_DIR}/*/*/*/{camera_name}")
if len(files) == 0:
return jsonify([])
files.sort()
dates = OrderedDict()
for path in files:
first = glob.glob(f"{path}/00.*.mp4")
delay = 0
if len(first) > 0:
delay = int(first[0].strip(path).split(".")[1])
search = re.search(r".+/(\d{4}[-]\d{2})/(\d{2})/(\d{2}).+", path)
if not search:
continue
date = f"{search.group(1)}-{search.group(2)}"
if date not in dates:
dates[date] = OrderedDict()
dates[date][search.group(3)] = {"delay": delay, "events": []}
# Retrieve all recordings for this camera
recordings = (
Recordings.select()
.where(Recordings.camera == camera_name)
.order_by(Recordings.start_time.asc())
)
last_end = 0
recording: Recordings
for recording in recordings:
date = datetime.fromtimestamp(recording.start_time)
key = date.strftime("%Y-%m-%d")
hour = date.strftime("%H")
# Create Day Record
if key not in dates:
dates[key] = OrderedDict()
# Create Hour Record
if hour not in dates[key]:
dates[key][hour] = {"delay": {}, "events": []}
# Check for delay
the_hour = datetime.strptime(f"{key} {hour}", "%Y-%m-%d %H").timestamp()
# diff current recording start time and the greater of the previous end time or top of the hour
diff = recording.start_time - max(last_end, the_hour)
# Determine seconds into recording
seconds = 0
if datetime.fromtimestamp(last_end).strftime("%H") == hour:
seconds = int(last_end - the_hour)
# Determine the delay
delay = min(int(diff), 3600 - seconds)
if delay > 1:
# Add an offset for any delay greater than a second
dates[key][hour]["delay"][seconds] = delay
last_end = recording.end_time
# Packing intervals to return all events with same label and overlapping times as one row.
# See: https://blogs.solidq.com/en/sqlserver/packing-intervals/
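A worked example of the gap bookkeeping above, using hypothetical timestamps relative to the top of a 09:00 hour: the camera records 09:00:00-09:10:00 and then again from 09:25:00.

the_hour = 0         # top of the hour, as a timestamp
last_end = 600       # previous recording ended 09:10:00
start_time = 1500    # next recording starts 09:25:00

diff = start_time - max(last_end, the_hour)  # 900 s hole in the footage
seconds = int(last_end - the_hour)           # hole opens 600 s into the hour
delay = min(int(diff), 3600 - seconds)       # 900, capped at the rest of the hour
# -> dates[key]["09"]["delay"] == {600: 900}

The packing query itself is elided in this hunk; the linked article's idea, rendered as a small Python sketch rather than the endpoint's actual SQL:

def pack(intervals):
    # Merge (label, start, end) rows whose times overlap and whose labels match.
    packed = []
    for label, start, end in sorted(intervals):
        if packed and packed[-1][0] == label and start <= packed[-1][2]:
            packed[-1][2] = max(packed[-1][2], end)  # extend the open interval
        else:
            packed.append([label, start, end])
    return packed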
@@ -511,15 +530,15 @@ def recordings(camera_name):
camera_name,
)
e: Event
for e in events:
date = datetime.fromtimestamp(e.start_time)
event: Event
for event in events:
date = datetime.fromtimestamp(event.start_time)
key = date.strftime("%Y-%m-%d")
hour = date.strftime("%H")
if key in dates and hour in dates[key]:
dates[key][hour]["events"].append(
model_to_dict(
e,
event,
exclude=[
Event.false_positive,
Event.zones,
@@ -547,29 +566,50 @@
@bp.route("/vod/<path:path>")
def vod(path):
# Make sure we actually have recordings
if not os.path.isdir(f"{RECORD_DIR}/{path}"):
return "Recordings not found.", 404
files = glob.glob(f"{RECORD_DIR}/{path}/*.mp4")
files.sort()
# Break up path
parts = path.split("/")
start_date = datetime.strptime(f"{parts[0]}-{parts[1]} {parts[2]}", "%Y-%m-%d %H")
end_date = start_date + timedelta(hours=1)
start_ts = start_date.timestamp()
end_ts = end_date.timestamp()
camera = parts[3]
# Select all recordings where either the start or end dates fall in the requested hour
recordings = (
Recordings.select()
.where(
(Recordings.start_time.between(start_ts, end_ts))
| (Recordings.end_time.between(start_ts, end_ts))
)
.where(Recordings.camera == camera)
.order_by(Recordings.start_time.asc())
)
clips = []
durations = []
for filename in files:
clips.append({"type": "source", "path": filename})
video = cv2.VideoCapture(filename)
duration = int(
video.get(cv2.CAP_PROP_FRAME_COUNT) / video.get(cv2.CAP_PROP_FPS) * 1000
)
durations.append(duration)
# Should we cache?
parts = path.split("/", 4)
date = datetime.strptime(f"{parts[0]}-{parts[1]} {parts[2]}", "%Y-%m-%d %H")
recording: Recordings
for recording in recordings:
clip = {"type": "source", "path": recording.path}
duration = int(recording.duration * 1000)
# Determine if offset is needed for first clip
if recording.start_time < start_ts:
offset = int((start_ts - recording.start_time) * 1000)
clip["clipFrom"] = offset
duration -= offset
# Determine if we need to end the last clip early
if recording.end_time > end_ts:
duration -= int((recording.end_time - end_ts) * 1000)
clips.append(clip)
durations.append(duration)
return jsonify(
{
"cache": datetime.now() - timedelta(hours=2) > date,
"cache": datetime.now() - timedelta(hours=1) > start_date,
"discontinuity": False,
"durations": durations,
"sequences": [{"clips": clips}],

frigate/models.py

@@ -1,3 +1,4 @@
from numpy import unique
from peewee import *
from playhouse.sqlite_ext import *
@@ -14,3 +15,12 @@ class Event(Model):
thumbnail = TextField()
has_clip = BooleanField(default=True)
has_snapshot = BooleanField(default=True)
class Recordings(Model):
id = CharField(null=False, primary_key=True, max_length=30)
camera = CharField(index=True, max_length=20)
path = CharField(unique=True)
start_time = DateTimeField()
end_time = DateTimeField()
duration = FloatField()
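start_time and end_time are declared as DateTimeField, but the writer code in record.py below stores datetime.timestamp() floats, so the columns effectively hold Unix timestamps. A small usage sketch with hypothetical values:

Recordings.create(
    id="1622942676.0-ab12cd",  # start timestamp plus six random characters
    camera="front_door",
    path="/media/frigate/recordings/2021-06/06/21/front_door/24.36.mp4",
    start_time=1622942676.0,
    end_time=1622942686.0,
    duration=10.0,
)
overlapping = Recordings.select().where(
    (Recordings.camera == "front_door")
    & Recordings.start_time.between(1622941200, 1622944800)
)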

frigate/record.py

@@ -1,19 +1,18 @@
import datetime
import itertools
import json
import logging
import os
import queue
import random
import string
import subprocess as sp
import threading
import time
from collections import defaultdict
from pathlib import Path
import psutil
from frigate.config import FrigateConfig
from frigate.const import RECORD_DIR, CLIPS_DIR, CACHE_DIR
from frigate.const import RECORD_DIR
from frigate.models import Recordings
logger = logging.getLogger(__name__)
@@ -84,6 +83,7 @@ class RecordingMaintainer(threading.Thread):
p = sp.run(ffprobe_cmd, capture_output=True)
if p.returncode == 0:
duration = float(p.stdout.decode().strip())
end_time = start_time + datetime.timedelta(seconds=duration)
else:
logger.info(f"bad file: {f}")
os.remove(os.path.join(RECORD_DIR, f))
@@ -97,8 +97,21 @@ class RecordingMaintainer(threading.Thread):
os.makedirs(directory)
file_name = f"{start_time.strftime('%M.%S.mp4')}"
file_path = os.path.join(directory, file_name)
os.rename(os.path.join(RECORD_DIR, f), os.path.join(directory, file_name))
os.rename(os.path.join(RECORD_DIR, f), file_path)
rand_id = "".join(
random.choices(string.ascii_lowercase + string.digits, k=6)
)
Recordings.create(
id=f"{start_time.timestamp()}-{rand_id}",
camera=camera,
path=file_path,
start_time=start_time.timestamp(),
end_time=end_time.timestamp(),
duration=duration,
)
def expire_files(self):
delete_before = {}
@@ -112,6 +125,7 @@ class RecordingMaintainer(threading.Thread):
if not p.parent.name in delete_before:
continue
if p.stat().st_mtime < delete_before[p.parent.name]:
Recordings.delete().where(Recordings.path == str(p)).execute()
p.unlink(missing_ok=True)
def run(self):
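The ffprobe_cmd referenced above is elided in this hunk, but the migration below runs the identical probe; as a standalone helper for reference:

import subprocess as sp

def probe_duration(path: str) -> float:
    # Mirrors the probe in 003_create_recordings_table.py below.
    ffprobe_cmd = [
        "ffprobe",
        "-v", "error",
        "-show_entries", "format=duration",
        "-of", "default=noprint_wrappers=1:nokey=1",
        path,
    ]
    p = sp.run(ffprobe_cmd, capture_output=True)
    return float(p.stdout.decode().strip()) if p.returncode == 0 else 0.0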

migrations/003_create_recordings_table.py

@@ -0,0 +1,108 @@
"""Peewee migrations -- 003_create_recordings_table.py.
Some examples (model - class or model name)::
> Model = migrator.orm['model_name'] # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.python(func, *args, **kwargs) # Run python code
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.drop_index(model, *col_names)
> migrator.add_not_null(model, *field_names)
> migrator.drop_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
"""
from concurrent.futures import as_completed, ThreadPoolExecutor
import datetime as dt
import peewee as pw
from decimal import ROUND_HALF_EVEN
import random
import string
import os
import subprocess as sp
import glob
import re
try:
import playhouse.postgres_ext as pw_pext
except ImportError:
pass
from frigate.const import RECORD_DIR
from frigate.models import Recordings
SQL = pw.SQL
def migrate(migrator, database, fake=False, **kwargs):
migrator.create_model(Recordings)
def backfill():
# First add the index here, because there is a bug in peewee_migrate
# when trying to create a multi-column index in the same migration
# as the table: https://github.com/klen/peewee_migrate/issues/19
Recordings.add_index("start_time", "end_time")
Recordings.create_table()
# Backfill existing recordings
files = glob.glob(f"{RECORD_DIR}/*/*/*/*/*.mp4")
def probe(path):
ffprobe_cmd = [
"ffprobe",
"-v",
"error",
"-show_entries",
"format=duration",
"-of",
"default=noprint_wrappers=1:nokey=1",
path,
]
p = sp.run(ffprobe_cmd, capture_output=True)
if p.returncode == 0:
return float(p.stdout.decode().strip())
else:
os.remove(path)
return 0
with ThreadPoolExecutor() as executor:
future_to_path = {executor.submit(probe, path): path for path in files}
for future in as_completed(future_to_path):
path = future_to_path[future]
duration = future.result()
rand_id = "".join(
random.choices(string.ascii_lowercase + string.digits, k=6)
)
search = re.search(
r".+/(\d{4}[-]\d{2})/(\d{2})/(\d{2})/(.+)/(\d{2})\.(\d{2}).mp4",
path,
)
if not search:
continue  # skip paths that do not match the expected layout
date = f"{search.group(1)}-{search.group(2)} {search.group(3)}:{search.group(5)}:{search.group(6)}"
start = dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S")
end = start + dt.timedelta(seconds=duration)
Recordings.create(
id=f"{start.timestamp()}-{rand_id}",
camera=search.group(4),
path=path,
start_time=start.timestamp(),
end_time=end.timestamp(),
duration=duration,
)
migrator.python(backfill)
def rollback(migrator, database, fake=False, **kwargs):
migrator.remove_model(Recordings)
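A worked example of the backfill's path parsing (the sample path is hypothetical, but its shape is exactly the layout RecordingMaintainer writes: RECORD_DIR/YYYY-MM/DD/HH/camera/MM.SS.mp4):

import re

sample = "/media/frigate/recordings/2021-06/06/21/front_door/24.36.mp4"
m = re.search(
    r".+/(\d{4}[-]\d{2})/(\d{2})/(\d{2})/(.+)/(\d{2})\.(\d{2}).mp4", sample
)
# m.groups() == ("2021-06", "06", "21", "front_door", "24", "36")
# -> camera "front_door", start 2021-06-06 21:24:36, end = start + probed duration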

web/src/components/EventCard.jsx

@@ -85,7 +85,11 @@ export function EventCard({ camera, event, delay }) {
const start = fromUnixTime(event.start_time);
const end = fromUnixTime(event.end_time);
const duration = addSeconds(new Date(0), differenceInSeconds(end, start));
const seconds = Math.max(differenceInSeconds(start, startOfHour(start)) - delay - 10, 0);
const position = differenceInSeconds(start, startOfHour(start));
const offset = Object.entries(delay)
.map(([p, d]) => (position > p ? d : 0))
.reduce((p, c) => p + c, 0);
const seconds = Math.max(position - offset - 10, 0);
return (
<Link className="" href={`/recording/${camera}/${format(start, 'yyyy-MM-dd')}/${format(start, 'HH')}/${seconds}`}>
<div className="flex flex-row mb-2">
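The delay prop is now the per-hour gap map built by the /recordings endpoint ({seconds-into-hour: missing-seconds}), and offset sums every gap that opened before the event's position; note that Object.entries yields string keys, so position > p leans on JavaScript's numeric coercion. The same math in Python, with hypothetical values:

delay = {600: 900}  # one 900 s recording hole opening 600 s into the hour
position = 1700     # event starts 1700 s after the top of the hour
offset = sum(d for p, d in delay.items() if position > p)  # 900
seconds = max(position - offset - 10, 0)                   # 790: vod seek target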