import base64
import copy
import glob
import json
import logging
import os
import subprocess as sp
import time
import traceback
from datetime import datetime, timedelta, timezone
from functools import reduce
from pathlib import Path
from urllib.parse import unquote

import cv2
import numpy as np
import pytz
from flask import (
    Blueprint,
    Flask,
    Response,
    current_app,
    jsonify,
    make_response,
    request,
)
from peewee import SqliteDatabase, operator, fn, DoesNotExist
from playhouse.shortcuts import model_to_dict
from tzlocal import get_localzone_name

from frigate.config import FrigateConfig
from frigate.const import CLIPS_DIR, MAX_SEGMENT_DURATION, RECORD_DIR
from frigate.models import Event, Recordings
from frigate.object_processing import TrackedObject
from frigate.stats import stats_snapshot
from frigate.storage import StorageMaintainer
from frigate.util import (
    clean_camera_user_pass,
    ffprobe_stream,
    get_tz_modifiers,
    restart_frigate,
    vainfo_hwaccel,
)
from frigate.version import VERSION

logger = logging.getLogger(__name__)

bp = Blueprint("frigate", __name__)


def create_app(
    frigate_config,
    database: SqliteDatabase,
    stats_tracking,
    detected_frames_processor,
    storage_maintainer: StorageMaintainer,
    plus_api,
):
    app = Flask(__name__)

    @app.before_request
    def _db_connect():
        if database.is_closed():
            database.connect()

    @app.teardown_request
    def _db_close(exc):
        if not database.is_closed():
            database.close()

    app.frigate_config = frigate_config
    app.stats_tracking = stats_tracking
    app.detected_frames_processor = detected_frames_processor
    app.storage_maintainer = storage_maintainer
    app.plus_api = plus_api
    app.camera_error_image = None
    app.hwaccel_errors = []

    app.register_blueprint(bp)

    return app


@bp.route("/")
def is_healthy():
    return "Frigate is running. Alive and healthy!"
@bp.route("/events/summary")
def events_summary():
    tz_name = request.args.get("timezone", default="utc", type=str)
    hour_modifier, minute_modifier = get_tz_modifiers(tz_name)
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)

    clauses = []

    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    if len(clauses) == 0:
        clauses.append((True))

    groups = (
        Event.select(
            Event.camera,
            Event.label,
            Event.sub_label,
            fn.strftime(
                "%Y-%m-%d",
                fn.datetime(
                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ).alias("day"),
            Event.zones,
            fn.COUNT(Event.id).alias("count"),
        )
        .where(reduce(operator.and_, clauses))
        .group_by(
            Event.camera,
            Event.label,
            Event.sub_label,
            fn.strftime(
                "%Y-%m-%d",
                fn.datetime(
                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ),
            Event.zones,
        )
    )

    return jsonify([e for e in groups.dicts()])


@bp.route("/events/<id>", methods=("GET",))
def event(id):
    try:
        return model_to_dict(Event.get(Event.id == id))
    except DoesNotExist:
        return "Event not found", 404


@bp.route("/events/<id>/retain", methods=("POST",))
def set_retain(id):
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    event.retain_indefinitely = True
    event.save()

    return make_response(
        jsonify({"success": True, "message": "Event " + id + " retained"}), 200
    )
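

# Submitting an event to Frigate+ uploads the event's clean (unannotated)
# snapshot, `{camera}-{id}-clean.png` from CLIPS_DIR, and stores the returned
# plus_id on the event so the same event cannot be submitted twice.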
@bp.route("/events/<id>/plus", methods=("POST",))
def send_to_plus(id):
    if not current_app.plus_api.is_active():
        message = "PLUS_API_KEY environment variable is not set"
        logger.error(message)
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": message,
                }
            ),
            400,
        )

    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        message = f"Event {id} not found"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 404)

    if event.end_time is None:
        logger.error(f"Unable to load clean png for in-progress event: {event.id}")
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "Unable to load clean png for in-progress event",
                }
            ),
            400,
        )

    if event.plus_id:
        message = "Already submitted to plus"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 400)

    # load clean.png
    try:
        filename = f"{event.camera}-{event.id}-clean.png"
        image = cv2.imread(os.path.join(CLIPS_DIR, filename))
    except Exception:
        logger.error(f"Unable to load clean png for event: {event.id}")
        return make_response(
            jsonify(
                {"success": False, "message": "Unable to load clean png for event"}
            ),
            400,
        )

    if image is None or image.size == 0:
        logger.error(f"Unable to load clean png for event: {event.id}")
        return make_response(
            jsonify(
                {"success": False, "message": "Unable to load clean png for event"}
            ),
            400,
        )

    try:
        plus_id = current_app.plus_api.upload_image(image, event.camera)
    except Exception as ex:
        logger.exception(ex)
        return make_response(
            jsonify({"success": False, "message": str(ex)}),
            400,
        )

    # store image id in the database
    event.plus_id = plus_id
    event.save()

    return make_response(jsonify({"success": True, "plus_id": plus_id}), 200)


@bp.route("/events/<id>/retain", methods=("DELETE",))
def delete_retain(id):
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    event.retain_indefinitely = False
    event.save()

    return make_response(
        jsonify({"success": True, "message": "Event " + id + " un-retained"}), 200
    )


@bp.route("/events/<id>/sub_label", methods=("POST",))
def set_sub_label(id):
    try:
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    if request.json:
        new_sub_label = request.json.get("subLabel")
    else:
        new_sub_label = None

    if new_sub_label and len(new_sub_label) > 20:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": new_sub_label
                    + " exceeds the 20 character limit for sub_label",
                }
            ),
            400,
        )

    if not event.end_time:
        tracked_obj: TrackedObject = (
            current_app.detected_frames_processor.camera_states[
                event.camera
            ].tracked_objects.get(event.id)
        )

        if tracked_obj:
            tracked_obj.obj_data["sub_label"] = new_sub_label

    event.sub_label = new_sub_label
    event.save()
    return make_response(
        jsonify(
            {
                "success": True,
                "message": f"Event {id} sub label set to {new_sub_label}",
            }
        ),
        200,
    )
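

# With split_joined=1, a stored joined sub label such as "bob, john" is split
# so the returned list contains "bob" and "john" as individual entries.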
@bp.route("/sub_labels")
def get_sub_labels():
    split_joined = request.args.get("split_joined", type=int)

    try:
        events = Event.select(Event.sub_label).distinct()
    except Exception as e:
        return make_response(
            jsonify({"success": False, "message": f"Failed to get sub_labels: {e}"}),
            404,
        )

    sub_labels = [e.sub_label for e in events]

    if None in sub_labels:
        sub_labels.remove(None)

    if split_joined:
        original_labels = sub_labels.copy()

        for label in original_labels:
            if "," in label:
                sub_labels.remove(label)
                parts = label.split(",")

                for part in parts:
                    if part.strip() not in sub_labels:
                        sub_labels.append(part.strip())

    sub_labels.sort()
    return jsonify(sub_labels)


@bp.route("/events/<id>", methods=("DELETE",))
def delete_event(id):
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    media_name = f"{event.camera}-{event.id}"
    if event.has_snapshot:
        media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
        media.unlink(missing_ok=True)
        media = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.png")
        media.unlink(missing_ok=True)
    if event.has_clip:
        media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4")
        media.unlink(missing_ok=True)

    event.delete_instance()
    return make_response(
        jsonify({"success": True, "message": "Event " + id + " deleted"}), 200
    )
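

# Thumbnails for completed events are immutable, so they are served with a
# long max-age; in-progress events are marked no-store so clients refetch.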
@bp.route("/events/<id>/thumbnail.jpg")
def event_thumbnail(id, max_cache_age=2592000):
    format = request.args.get("format", "ios")
    thumbnail_bytes = None
    event_complete = False
    try:
        event = Event.get(Event.id == id)
        if event.end_time is not None:
            event_complete = True
        thumbnail_bytes = base64.b64decode(event.thumbnail)
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            camera_states = current_app.detected_frames_processor.camera_states.values()
            for camera_state in camera_states:
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    if tracked_obj is not None:
                        thumbnail_bytes = tracked_obj.get_thumbnail()
        except Exception:
            return "Event not found", 404

    if thumbnail_bytes is None:
        return "Event not found", 404

    # android notifications prefer a 2:1 ratio
    if format == "android":
        jpg_as_np = np.frombuffer(thumbnail_bytes, dtype=np.uint8)
        img = cv2.imdecode(jpg_as_np, flags=1)
        thumbnail = cv2.copyMakeBorder(
            img,
            0,
            0,
            int(img.shape[1] * 0.5),
            int(img.shape[1] * 0.5),
            cv2.BORDER_CONSTANT,
            (0, 0, 0),
        )
        ret, jpg = cv2.imencode(".jpg", thumbnail, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
        thumbnail_bytes = jpg.tobytes()

    response = make_response(thumbnail_bytes)
    response.headers["Content-Type"] = "image/jpeg"
    if event_complete:
        response.headers["Cache-Control"] = f"private, max-age={max_cache_age}"
    else:
        response.headers["Cache-Control"] = "no-store"
    return response


@bp.route("/<camera_name>/<label>/best.jpg")
@bp.route("/<camera_name>/<label>/thumbnail.jpg")
def label_thumbnail(camera_name, label):
    label = unquote(label)
    if label == "any":
        event_query = (
            Event.select()
            .where(Event.camera == camera_name)
            .order_by(Event.start_time.desc())
        )
    else:
        event_query = (
            Event.select()
            .where(Event.camera == camera_name)
            .where(Event.label == label)
            .order_by(Event.start_time.desc())
        )

    try:
        event = event_query.get()

        return event_thumbnail(event.id, 60)
    except DoesNotExist:
        frame = np.zeros((175, 175, 3), np.uint8)
        ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])

        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        response.headers["Cache-Control"] = "no-store"
        return response


@bp.route("/events/<id>/snapshot.jpg")
def event_snapshot(id):
    download = request.args.get("download", type=bool)
    event_complete = False
    jpg_bytes = None
    try:
        event = Event.get(Event.id == id, Event.end_time != None)
        event_complete = True
        if not event.has_snapshot:
            return "Snapshot not available", 404
        # read snapshot from disk
        with open(
            os.path.join(CLIPS_DIR, f"{event.camera}-{id}.jpg"), "rb"
        ) as image_file:
            jpg_bytes = image_file.read()
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            camera_states = current_app.detected_frames_processor.camera_states.values()
            for camera_state in camera_states:
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    if tracked_obj is not None:
                        jpg_bytes = tracked_obj.get_jpg_bytes(
                            timestamp=request.args.get("timestamp", type=int),
                            bounding_box=request.args.get("bbox", type=int),
                            crop=request.args.get("crop", type=int),
                            height=request.args.get("h", type=int),
                            quality=request.args.get("quality", default=70, type=int),
                        )
        except Exception:
            return "Event not found", 404
    except Exception:
        return "Event not found", 404

    if jpg_bytes is None:
        return "Event not found", 404

    response = make_response(jpg_bytes)
    response.headers["Content-Type"] = "image/jpeg"
    if event_complete:
        response.headers["Cache-Control"] = "private, max-age=31536000"
    else:
        response.headers["Cache-Control"] = "no-store"
    if download:
        response.headers[
            "Content-Disposition"
        ] = f"attachment; filename=snapshot-{id}.jpg"
    return response


@bp.route("/<camera_name>/<label>/snapshot.jpg")
def label_snapshot(camera_name, label):
    label = unquote(label)
    if label == "any":
        event_query = (
            Event.select()
            .where(Event.camera == camera_name)
            .where(Event.has_snapshot == True)
            .order_by(Event.start_time.desc())
        )
    else:
        event_query = (
            Event.select()
            .where(Event.camera == camera_name)
            .where(Event.label == label)
            .where(Event.has_snapshot == True)
            .order_by(Event.start_time.desc())
        )

    try:
        event = event_query.get()
        return event_snapshot(event.id)
    except DoesNotExist:
        frame = np.zeros((720, 1280, 3), np.uint8)
        ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])

        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        return response


@bp.route("/events/<id>/clip.mp4")
def event_clip(id):
    download = request.args.get("download", type=bool)

    try:
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        return "Event not found.", 404

    if not event.has_clip:
        return "Clip not available", 404

    file_name = f"{event.camera}-{id}.mp4"
    clip_path = os.path.join(CLIPS_DIR, file_name)

    if not os.path.isfile(clip_path):
        end_ts = (
            datetime.now().timestamp() if event.end_time is None else event.end_time
        )
        return recording_clip(event.camera, event.start_time, end_ts)

    response = make_response()
    response.headers["Content-Description"] = "File Transfer"
    response.headers["Cache-Control"] = "no-cache"
    response.headers["Content-Type"] = "video/mp4"
    if download:
        response.headers["Content-Disposition"] = "attachment; filename=%s" % file_name
    response.headers["Content-Length"] = os.path.getsize(clip_path)
    response.headers[
        "X-Accel-Redirect"
    ] = f"/clips/{file_name}"  # nginx: http://wiki.nginx.org/NginxXSendfile

    return response
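

# Example query (hypothetical host):
#   GET /events?cameras=front,back&labels=person&has_clip=1&limit=10
# The singular camera/label/zone/sub_label params are deprecated aliases for
# their plural forms and are folded into them below.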
@bp.route("/events")
def events():
    camera = request.args.get("camera", "all")
    cameras = request.args.get("cameras", "all")

    # handle old camera arg
    if cameras == "all" and camera != "all":
        cameras = camera

    label = unquote(request.args.get("label", "all"))
    labels = request.args.get("labels", "all")

    # handle old label arg
    if labels == "all" and label != "all":
        labels = label

    sub_label = request.args.get("sub_label", "all")
    sub_labels = request.args.get("sub_labels", "all")

    # handle old sub_label arg
    if sub_labels == "all" and sub_label != "all":
        sub_labels = sub_label

    zone = request.args.get("zone", "all")
    zones = request.args.get("zones", "all")

    # handle old zone arg
    if zones == "all" and zone != "all":
        zones = zone

    limit = request.args.get("limit", 100)
    after = request.args.get("after", type=float)
    before = request.args.get("before", type=float)
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)
    in_progress = request.args.get("in_progress", type=int)
    include_thumbnails = request.args.get("include_thumbnails", default=1, type=int)
    favorites = request.args.get("favorites", type=int)

    clauses = []
    excluded_fields = []

    selected_columns = [
        Event.id,
        Event.camera,
        Event.label,
        Event.zones,
        Event.start_time,
        Event.end_time,
        Event.has_clip,
        Event.has_snapshot,
        Event.plus_id,
        Event.retain_indefinitely,
        Event.sub_label,
        Event.top_score,
    ]

    if camera != "all":
        clauses.append((Event.camera == camera))

    if cameras != "all":
        camera_list = cameras.split(",")
        clauses.append((Event.camera << camera_list))

    if labels != "all":
        label_list = labels.split(",")
        clauses.append((Event.label << label_list))

    if sub_labels != "all":
        # use matching so joined sub labels are included
        # for example a sub label 'bob' would get events
        # with sub labels 'bob' and 'bob, john'
        sub_label_clauses = []
        filtered_sub_labels = sub_labels.split(",")

        if "None" in filtered_sub_labels:
            filtered_sub_labels.remove("None")
            sub_label_clauses.append((Event.sub_label.is_null()))

        for label in filtered_sub_labels:
            sub_label_clauses.append(
                (Event.sub_label.cast("text") == label)
            )  # include exact matches

            # include this label when part of a list
            sub_label_clauses.append((Event.sub_label.cast("text") % f"*{label},*"))
            sub_label_clauses.append((Event.sub_label.cast("text") % f"*, {label}*"))

        sub_label_clause = reduce(operator.or_, sub_label_clauses)
        clauses.append((sub_label_clause))

    if zones != "all":
        # use matching so events with multiple zones
        # still match on a search where any zone matches
        zone_clauses = []
        filtered_zones = zones.split(",")

        if "None" in filtered_zones:
            filtered_zones.remove("None")
            zone_clauses.append((Event.zones.length() == 0))

        for zone in filtered_zones:
            zone_clauses.append((Event.zones.cast("text") % f'*"{zone}"*'))

        zone_clause = reduce(operator.or_, zone_clauses)
        clauses.append((zone_clause))

    if after:
        clauses.append((Event.start_time > after))

    if before:
        clauses.append((Event.start_time < before))

    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    if in_progress is not None:
        clauses.append((Event.end_time.is_null(in_progress)))

    if not include_thumbnails:
        excluded_fields.append(Event.thumbnail)
    else:
        selected_columns.append(Event.thumbnail)

    if favorites:
        clauses.append((Event.retain_indefinitely == favorites))

    if len(clauses) == 0:
        clauses.append((True))

    events = (
        Event.select(*selected_columns)
        .where(reduce(operator.and_, clauses))
        .order_by(Event.start_time.desc())
        .limit(limit)
    )

    return jsonify([model_to_dict(e, exclude=excluded_fields) for e in events])
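

# Camera credentials are scrubbed before the config is returned:
# clean_camera_user_pass strips the user:pass portion from input paths and
# from the assembled ffmpeg commands.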
@bp.route("/config")
def config():
    config = current_app.frigate_config.dict()

    for camera_name, camera in current_app.frigate_config.cameras.items():
        camera_dict = config["cameras"][camera_name]

        # clean paths
        for input in camera_dict.get("ffmpeg", {}).get("inputs", []):
            input["path"] = clean_camera_user_pass(input["path"])

        # add clean ffmpeg_cmds
        camera_dict["ffmpeg_cmds"] = copy.deepcopy(camera.ffmpeg_cmds)
        for cmd in camera_dict["ffmpeg_cmds"]:
            cmd["cmd"] = clean_camera_user_pass(" ".join(cmd["cmd"]))

    config["plus"] = {"enabled": current_app.plus_api.is_active()}

    return jsonify(config)


@bp.route("/config/raw")
def config_raw():
    config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

    # Check if we can use .yaml instead of .yml
    config_file_yaml = config_file.replace(".yml", ".yaml")

    if os.path.isfile(config_file_yaml):
        config_file = config_file_yaml

    if not os.path.isfile(config_file):
        return "Could not find file", 410

    with open(config_file, "r") as f:
        raw_config = f.read()

    return raw_config, 200


@bp.route("/config/save", methods=["POST"])
def config_save():
    save_option = request.args.get("save_option")

    new_config = request.get_data().decode()

    if not new_config:
        return "Config with body param is required", 400

    # Validate the config schema
    try:
        FrigateConfig.parse_raw(new_config)
    except Exception:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": f"\nConfig Error:\n\n{str(traceback.format_exc())}",
                }
            ),
            400,
        )

    # Save the config to file
    try:
        config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

        # Check if we can use .yaml instead of .yml
        config_file_yaml = config_file.replace(".yml", ".yaml")

        if os.path.isfile(config_file_yaml):
            config_file = config_file_yaml

        with open(config_file, "w") as f:
            f.write(new_config)
    except Exception:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "Could not write config file, be sure that Frigate has write permission on the config file.",
                }
            ),
            400,
        )

    if save_option == "restart":
        try:
            restart_frigate()
        except Exception as e:
            logger.error(f"Error restarting Frigate: {e}")
            return "Config successfully saved, unable to restart Frigate", 200

        return (
            "Config successfully saved, restarting (this can take up to one minute)...",
            200,
        )
    else:
        return "Config successfully saved.", 200


@bp.route("/config/schema.json")
def config_schema():
    return current_app.response_class(
        current_app.frigate_config.schema_json(), mimetype="application/json"
    )


@bp.route("/version")
def version():
    return VERSION


@bp.route("/stats")
def stats():
    stats = stats_snapshot(
        current_app.frigate_config,
        current_app.stats_tracking,
        current_app.hwaccel_errors,
    )
    return jsonify(stats)


@bp.route("/<camera_name>")
def mjpeg_feed(camera_name):
    fps = int(request.args.get("fps", "3"))
    height = int(request.args.get("h", "360"))
    draw_options = {
        "bounding_boxes": request.args.get("bbox", type=int),
        "timestamp": request.args.get("timestamp", type=int),
        "zones": request.args.get("zones", type=int),
        "mask": request.args.get("mask", type=int),
        "motion_boxes": request.args.get("motion", type=int),
        "regions": request.args.get("regions", type=int),
    }
    if camera_name in current_app.frigate_config.cameras:
        # return a multipart response
        return Response(
            imagestream(
                current_app.detected_frames_processor,
                camera_name,
                fps,
                height,
                draw_options,
            ),
            mimetype="multipart/x-mixed-replace; boundary=frame",
        )
    else:
        return f"Camera named {camera_name} not found", 404


@bp.route("/<camera_name>/latest.jpg")
def latest_frame(camera_name):
    draw_options = {
        "bounding_boxes": request.args.get("bbox", type=int),
        "timestamp": request.args.get("timestamp", type=int),
        "zones": request.args.get("zones", type=int),
        "mask": request.args.get("mask", type=int),
        "motion_boxes": request.args.get("motion", type=int),
        "regions": request.args.get("regions", type=int),
    }
    resize_quality = request.args.get("quality", default=70, type=int)

    if camera_name in current_app.frigate_config.cameras:
        frame = current_app.detected_frames_processor.get_current_frame(
            camera_name, draw_options
        )

        if frame is None or datetime.now().timestamp() > (
            current_app.detected_frames_processor.get_current_frame_time(camera_name)
            + 10
        ):
            if current_app.camera_error_image is None:
                error_image = glob.glob("/opt/frigate/frigate/images/camera-error.jpg")

                if len(error_image) > 0:
                    current_app.camera_error_image = cv2.imread(
                        error_image[0], cv2.IMREAD_UNCHANGED
                    )

            frame = current_app.camera_error_image

        height = int(request.args.get("h", str(frame.shape[0])))
        width = int(height * frame.shape[1] / frame.shape[0])

        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)

        ret, jpg = cv2.imencode(
            ".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality]
        )
        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        response.headers["Cache-Control"] = "no-store"
        return response
    elif camera_name == "birdseye" and current_app.frigate_config.birdseye.restream:
        frame = cv2.cvtColor(
            current_app.detected_frames_processor.get_current_frame(camera_name),
            cv2.COLOR_YUV2BGR_I420,
        )

        height = int(request.args.get("h", str(frame.shape[0])))
        width = int(height * frame.shape[1] / frame.shape[0])

        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)

        ret, jpg = cv2.imencode(
            ".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality]
        )
        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        response.headers["Cache-Control"] = "no-store"
        return response
    else:
        return f"Camera named {camera_name} not found", 404


@bp.route("/recordings/storage", methods=["GET"])
def get_recordings_storage_usage():
    recording_stats = stats_snapshot(
        current_app.frigate_config,
        current_app.stats_tracking,
        current_app.hwaccel_errors,
    )["service"]["storage"][RECORD_DIR]

    if not recording_stats:
        return jsonify({})

    total_mb = recording_stats["total"]

    camera_usages: dict[
        str, dict
    ] = current_app.storage_maintainer.calculate_camera_usages()

    for camera_name in camera_usages.keys():
        if camera_usages.get(camera_name, {}).get("usage"):
            camera_usages[camera_name]["usage_percent"] = (
                camera_usages.get(camera_name, {}).get("usage", 0) / total_mb
            ) * 100

    return jsonify(camera_usages)
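

# Example request (hypothetical host):
#   GET /front_door/recordings/summary?timezone=Europe/Berlin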
# return hourly summary for recordings of camera
@bp.route("/<camera_name>/recordings/summary")
def recordings_summary(camera_name):
    tz_name = request.args.get("timezone", default="utc", type=str)
    hour_modifier, minute_modifier = get_tz_modifiers(tz_name)
    recording_groups = (
        Recordings.select(
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(
                    Recordings.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ).alias("hour"),
            fn.SUM(Recordings.duration).alias("duration"),
            fn.SUM(Recordings.motion).alias("motion"),
            fn.SUM(Recordings.objects).alias("objects"),
        )
        .where(Recordings.camera == camera_name)
        .group_by(
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(
                    Recordings.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            )
        )
        .order_by(
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(
                    Recordings.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ).desc()
        )
    )

    event_groups = (
        Event.select(
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(
                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ).alias("hour"),
            fn.COUNT(Event.id).alias("count"),
        )
        .where(Event.camera == camera_name, Event.has_clip)
        .group_by(
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(
                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ),
        )
        .objects()
    )

    event_map = {g.hour: g.count for g in event_groups}

    days = {}

    for recording_group in recording_groups.objects():
        parts = recording_group.hour.split()
        hour = parts[1]
        day = parts[0]
        events_count = event_map.get(recording_group.hour, 0)
        hour_data = {
            "hour": hour,
            "events": events_count,
            "motion": recording_group.motion,
            "objects": recording_group.objects,
            "duration": round(recording_group.duration),
        }
        if day not in days:
            days[day] = {"events": events_count, "hours": [hour_data], "day": day}
        else:
            days[day]["events"] += events_count
            days[day]["hours"].append(hour_data)

    return jsonify(list(days.values()))


# return hour of recordings data for camera
@bp.route("/<camera_name>/recordings")
def recordings(camera_name):
    after = request.args.get(
        "after", type=float, default=(datetime.now() - timedelta(hours=1)).timestamp()
    )
    before = request.args.get("before", type=float, default=datetime.now().timestamp())

    recordings = (
        Recordings.select(
            Recordings.id,
            Recordings.start_time,
            Recordings.end_time,
            Recordings.segment_size,
            Recordings.motion,
            Recordings.objects,
        )
        .where(
            Recordings.camera == camera_name,
            Recordings.end_time >= after,
            Recordings.start_time <= before,
        )
        .order_by(Recordings.start_time)
    )

    return jsonify([e for e in recordings.dicts()])
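

# The clip below is assembled with ffmpeg's concat demuxer: a playlist of
# "file '<path>'" lines is piped to stdin, "inpoint"/"outpoint" directives
# trim the first and last segments, and "-c copy" avoids re-encoding.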
@bp.route("/<camera_name>/start/<int:start_ts>/end/<int:end_ts>/clip.mp4")
@bp.route("/<camera_name>/start/<float:start_ts>/end/<float:end_ts>/clip.mp4")
def recording_clip(camera_name, start_ts, end_ts):
    download = request.args.get("download", type=bool)

    recordings = (
        Recordings.select()
        .where(
            (Recordings.start_time.between(start_ts, end_ts))
            | (Recordings.end_time.between(start_ts, end_ts))
            | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
        )
        .where(Recordings.camera == camera_name)
        .order_by(Recordings.start_time.asc())
    )

    playlist_lines = []
    clip: Recordings
    for clip in recordings:
        playlist_lines.append(f"file '{clip.path}'")
        # if this is the starting clip, add an inpoint
        if clip.start_time < start_ts:
            playlist_lines.append(f"inpoint {int(start_ts - clip.start_time)}")
        # if this is the ending clip, add an outpoint
        if clip.end_time > end_ts:
            playlist_lines.append(f"outpoint {int(end_ts - clip.start_time)}")

    file_name = f"clip_{camera_name}_{start_ts}-{end_ts}.mp4"
    path = f"/tmp/cache/{file_name}"

    if not os.path.exists(path):
        ffmpeg_cmd = [
            "ffmpeg",
            "-y",
            "-protocol_whitelist",
            "pipe,file",
            "-f",
            "concat",
            "-safe",
            "0",
            "-i",
            "/dev/stdin",
            "-c",
            "copy",
            "-movflags",
            "+faststart",
            path,
        ]
        p = sp.run(
            ffmpeg_cmd,
            input="\n".join(playlist_lines),
            encoding="ascii",
            capture_output=True,
        )

        if p.returncode != 0:
            logger.error(p.stderr)
            return f"Could not create clip from recordings for {camera_name}.", 500
    else:
        logger.debug(
            f"Ignoring subsequent request for {path} as it already exists in the cache."
        )

    response = make_response()
    response.headers["Content-Description"] = "File Transfer"
    response.headers["Cache-Control"] = "no-cache"
    response.headers["Content-Type"] = "video/mp4"
    if download:
        response.headers["Content-Disposition"] = "attachment; filename=%s" % file_name
    response.headers["Content-Length"] = os.path.getsize(path)
    response.headers[
        "X-Accel-Redirect"
    ] = f"/cache/{file_name}"  # nginx: http://wiki.nginx.org/NginxXSendfile

    return response
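

# The JSON below appears to be a playlist mapping consumed by nginx's VOD
# module: each recording segment becomes a "source" clip with its duration in
# milliseconds, and segments with a non-positive or overlong duration are
# skipped with a warning.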
@bp.route("/vod/<camera_name>/start/<int:start_ts>/end/<int:end_ts>")
@bp.route("/vod/<camera_name>/start/<float:start_ts>/end/<float:end_ts>")
def vod_ts(camera_name, start_ts, end_ts):
    recordings = (
        Recordings.select()
        .where(
            Recordings.start_time.between(start_ts, end_ts)
            | Recordings.end_time.between(start_ts, end_ts)
            | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
        )
        .where(Recordings.camera == camera_name)
        .order_by(Recordings.start_time.asc())
    )

    clips = []
    durations = []
    max_duration_ms = MAX_SEGMENT_DURATION * 1000

    recording: Recordings
    for recording in recordings:
        clip = {"type": "source", "path": recording.path}
        duration = int(recording.duration * 1000)

        # Determine if we need to end the last clip early
        if recording.end_time > end_ts:
            duration -= int((recording.end_time - end_ts) * 1000)

        if 0 < duration < max_duration_ms:
            clip["keyFrameDurations"] = [duration]
            clips.append(clip)
            durations.append(duration)
        else:
            logger.warning(f"Recording clip is missing or empty: {recording.path}")

    if not clips:
        logger.error("No recordings found for the requested time range")
        return "No recordings found.", 404

    hour_ago = datetime.now() - timedelta(hours=1)
    return jsonify(
        {
            "cache": hour_ago.timestamp() > start_ts,
            "discontinuity": False,
            "consistentSequenceMediaInfo": True,
            "durations": durations,
            "segment_duration": max(durations),
            "sequences": [{"clips": clips}],
        }
    )


@bp.route("/vod/<year_month>/<day>/<hour>/<camera_name>")
def vod_hour_no_timezone(year_month, day, hour, camera_name):
    return vod_hour(
        year_month, day, hour, camera_name, get_localzone_name().replace("/", ",")
    )
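

# The requested local hour is first interpreted as UTC and then shifted back
# by the zone's current UTC offset; e.g. (illustrative) hour 05 in a UTC-6
# zone becomes 11:00 UTC. "," stands in for "/" in IANA zone names so the
# name survives as a single URL segment.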
# TODO make this nicer when vod module is removed
@bp.route("/vod/<year_month>/<day>/<hour>/<camera_name>/<tz_name>")
def vod_hour(year_month, day, hour, camera_name, tz_name):
    parts = year_month.split("-")
    start_date = (
        datetime(int(parts[0]), int(parts[1]), int(day), int(hour), tzinfo=timezone.utc)
        - datetime.now(pytz.timezone(tz_name.replace(",", "/"))).utcoffset()
    )
    end_date = start_date + timedelta(hours=1) - timedelta(milliseconds=1)
    start_ts = start_date.timestamp()
    end_ts = end_date.timestamp()

    return vod_ts(camera_name, start_ts, end_ts)


@bp.route("/vod/event/<id>")
def vod_event(id):
    try:
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        logger.error(f"Event not found: {id}")
        return "Event not found.", 404

    if not event.has_clip:
        logger.error(f"Event does not have recordings: {id}")
        return "Recordings not available", 404

    clip_path = os.path.join(CLIPS_DIR, f"{event.camera}-{id}.mp4")

    if not os.path.isfile(clip_path):
        end_ts = (
            datetime.now().timestamp() if event.end_time is None else event.end_time
        )
        vod_response = vod_ts(event.camera, event.start_time, end_ts)
        # If the recordings are not found, set has_clip to false
        if (
            isinstance(vod_response, tuple)
            and len(vod_response) == 2
            and vod_response[1] == 404
        ):
            Event.update(has_clip=False).where(Event.id == id).execute()
        return vod_response

    duration = int((event.end_time - event.start_time) * 1000)
    return jsonify(
        {
            "cache": True,
            "discontinuity": False,
            "durations": [duration],
            "sequences": [{"clips": [{"type": "source", "path": clip_path}]}],
        }
    )
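

# Generator for the MJPEG feed: sleeps to cap the frame rate, then yields each
# JPEG frame as one part of a multipart/x-mixed-replace response.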
def imagestream(detected_frames_processor, camera_name, fps, height, draw_options):
    while True:
        # max out at specified FPS
        time.sleep(1 / fps)
        frame = detected_frames_processor.get_current_frame(camera_name, draw_options)
        if frame is None:
            frame = np.zeros((height, int(height * 16 / 9), 3), np.uint8)

        width = int(height * frame.shape[1] / frame.shape[0])
        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_LINEAR)

        ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
        yield (
            b"--frame\r\n"
            b"Content-Type: image/jpeg\r\n\r\n" + jpg.tobytes() + b"\r\n\r\n"
        )
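

# Example (hypothetical host): GET /ffprobe?paths=camera:front_door probes
# every configured input for that camera; otherwise `paths` is a single
# stream URL or a comma-separated list of them.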


@bp.route("/ffprobe", methods=["GET"])
def ffprobe():
    path_param = request.args.get("paths", "")

    if not path_param:
        return make_response(
            jsonify({"success": False, "message": "Path needs to be provided."}), 404
        )

    if path_param.startswith("camera"):
        camera = path_param[7:]

        if camera not in current_app.frigate_config.cameras.keys():
            return make_response(
                jsonify(
                    {"success": False, "message": f"{camera} is not a valid camera."}
                ),
                404,
            )

        if not current_app.frigate_config.cameras[camera].enabled:
            return make_response(
                jsonify({"success": False, "message": f"{camera} is not enabled."}),
                404,
            )

        paths = map(
            lambda input: input.path,
            current_app.frigate_config.cameras[camera].ffmpeg.inputs,
        )
    elif "," in clean_camera_user_pass(path_param):
        paths = path_param.split(",")
    else:
        paths = [path_param]

    # user has multiple streams
    output = []

    for path in paths:
        ffprobe = ffprobe_stream(path.strip())
        output.append(
            {
                "return_code": ffprobe.returncode,
                "stderr": ffprobe.stderr.decode("unicode_escape").strip()
                if ffprobe.returncode != 0
                else "",
                "stdout": json.loads(ffprobe.stdout.decode("unicode_escape").strip())
                if ffprobe.returncode == 0
                else "",
            }
        )

    return jsonify(output)


@bp.route("/vainfo", methods=["GET"])
def vainfo():
    vainfo = vainfo_hwaccel()
    return jsonify(
        {
            "return_code": vainfo.returncode,
            "stderr": vainfo.stderr.decode("unicode_escape").strip()
            if vainfo.returncode != 0
            else "",
            "stdout": vainfo.stdout.decode("unicode_escape").strip()
            if vainfo.returncode == 0
            else "",
        }
    )


@bp.route("/logs/<service>", methods=["GET"])
def logs(service: str):
    log_locations = {
        "frigate": "/dev/shm/logs/frigate/current",
        "go2rtc": "/dev/shm/logs/go2rtc/current",
        "nginx": "/dev/shm/logs/nginx/current",
    }
    service_location = log_locations.get(service)

    if not service_location:
        return f"{service} is not a valid service", 404

    try:
        with open(service_location, "r") as file:
            contents = file.read()
        return contents, 200
    except FileNotFoundError as e:
        return f"Could not find log file: {e}", 500