2020-11-25 03:36:56 +01:00
|
|
|
import base64
|
2021-11-09 02:05:39 +01:00
|
|
|
import copy
|
2023-05-19 12:16:11 +02:00
|
|
|
import glob
|
2022-11-13 19:48:14 +01:00
|
|
|
import json
|
2023-05-29 12:31:17 +02:00
|
|
|
import logging
|
2020-11-01 15:06:15 +01:00
|
|
|
import os
|
2021-07-09 22:14:16 +02:00
|
|
|
import subprocess as sp
|
2020-11-01 22:37:51 +01:00
|
|
|
import time
|
2022-12-07 14:36:56 +01:00
|
|
|
import traceback
|
2023-05-29 12:31:17 +02:00
|
|
|
from datetime import datetime, timedelta, timezone
|
2020-11-16 14:27:56 +01:00
|
|
|
from functools import reduce
|
2021-05-12 17:19:02 +02:00
|
|
|
from pathlib import Path
|
2022-08-25 14:32:30 +02:00
|
|
|
from urllib.parse import unquote
|
2020-11-01 22:37:51 +01:00
|
|
|
|
|
|
|
import cv2
|
|
|
|
import numpy as np
|
2023-05-29 12:31:17 +02:00
|
|
|
import pytz
|
2021-02-17 14:23:32 +01:00
|
|
|
from flask import (
|
|
|
|
Blueprint,
|
|
|
|
Flask,
|
|
|
|
Response,
|
|
|
|
current_app,
|
2023-10-07 05:20:30 +02:00
|
|
|
escape,
|
2021-02-17 14:23:32 +01:00
|
|
|
jsonify,
|
|
|
|
make_response,
|
|
|
|
request,
|
|
|
|
)
|
2023-07-06 20:50:22 +02:00
|
|
|
from peewee import DoesNotExist, fn, operator
|
2020-11-01 15:06:15 +01:00
|
|
|
from playhouse.shortcuts import model_to_dict
|
2023-07-06 20:50:22 +02:00
|
|
|
from playhouse.sqliteq import SqliteQueueDatabase
|
2023-05-29 12:31:17 +02:00
|
|
|
from tzlocal import get_localzone_name
|
2023-10-07 16:12:48 +02:00
|
|
|
from werkzeug.utils import secure_filename
|
2020-11-01 15:06:15 +01:00
|
|
|
|
2022-12-07 14:36:56 +01:00
|
|
|
from frigate.config import FrigateConfig
|
2023-07-29 20:19:21 +02:00
|
|
|
from frigate.const import (
|
|
|
|
CACHE_DIR,
|
|
|
|
CLIPS_DIR,
|
|
|
|
CONFIG_DIR,
|
2023-09-21 12:20:57 +02:00
|
|
|
EXPORT_DIR,
|
2023-07-29 20:19:21 +02:00
|
|
|
MAX_SEGMENT_DURATION,
|
|
|
|
RECORD_DIR,
|
|
|
|
)
|
2023-05-19 12:16:11 +02:00
|
|
|
from frigate.events.external import ExternalEventProcessor
|
2023-05-29 12:31:17 +02:00
|
|
|
from frigate.models import Event, Recordings, Timeline
|
2022-11-13 19:48:14 +01:00
|
|
|
from frigate.object_processing import TrackedObject
|
2023-04-24 14:24:28 +02:00
|
|
|
from frigate.plus import PlusApi
|
2023-07-08 14:04:47 +02:00
|
|
|
from frigate.ptz.onvif import OnvifController
|
2023-06-08 13:32:35 +02:00
|
|
|
from frigate.record.export import PlaybackFactorEnum, RecordingExporter
|
2021-01-04 00:35:58 +01:00
|
|
|
from frigate.stats import stats_snapshot
|
2023-05-29 12:31:17 +02:00
|
|
|
from frigate.storage import StorageMaintainer
|
2023-07-06 20:54:26 +02:00
|
|
|
from frigate.util.builtin import (
|
|
|
|
clean_camera_user_pass,
|
|
|
|
get_tz_modifiers,
|
|
|
|
update_yaml_from_url,
|
|
|
|
)
|
2023-07-06 16:28:50 +02:00
|
|
|
from frigate.util.services import ffprobe_stream, restart_frigate, vainfo_hwaccel
|
2020-12-19 13:51:10 +01:00
|
|
|
from frigate.version import VERSION
|
2020-11-01 15:06:15 +01:00
|
|
|
|
2020-11-25 17:37:41 +01:00
|
|
|
# Module-level logger for this blueprint's route handlers.
logger = logging.getLogger(__name__)

# Sentinel "whole day" window in "HH:MM,HH:MM" form; presumably the default
# for time-range filtering args elsewhere in this module — confirm callers.
DEFAULT_TIME_RANGE = "00:00,24:00"

# All HTTP routes below register on this blueprint; create_app() attaches it.
bp = Blueprint("frigate", __name__)
|
2021-02-13 16:33:32 +01:00
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
def create_app(
    frigate_config,
    database: SqliteQueueDatabase,
    stats_tracking,
    detected_frames_processor,
    storage_maintainer: StorageMaintainer,
    onvif: OnvifController,
    external_processor: ExternalEventProcessor,
    plus_api: PlusApi,
):
    """Create and configure the Flask app serving Frigate's HTTP API.

    Wires request-level hooks (CSRF guard, DB connect/close) and attaches the
    shared runtime objects onto the app so route handlers can reach them via
    ``current_app``. Returns the configured Flask application.
    """
    app = Flask(__name__)

    @app.before_request
    def check_csrf():
        # Safe (read-only) methods do not need CSRF protection, so skip the
        # header check for them. BUG FIX: the original used `pass` here, which
        # fell through and still applied the origin/x-csrf-token check to GET,
        # HEAD, OPTIONS and TRACE requests.
        if request.method in ["GET", "HEAD", "OPTIONS", "TRACE"]:
            return
        if "origin" in request.headers and "x-csrf-token" not in request.headers:
            return jsonify({"success": False, "message": "Missing CSRF header"}), 401

    @app.before_request
    def _db_connect():
        # Lazily (re)open the shared queue database connection per request.
        if database.is_closed():
            database.connect()

    @app.teardown_request
    def _db_close(exc):
        # Always release the connection, even when the request errored.
        if not database.is_closed():
            database.close()

    # Expose shared runtime objects to route handlers via current_app.
    app.frigate_config = frigate_config
    app.stats_tracking = stats_tracking
    app.detected_frames_processor = detected_frames_processor
    app.storage_maintainer = storage_maintainer
    app.onvif = onvif
    app.external_processor = external_processor
    app.plus_api = plus_api
    app.camera_error_image = None
    app.hwaccel_errors = []

    app.register_blueprint(bp)

    return app
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/")
def is_healthy():
    """Liveness endpoint: returns a static string when the server is up."""
    return "Frigate is running. Alive and healthy!"
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/events/summary")
def events_summary():
    """Summarize event counts grouped by camera, label, sub_label, day, zones.

    Query args:
      - timezone: tz name used to bucket start times into local days
        (default "utc")
      - has_clip / has_snapshot: optional 0/1 filters

    Returns a JSON list of group dicts, each including a "day" string
    ("%Y-%m-%d") and a "count".
    """
    tz_name = request.args.get("timezone", default="utc", type=str)
    # hour/minute offsets applied to the unix timestamp so the day bucket
    # reflects the requested timezone rather than UTC
    hour_modifier, minute_modifier = get_tz_modifiers(tz_name)
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)

    clauses = []

    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    if len(clauses) == 0:
        # no filters requested; match every event
        clauses.append((True))

    groups = (
        Event.select(
            Event.camera,
            Event.label,
            Event.sub_label,
            # bucket start_time into a timezone-adjusted calendar day
            fn.strftime(
                "%Y-%m-%d",
                fn.datetime(
                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ).alias("day"),
            Event.zones,
            fn.COUNT(Event.id).alias("count"),
        )
        .where(reduce(operator.and_, clauses))
        .group_by(
            Event.camera,
            Event.label,
            Event.sub_label,
            # must repeat the same day expression in GROUP BY
            fn.strftime(
                "%Y-%m-%d",
                fn.datetime(
                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ),
            Event.zones,
        )
    )

    return jsonify([e for e in groups.dicts()])
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
2021-05-12 17:19:02 +02:00
|
|
|
@bp.route("/events/<id>", methods=("GET",))
def event(id):
    """Fetch a single event by id as a dict; 404 text response if missing."""
    try:
        db_event = Event.get(Event.id == id)
    except DoesNotExist:
        return "Event not found", 404
    return model_to_dict(db_event)
|
|
|
|
|
2021-05-18 07:52:08 +02:00
|
|
|
|
2022-02-22 05:03:01 +01:00
|
|
|
@bp.route("/events/<id>/retain", methods=("POST",))
def set_retain(id):
    """Flag an event so the retention cleanup never deletes it."""
    try:
        db_event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    # persist the indefinite-retention flag
    db_event.retain_indefinitely = True
    db_event.save()

    payload = jsonify({"success": True, "message": "Event " + id + " retained"})
    return make_response(payload, 200)
|
|
|
|
|
|
|
|
|
2022-04-03 22:00:11 +02:00
|
|
|
@bp.route("/events/<id>/plus", methods=("POST",))
def send_to_plus(id):
    """Upload an event's clean snapshot to Frigate+ and record the plus_id.

    Optionally also submits the event's bounding box as an annotation when the
    JSON body contains "include_annotation". Fails with 400 when the Plus API
    is inactive, the event is still in progress, it was already submitted, or
    the clean png cannot be loaded; 404 when the event does not exist.
    """
    if not current_app.plus_api.is_active():
        message = "PLUS_API_KEY environment variable is not set"
        logger.error(message)
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": message,
                }
            ),
            400,
        )

    include_annotation = (
        request.json.get("include_annotation") if request.is_json else None
    )

    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        message = f"Event {id} not found"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 404)

    # events from before the conversion to relative dimensions cant include annotations
    if event.data.get("box") is None:
        include_annotation = None

    if event.end_time is None:
        logger.error(f"Unable to load clean png for in-progress event: {event.id}")
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "Unable to load clean png for in-progress event",
                }
            ),
            400,
        )

    if event.plus_id:
        message = "Already submitted to plus"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 400)

    # load clean.png
    try:
        filename = f"{event.camera}-{event.id}-clean.png"
        image = cv2.imread(os.path.join(CLIPS_DIR, filename))
    except Exception:
        logger.error(f"Unable to load clean png for event: {event.id}")
        return make_response(
            jsonify(
                {"success": False, "message": "Unable to load clean png for event"}
            ),
            400,
        )

    # cv2.imread returns None (not an exception) on a missing/corrupt file
    if image is None or image.size == 0:
        logger.error(f"Unable to load clean png for event: {event.id}")
        return make_response(
            jsonify(
                {"success": False, "message": "Unable to load clean png for event"}
            ),
            400,
        )

    try:
        plus_id = current_app.plus_api.upload_image(image, event.camera)
    except Exception as ex:
        logger.exception(ex)
        return make_response(
            jsonify({"success": False, "message": str(ex)}),
            400,
        )

    # store image id in the database
    event.plus_id = plus_id
    event.save()

    if include_annotation is not None:
        box = event.data["box"]

        try:
            current_app.plus_api.add_annotation(
                event.plus_id,
                box,
                event.label,
            )
        except Exception as ex:
            logger.exception(ex)
            return make_response(
                jsonify({"success": False, "message": str(ex)}),
                400,
            )

    return make_response(jsonify({"success": True, "plus_id": plus_id}), 200)
|
2022-04-03 22:00:11 +02:00
|
|
|
|
|
|
|
|
2023-04-24 14:24:28 +02:00
|
|
|
@bp.route("/events/<id>/false_positive", methods=("PUT",))
def false_positive(id):
    """Submit an event to Frigate+ as a false positive.

    Uploads the event first (via send_to_plus) when it has no plus_id yet,
    then submits region/box/score/model metadata as a false positive and
    marks the event accordingly. Returns 400 on precondition failures and
    404 when the event does not exist.
    """
    if not current_app.plus_api.is_active():
        message = "PLUS_API_KEY environment variable is not set"
        logger.error(message)
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": message,
                }
            ),
            400,
        )

    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        message = f"Event {id} not found"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 404)

    # events from before the conversion to relative dimensions cant include annotations
    if event.data.get("box") is None:
        message = "Events prior to 0.13 cannot be submitted as false positives"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 400)

    if event.false_positive:
        message = "False positive already submitted to Frigate+"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 400)

    if not event.plus_id:
        # upload the snapshot first; bail out on any non-200 response
        plus_response = send_to_plus(id)
        if plus_response.status_code != 200:
            return plus_response
        # need to refetch the event now that it has a plus_id
        event = Event.get(Event.id == id)

    region = event.data["region"]
    box = event.data["box"]

    # provide top score if score is unavailable
    score = (
        (event.data["top_score"] if event.data["top_score"] else event.top_score)
        if event.data["score"] is None
        else event.data["score"]
    )

    try:
        current_app.plus_api.add_false_positive(
            event.plus_id,
            region,
            box,
            score,
            event.label,
            event.model_hash,
            event.model_type,
            event.detector_type,
        )
    except Exception as ex:
        logger.exception(ex)
        return make_response(
            jsonify({"success": False, "message": str(ex)}),
            400,
        )

    event.false_positive = True
    event.save()

    return make_response(jsonify({"success": True, "plus_id": event.plus_id}), 200)
|
|
|
|
|
|
|
|
|
2022-02-22 05:03:01 +01:00
|
|
|
@bp.route("/events/<id>/retain", methods=("DELETE",))
def delete_retain(id):
    """Clear the indefinite-retention flag on an event."""
    try:
        db_event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    # revert the event to normal retention rules
    db_event.retain_indefinitely = False
    db_event.save()

    payload = jsonify({"success": True, "message": "Event " + id + " un-retained"})
    return make_response(payload, 200)
|
|
|
|
|
2022-04-10 15:09:41 +02:00
|
|
|
|
2022-03-17 13:18:43 +01:00
|
|
|
@bp.route("/events/<id>/sub_label", methods=("POST",))
def set_sub_label(id):
    """Set (or clear) the sub label and optional sub label score of an event.

    JSON body keys: "subLabel" (str, max 100 chars) and "subLabelScore"
    (float in [0, 1]). Also updates the in-memory tracked object when the
    event is still in progress so live state matches the database.
    """
    try:
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    json: dict[str, any] = request.get_json(silent=True) or {}
    new_sub_label = json.get("subLabel")
    new_score = json.get("subLabelScore")

    if new_sub_label and len(new_sub_label) > 100:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": new_sub_label
                    + " exceeds the 100 character limit for sub_label",
                }
            ),
            400,
        )

    if new_score is not None and (new_score > 1.0 or new_score < 0):
        return make_response(
            jsonify(
                {
                    "success": False,
                    # BUG FIX: new_score is numeric; the original concatenated
                    # it to a str, raising TypeError instead of returning 400.
                    "message": f"{new_score} does not fit within the expected bounds 0 <= score <= 1.0",
                }
            ),
            400,
        )

    if not event.end_time:
        # event is in progress; mirror the change onto the tracked object
        tracked_obj: TrackedObject = (
            current_app.detected_frames_processor.camera_states[
                event.camera
            ].tracked_objects.get(event.id)
        )

        if tracked_obj:
            tracked_obj.obj_data["sub_label"] = (new_sub_label, new_score)

    event.sub_label = new_sub_label

    if new_score:
        # reassign event.data wholesale so peewee detects the JSON change
        data = event.data
        data["sub_label_score"] = new_score
        event.data = data

    event.save()
    return make_response(
        jsonify(
            {
                "success": True,
                # BUG FIX: f-string instead of `+` so a null subLabel (used to
                # clear the sub label) no longer raises TypeError.
                "message": f"Event {id} sub label set to {new_sub_label}",
            }
        ),
        200,
    )
|
2022-02-22 05:03:01 +01:00
|
|
|
|
2022-04-10 15:09:41 +02:00
|
|
|
|
2023-06-28 12:51:53 +02:00
|
|
|
@bp.route("/labels")
def get_labels():
    """List distinct event labels, optionally restricted to one camera."""
    camera = request.args.get("camera", type=str, default="")

    try:
        query = Event.select(Event.label)
        if camera:
            query = query.where(Event.camera == camera)
        events = query.distinct()
    except Exception as e:
        return make_response(
            jsonify({"success": False, "message": f"Failed to get labels: {e}"}), 404
        )

    return jsonify(sorted(e.label for e in events))
|
|
|
|
|
|
|
|
|
2022-05-29 16:47:43 +02:00
|
|
|
@bp.route("/sub_labels")
def get_sub_labels():
    """List distinct event sub labels.

    With split_joined=1, comma-joined sub labels are expanded into their
    stripped parts (deduplicated) instead of being returned verbatim.
    """
    split_joined = request.args.get("split_joined", type=int)

    try:
        events = Event.select(Event.sub_label).distinct()
    except Exception as e:
        return make_response(
            jsonify({"success": False, "message": f"Failed to get sub_labels: {e}"}),
            404,
        )

    sub_labels = [e.sub_label for e in events]

    # drop the NULL sub label row if present
    if None in sub_labels:
        sub_labels.remove(None)

    if split_joined:
        # iterate a snapshot since we mutate sub_labels in the loop
        for joined in list(sub_labels):
            if "," not in joined:
                continue
            sub_labels.remove(joined)
            for piece in joined.split(","):
                cleaned = piece.strip()
                if cleaned not in sub_labels:
                    sub_labels.append(cleaned)

    sub_labels.sort()
    return jsonify(sub_labels)
|
|
|
|
|
|
|
|
|
2021-05-18 07:52:08 +02:00
|
|
|
@bp.route("/events/<id>", methods=("DELETE",))
def delete_event(id):
    """Delete an event row plus its media files and timeline entries."""
    try:
        db_event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    media_name = f"{db_event.camera}-{db_event.id}"
    base = os.path.join(CLIPS_DIR, media_name)

    if db_event.has_snapshot:
        # remove both the snapshot jpg and the clean png variant
        for suffix in (".jpg", "-clean.png"):
            Path(f"{base}{suffix}").unlink(missing_ok=True)
    if db_event.has_clip:
        Path(f"{base}.mp4").unlink(missing_ok=True)

    db_event.delete_instance()
    # purge associated timeline rows as well
    Timeline.delete().where(Timeline.source_id == id).execute()
    return make_response(
        jsonify({"success": True, "message": "Event " + id + " deleted"}), 200
    )
|
2021-05-12 17:19:02 +02:00
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
2021-05-18 07:52:08 +02:00
|
|
|
@bp.route("/events/<id>/thumbnail.jpg")
def event_thumbnail(id, max_cache_age=2592000):
    """Serve an event's thumbnail jpg.

    Falls back to the in-memory tracked object's thumbnail when the event is
    not yet persisted. format=android pads the image to a 2:1 aspect ratio.
    Completed events get a long-lived private cache header (max_cache_age
    seconds, default 30 days); in-progress events are not cached.
    """
    format = request.args.get("format", "ios")
    thumbnail_bytes = None
    event_complete = False
    try:
        event = Event.get(Event.id == id)
        if event.end_time is not None:
            event_complete = True
        # thumbnails are stored base64-encoded on the event row
        thumbnail_bytes = base64.b64decode(event.thumbnail)
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            camera_states = current_app.detected_frames_processor.camera_states.values()
            for camera_state in camera_states:
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    if tracked_obj is not None:
                        thumbnail_bytes = tracked_obj.get_thumbnail()
        except Exception:
            return make_response(
                jsonify({"success": False, "message": "Event not found"}), 404
            )

    if thumbnail_bytes is None:
        return make_response(
            jsonify({"success": False, "message": "Event not found"}), 404
        )

    # android notifications prefer a 2:1 ratio
    if format == "android":
        jpg_as_np = np.frombuffer(thumbnail_bytes, dtype=np.uint8)
        img = cv2.imdecode(jpg_as_np, flags=1)
        # pad left/right with black borders to reach 2:1
        thumbnail = cv2.copyMakeBorder(
            img,
            0,
            0,
            int(img.shape[1] * 0.5),
            int(img.shape[1] * 0.5),
            cv2.BORDER_CONSTANT,
            (0, 0, 0),
        )
        ret, jpg = cv2.imencode(".jpg", thumbnail, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
        thumbnail_bytes = jpg.tobytes()

    response = make_response(thumbnail_bytes)
    response.headers["Content-Type"] = "image/jpeg"
    if event_complete:
        response.headers["Cache-Control"] = f"private, max-age={max_cache_age}"
    else:
        # in-progress events may still change; never cache
        response.headers["Cache-Control"] = "no-store"
    return response
|
2020-11-24 22:44:59 +01:00
|
|
|
|
2022-04-03 22:00:11 +02:00
|
|
|
|
2023-04-23 17:45:19 +02:00
|
|
|
@bp.route("/timeline")
def timeline():
    """Return timeline entries (oldest first), optionally filtered by
    camera and/or source_id, capped at `limit` rows (default 100)."""
    camera = request.args.get("camera", "all")
    source_id = request.args.get("source_id", type=str)
    limit = request.args.get("limit", 100)

    filters = []
    if camera != "all":
        filters.append((Timeline.camera == camera))
    if source_id:
        filters.append((Timeline.source_id == source_id))
    if not filters:
        # no filters requested; match every row
        filters.append((True))

    rows = (
        Timeline.select(
            Timeline.timestamp,
            Timeline.camera,
            Timeline.source,
            Timeline.source_id,
            Timeline.class_type,
            Timeline.data,
        )
        .where(reduce(operator.and_, filters))
        .order_by(Timeline.timestamp.asc())
        .limit(limit)
        .dicts()
    )

    return jsonify(list(rows))
|
2023-04-23 17:45:19 +02:00
|
|
|
|
|
|
|
|
2022-03-11 14:56:39 +01:00
|
|
|
@bp.route("/<camera_name>/<label>/best.jpg")
@bp.route("/<camera_name>/<label>/thumbnail.jpg")
def label_thumbnail(camera_name, label):
    """Serve the thumbnail of the latest event on a camera, optionally
    filtered by label ("any" matches all). Blank jpg when none exists."""
    label = unquote(label)
    query = Event.select(fn.MAX(Event.id)).where(Event.camera == camera_name)
    if label != "any":
        query = query.where(Event.label == label)

    try:
        latest_id = query.scalar()
        # short 60s cache: the "latest" event changes often
        return event_thumbnail(latest_id, 60)
    except DoesNotExist:
        # no matching event; answer with a black placeholder image
        blank = np.zeros((175, 175, 3), np.uint8)
        ret, jpg = cv2.imencode(".jpg", blank, [int(cv2.IMWRITE_JPEG_QUALITY), 70])

        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        response.headers["Cache-Control"] = "no-store"
        return response
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/events/<id>/snapshot.jpg")
def event_snapshot(id):
    """Serve an event's snapshot jpg.

    Completed events are read from disk; otherwise the in-memory tracked
    object renders a jpg on the fly (honoring timestamp/bbox/crop/h/quality
    query args). download=1 adds a Content-Disposition attachment header.
    """
    download = request.args.get("download", type=bool)
    event_complete = False
    jpg_bytes = None
    try:
        # only match finished events here; in-progress falls to DoesNotExist
        event = Event.get(Event.id == id, Event.end_time != None)
        event_complete = True
        if not event.has_snapshot:
            return make_response(
                jsonify({"success": False, "message": "Snapshot not available"}), 404
            )
        # read snapshot from disk
        with open(
            os.path.join(CLIPS_DIR, f"{event.camera}-{id}.jpg"), "rb"
        ) as image_file:
            jpg_bytes = image_file.read()
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            camera_states = current_app.detected_frames_processor.camera_states.values()
            for camera_state in camera_states:
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    if tracked_obj is not None:
                        jpg_bytes = tracked_obj.get_jpg_bytes(
                            timestamp=request.args.get("timestamp", type=int),
                            bounding_box=request.args.get("bbox", type=int),
                            crop=request.args.get("crop", type=int),
                            height=request.args.get("h", type=int),
                            quality=request.args.get("quality", default=70, type=int),
                        )
        except Exception:
            return make_response(
                jsonify({"success": False, "message": "Event not found"}), 404
            )
    except Exception:
        # any other failure (e.g. missing file) is reported as not found
        return make_response(
            jsonify({"success": False, "message": "Event not found"}), 404
        )

    if jpg_bytes is None:
        return make_response(
            jsonify({"success": False, "message": "Event not found"}), 404
        )

    response = make_response(jpg_bytes)
    response.headers["Content-Type"] = "image/jpeg"
    if event_complete:
        # finished snapshots never change; cache for a year
        response.headers["Cache-Control"] = "private, max-age=31536000"
    else:
        response.headers["Cache-Control"] = "no-store"
    if download:
        response.headers[
            "Content-Disposition"
        ] = f"attachment; filename=snapshot-{id}.jpg"
    return response
|
|
|
|
|
2022-04-03 22:00:11 +02:00
|
|
|
|
2022-03-11 14:56:39 +01:00
|
|
|
@bp.route("/<camera_name>/<label>/snapshot.jpg")
def label_snapshot(camera_name, label):
    """Serve the snapshot of the most recent event with a snapshot on a
    camera, optionally filtered by label ("any" matches all labels).

    Returns a blank 1280x720 jpg when no matching event exists.
    """
    label = unquote(label)
    # Build the base query once and add the label filter conditionally;
    # the original duplicated the whole query in both branches.
    event_query = (
        Event.select(Event.id)
        .where(Event.camera == camera_name)
        .where(Event.has_snapshot == True)
        .order_by(Event.start_time.desc())
    )
    if label != "any":
        event_query = event_query.where(Event.label == label)

    try:
        event = event_query.get()
        return event_snapshot(event.id)
    except DoesNotExist:
        # no matching event; answer with a black placeholder image
        frame = np.zeros((720, 1280, 3), np.uint8)
        ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])

        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        return response
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
2021-07-09 22:14:16 +02:00
|
|
|
@bp.route("/events/<id>/clip.mp4")
def event_clip(id):
    """Serve an event's mp4 clip.

    The file itself is delivered by nginx via X-Accel-Redirect; this handler
    only sets headers. When the dedicated clip file does not exist yet, it
    falls back to an on-the-fly recording clip for the event's time span.
    download=1 adds a Content-Disposition attachment header.
    """
    download = request.args.get("download", type=bool)

    try:
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event not found"}), 404
        )

    if not event.has_clip:
        return make_response(
            jsonify({"success": False, "message": "Clip not available"}), 404
        )

    file_name = f"{event.camera}-{id}.mp4"
    clip_path = os.path.join(CLIPS_DIR, file_name)

    if not os.path.isfile(clip_path):
        # no pre-rendered clip; stream it from recordings instead, using
        # "now" as the end for in-progress events
        end_ts = (
            datetime.now().timestamp() if event.end_time is None else event.end_time
        )
        return recording_clip(event.camera, event.start_time, end_ts)

    response = make_response()
    response.headers["Content-Description"] = "File Transfer"
    response.headers["Cache-Control"] = "no-cache"
    response.headers["Content-Type"] = "video/mp4"
    if download:
        response.headers["Content-Disposition"] = "attachment; filename=%s" % file_name
    response.headers["Content-Length"] = os.path.getsize(clip_path)
    response.headers[
        "X-Accel-Redirect"
    ] = f"/clips/{file_name}"  # nginx: http://wiki.nginx.org/NginxXSendfile

    return response
|
2021-07-09 22:14:16 +02:00
|
|
|
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
@bp.route("/events")
def events():
    """Query events with a rich set of optional filters.

    Supports legacy singular args (camera/label/sub_label/zone) alongside
    their comma-separated plural forms, time/score/length bounds, clip and
    snapshot flags, favorites, Frigate+ submission state, and an optional
    time-of-day range evaluated in the caller's timezone. Returns at most
    `limit` events (default 100), newest first, as a JSON list.
    """
    camera = request.args.get("camera", "all")
    cameras = request.args.get("cameras", "all")

    # handle old camera arg
    if cameras == "all" and camera != "all":
        cameras = camera

    label = unquote(request.args.get("label", "all"))
    labels = request.args.get("labels", "all")

    # handle old label arg
    if labels == "all" and label != "all":
        labels = label

    sub_label = request.args.get("sub_label", "all")
    sub_labels = request.args.get("sub_labels", "all")

    # handle old sub_label arg
    if sub_labels == "all" and sub_label != "all":
        sub_labels = sub_label

    zone = request.args.get("zone", "all")
    zones = request.args.get("zones", "all")

    # handle old zone arg
    if zones == "all" and zone != "all":
        zones = zone

    # FIX: previously `request.args.get("limit", 100)`, which yields a str
    # whenever the arg is supplied; coerce to int like the other args.
    limit = request.args.get("limit", default=100, type=int)
    after = request.args.get("after", type=float)
    before = request.args.get("before", type=float)
    time_range = request.args.get("time_range", DEFAULT_TIME_RANGE)
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)
    in_progress = request.args.get("in_progress", type=int)
    include_thumbnails = request.args.get("include_thumbnails", default=1, type=int)
    favorites = request.args.get("favorites", type=int)
    min_score = request.args.get("min_score", type=float)
    max_score = request.args.get("max_score", type=float)
    is_submitted = request.args.get("is_submitted", type=int)
    min_length = request.args.get("min_length", type=float)
    max_length = request.args.get("max_length", type=float)

    clauses = []

    selected_columns = [
        Event.id,
        Event.camera,
        Event.label,
        Event.zones,
        Event.start_time,
        Event.end_time,
        Event.has_clip,
        Event.has_snapshot,
        Event.plus_id,
        Event.retain_indefinitely,
        Event.sub_label,
        Event.top_score,
        Event.false_positive,
        Event.box,
        Event.data,
    ]

    if camera != "all":
        clauses.append((Event.camera == camera))

    if cameras != "all":
        camera_list = cameras.split(",")
        clauses.append((Event.camera << camera_list))

    if labels != "all":
        label_list = labels.split(",")
        clauses.append((Event.label << label_list))

    if sub_labels != "all":
        # use matching so joined sub labels are included
        # for example a sub label 'bob' would get events
        # with sub labels 'bob' and 'bob, john'
        sub_label_clauses = []
        filtered_sub_labels = sub_labels.split(",")

        if "None" in filtered_sub_labels:
            filtered_sub_labels.remove("None")
            sub_label_clauses.append((Event.sub_label.is_null()))

        for label in filtered_sub_labels:
            sub_label_clauses.append(
                (Event.sub_label.cast("text") == label)
            )  # include exact matches

            # include this label when part of a list
            sub_label_clauses.append((Event.sub_label.cast("text") % f"*{label},*"))
            sub_label_clauses.append((Event.sub_label.cast("text") % f"*, {label}*"))

        sub_label_clause = reduce(operator.or_, sub_label_clauses)
        clauses.append((sub_label_clause))

    if zones != "all":
        # use matching so events with multiple zones
        # still match on a search where any zone matches
        zone_clauses = []
        filtered_zones = zones.split(",")

        if "None" in filtered_zones:
            filtered_zones.remove("None")
            zone_clauses.append((Event.zones.length() == 0))

        for zone in filtered_zones:
            zone_clauses.append((Event.zones.cast("text") % f'*"{zone}"*'))

        zone_clause = reduce(operator.or_, zone_clauses)
        clauses.append((zone_clause))

    if after:
        clauses.append((Event.start_time > after))

    if before:
        clauses.append((Event.start_time < before))

    if time_range != DEFAULT_TIME_RANGE:
        # get timezone arg to ensure browser times are used
        tz_name = request.args.get("timezone", default="utc", type=str)
        hour_modifier, minute_modifier = get_tz_modifiers(tz_name)

        times = time_range.split(",")
        time_after = times[0]
        time_before = times[1]

        start_hour_fun = fn.strftime(
            "%H:%M",
            fn.datetime(Event.start_time, "unixepoch", hour_modifier, minute_modifier),
        )

        # cases where user wants events overnight, ex: from 20:00 to 06:00
        # should use or operator
        if time_after > time_before:
            clauses.append(
                (
                    reduce(
                        operator.or_,
                        [(start_hour_fun > time_after), (start_hour_fun < time_before)],
                    )
                )
            )
        # all other cases should be and operator
        else:
            clauses.append((start_hour_fun > time_after))
            clauses.append((start_hour_fun < time_before))

    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    if in_progress is not None:
        clauses.append((Event.end_time.is_null(in_progress)))

    # thumbnails are large; only select them when requested (default on)
    if include_thumbnails:
        selected_columns.append(Event.thumbnail)

    if favorites:
        clauses.append((Event.retain_indefinitely == favorites))

    if max_score is not None:
        clauses.append((Event.data["score"] <= max_score))

    if min_score is not None:
        clauses.append((Event.data["score"] >= min_score))

    if min_length is not None:
        clauses.append(((Event.end_time - Event.start_time) >= min_length))

    if max_length is not None:
        clauses.append(((Event.end_time - Event.start_time) <= max_length))

    if is_submitted is not None:
        if is_submitted == 0:
            clauses.append((Event.plus_id.is_null()))
        else:
            clauses.append((Event.plus_id != ""))

    if len(clauses) == 0:
        clauses.append((True))

    events = (
        Event.select(*selected_columns)
        .where(reduce(operator.and_, clauses))
        .order_by(Event.start_time.desc())
        .limit(limit)
        .dicts()
    )

    return jsonify([e for e in events])
|
2020-11-01 15:06:15 +01:00
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
2023-05-19 12:16:11 +02:00
|
|
|
@bp.route("/events/<camera_name>/<label>/create", methods=["POST"])
def create_event(camera_name, label):
    """Create a manual event via the external event processor.

    Optional JSON body keys: source_type, sub_label, score, duration,
    include_recording, draw. Returns the new event id on success, 404 for
    an unknown camera or empty label, 500 on processor failure.
    """
    if not camera_name or not current_app.frigate_config.cameras.get(camera_name):
        return make_response(
            jsonify(
                {"success": False, "message": f"{camera_name} is not a valid camera."}
            ),
            404,
        )

    if not label:
        return make_response(
            jsonify({"success": False, "message": f"{label} must be set."}), 404
        )

    # FIX: renamed from `json`, which shadowed the stdlib json module
    # imported at the top of this file.
    json_body: dict[str, any] = request.get_json(silent=True) or {}

    try:
        # current frame is attached so the processor can render a snapshot
        frame = current_app.detected_frames_processor.get_current_frame(camera_name)

        event_id = current_app.external_processor.create_manual_event(
            camera_name,
            label,
            json_body.get("source_type", "api"),
            json_body.get("sub_label", None),
            json_body.get("score", 0),
            json_body.get("duration", 30),
            json_body.get("include_recording", True),
            json_body.get("draw", {}),
            frame,
        )
    except Exception as e:
        return make_response(
            jsonify({"success": False, "message": f"An unknown error occurred: {e}"}),
            500,
        )

    return make_response(
        jsonify(
            {
                "success": True,
                "message": "Successfully created event.",
                "event_id": event_id,
            }
        ),
        200,
    )
|
|
|
|
|
|
|
|
|
|
|
|
@bp.route("/events/<event_id>/end", methods=["PUT"])
def end_event(event_id):
    """End a manual (externally created) event.

    An optional JSON body may supply "end_time" (unix timestamp);
    otherwise the current time is used.
    """
    body: dict[str, any] = request.get_json(silent=True) or {}

    try:
        end_time = body.get("end_time", datetime.now().timestamp())
        current_app.external_processor.finish_manual_event(event_id, end_time)
    except Exception:
        return make_response(
            jsonify(
                {"success": False, "message": f"{event_id} must be set and valid."}
            ),
            404,
        )

    return make_response(
        jsonify({"success": True, "message": "Event successfully ended."}), 200
    )
|
2023-05-19 12:16:11 +02:00
|
|
|
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
@bp.route("/config")
def config():
    """Return the running config as JSON with credentials scrubbed.

    Stream URLs and assembled ffmpeg commands have user/password info
    removed; Frigate+ status and each detector's merged labelmap are
    attached for the UI.
    """
    config_dict = current_app.frigate_config.dict()

    for camera_name, camera in current_app.frigate_config.cameras.items():
        camera_dict = config_dict["cameras"][camera_name]

        # Scrub credentials embedded in the input stream URLs.
        for stream_input in camera_dict.get("ffmpeg", {}).get("inputs", []):
            stream_input["path"] = clean_camera_user_pass(stream_input["path"])

        # Attach the fully assembled ffmpeg commands, also scrubbed.
        camera_dict["ffmpeg_cmds"] = copy.deepcopy(camera.ffmpeg_cmds)
        for cmd in camera_dict["ffmpeg_cmds"]:
            cmd["cmd"] = clean_camera_user_pass(" ".join(cmd["cmd"]))

    config_dict["plus"] = {"enabled": current_app.plus_api.is_active()}

    # Expose the merged labelmap on every detector's model config.
    for detector_config in config_dict["detectors"].values():
        detector_config["model"][
            "labelmap"
        ] = current_app.frigate_config.model.merged_labelmap

    return jsonify(config_dict)
|
2021-06-24 07:45:27 +02:00
|
|
|
|
|
|
|
|
2022-12-07 14:36:56 +01:00
|
|
|
@bp.route("/config/raw")
def config_raw():
    """Return the raw text of the config file on disk."""
    config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

    # Prefer a .yaml sibling when it exists.
    yaml_variant = config_file.replace(".yml", ".yaml")
    if os.path.isfile(yaml_variant):
        config_file = yaml_variant

    if not os.path.isfile(config_file):
        return make_response(
            jsonify({"success": False, "message": "Could not find file"}), 404
        )

    with open(config_file, "r") as f:
        raw_config = f.read()

    return raw_config, 200
|
|
|
|
|
|
|
|
|
|
|
|
@bp.route("/config/save", methods=["POST"])
def config_save():
    """Validate and persist a new config posted as the raw request body.

    The body is parsed with FrigateConfig before being written; on
    validation failure the escaped traceback is returned with a 400.
    With ?save_option=restart the service is restarted after a
    successful save.
    """
    save_option = request.args.get("save_option")

    new_config = request.get_data().decode()

    if not new_config:
        return make_response(
            jsonify(
                {"success": False, "message": "Config with body param is required"}
            ),
            400,
        )

    # Validate the config schema
    try:
        FrigateConfig.parse_raw(new_config)
    except Exception:
        # traceback is HTML-escaped since the message is rendered in the UI
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": f"\nConfig Error:\n\n{escape(str(traceback.format_exc()))}",
                }
            ),
            400,
        )

    # Save the config to file
    try:
        config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

        # Check if we can use .yaml instead of .yml
        config_file_yaml = config_file.replace(".yml", ".yaml")

        if os.path.isfile(config_file_yaml):
            config_file = config_file_yaml

        with open(config_file, "w") as f:
            f.write(new_config)
            f.close()
    except Exception:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "Could not write config file, be sure that Frigate has write permission on the config file.",
                }
            ),
            400,
        )

    if save_option == "restart":
        try:
            restart_frigate()
        except Exception as e:
            # save succeeded but restart failed: still report success (200)
            logging.error(f"Error restarting Frigate: {e}")
            return make_response(
                jsonify(
                    {
                        "success": True,
                        "message": "Config successfully saved, unable to restart Frigate",
                    }
                ),
                200,
            )

        return make_response(
            jsonify(
                {
                    "success": True,
                    "message": "Config successfully saved, restarting (this can take up to one minute)...",
                }
            ),
            200,
        )
    else:
        return make_response(
            jsonify({"success": True, "message": "Config successfully saved."}),
            200,
        )
|
2022-12-07 14:36:56 +01:00
|
|
|
|
|
|
|
|
2023-07-06 20:54:26 +02:00
|
|
|
@bp.route("/config/set", methods=["PUT"])
def config_set():
    """Apply config updates from the request's query string to the config file.

    The current file content is kept as a rollback copy; after
    update_yaml_from_url rewrites the file, the result is validated with
    FrigateConfig and the original content is restored if validation fails.
    """
    config_file = os.environ.get("CONFIG_FILE", f"{CONFIG_DIR}/config.yml")

    # Check if we can use .yaml instead of .yml
    config_file_yaml = config_file.replace(".yml", ".yaml")

    if os.path.isfile(config_file_yaml):
        config_file = config_file_yaml

    # keep the current content for rollback if the update fails validation
    with open(config_file, "r") as f:
        old_raw_config = f.read()
        f.close()

    try:
        update_yaml_from_url(config_file, request.url)
        with open(config_file, "r") as f:
            new_raw_config = f.read()
            f.close()
        # Validate the config schema
        try:
            FrigateConfig.parse_raw(new_raw_config)
        except Exception:
            # validation failed: restore the previous file content
            with open(config_file, "w") as f:
                f.write(old_raw_config)
                f.close()
            return make_response(
                jsonify(
                    {
                        "success": False,
                        "message": f"\nConfig Error:\n\n{str(traceback.format_exc())}",
                    }
                ),
                400,
            )
    except Exception as e:
        logging.error(f"Error updating config: {e}")
        return make_response(
            jsonify({"success": False, "message": "Error updating config"}),
            500,
        )

    return make_response(
        jsonify(
            {
                "success": True,
                "message": "Config successfully updated, restart to apply",
            }
        ),
        200,
    )
|
2023-07-06 20:54:26 +02:00
|
|
|
|
|
|
|
|
2022-12-07 14:36:56 +01:00
|
|
|
@bp.route("/config/schema.json")
def config_schema():
    """Serve the JSON schema of the Frigate config."""
    schema = current_app.frigate_config.schema_json()
    return current_app.response_class(schema, mimetype="application/json")
|
2020-11-18 04:11:19 +01:00
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/version")
def version():
    """Return the Frigate version string."""
    return VERSION
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/stats")
def stats():
    """Return a JSON snapshot of current service statistics."""
    return jsonify(
        stats_snapshot(
            current_app.frigate_config,
            current_app.stats_tracking,
            current_app.hwaccel_errors,
        )
    )
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/<camera_name>")
def mjpeg_feed(camera_name):
    """Stream the camera's annotated frames as an MJPEG multipart response."""
    fps = int(request.args.get("fps", "3"))
    height = int(request.args.get("h", "360"))

    # Overlays to draw on each frame, driven by query args.
    draw_options = {
        "bounding_boxes": request.args.get("bbox", type=int),
        "timestamp": request.args.get("timestamp", type=int),
        "zones": request.args.get("zones", type=int),
        "mask": request.args.get("mask", type=int),
        "motion_boxes": request.args.get("motion", type=int),
        "regions": request.args.get("regions", type=int),
    }

    if camera_name not in current_app.frigate_config.cameras:
        return make_response(
            jsonify({"success": False, "message": "Camera not found"}),
            404,
        )

    # Multipart stream: one JPEG per part, boundary "frame".
    return Response(
        imagestream(
            current_app.detected_frames_processor,
            camera_name,
            fps,
            height,
            draw_options,
        ),
        mimetype="multipart/x-mixed-replace; boundary=frame",
    )
|
2020-11-01 22:37:51 +01:00
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
2023-04-26 13:08:53 +02:00
|
|
|
@bp.route("/<camera_name>/ptz/info")
def camera_ptz_info(camera_name):
    """Return ONVIF PTZ capability info for a configured camera."""
    if camera_name not in current_app.frigate_config.cameras:
        return make_response(
            jsonify({"success": False, "message": "Camera not found"}),
            404,
        )

    return jsonify(current_app.onvif.get_camera_info(camera_name))
|
2023-04-26 13:08:53 +02:00
|
|
|
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
@bp.route("/<camera_name>/latest.jpg")
def latest_frame(camera_name):
    """Return the most recent frame for a camera as a JPEG.

    Query args control overlays (bbox, timestamp, zones, mask, motion,
    regions), output height ("h") and JPEG quality ("quality"). When the
    feed is missing or stale, a bundled error image is served if present.
    The special name "birdseye" serves the birdseye view when its restream
    is enabled.
    """
    draw_options = {
        "bounding_boxes": request.args.get("bbox", type=int),
        "timestamp": request.args.get("timestamp", type=int),
        "zones": request.args.get("zones", type=int),
        "mask": request.args.get("mask", type=int),
        "motion_boxes": request.args.get("motion", type=int),
        "regions": request.args.get("regions", type=int),
    }
    resize_quality = request.args.get("quality", default=70, type=int)

    if camera_name in current_app.frigate_config.cameras:
        frame = current_app.detected_frames_processor.get_current_frame(
            camera_name, draw_options
        )
        retry_interval = float(
            current_app.frigate_config.cameras.get(camera_name).ffmpeg.retry_interval
            or 10
        )

        # Replace missing or stale frames with the cached error image.
        if frame is None or datetime.now().timestamp() > (
            current_app.detected_frames_processor.get_current_frame_time(camera_name)
            + retry_interval
        ):
            if current_app.camera_error_image is None:
                error_image = glob.glob("/opt/frigate/frigate/images/camera-error.jpg")

                if len(error_image) > 0:
                    current_app.camera_error_image = cv2.imread(
                        error_image[0], cv2.IMREAD_UNCHANGED
                    )

            frame = current_app.camera_error_image

        # FIX: this check must run before frame.shape is read below —
        # previously a missing frame (and missing error image) raised
        # AttributeError instead of returning the intended 500.
        if frame is None:
            return make_response(
                jsonify({"success": False, "message": "Unable to get valid frame"}),
                500,
            )

        height = int(request.args.get("h", str(frame.shape[0])))
        width = int(height * frame.shape[1] / frame.shape[0])

        if height < 1 or width < 1:
            return (
                "Invalid height / width requested :: {} / {}".format(height, width),
                400,
            )

        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)

        ret, jpg = cv2.imencode(
            ".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality]
        )
        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        response.headers["Cache-Control"] = "no-store"
        return response
    elif camera_name == "birdseye" and current_app.frigate_config.birdseye.restream:
        # Birdseye frames are stored as YUV I420 and must be converted.
        frame = cv2.cvtColor(
            current_app.detected_frames_processor.get_current_frame(camera_name),
            cv2.COLOR_YUV2BGR_I420,
        )

        height = int(request.args.get("h", str(frame.shape[0])))
        width = int(height * frame.shape[1] / frame.shape[0])

        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)

        ret, jpg = cv2.imencode(
            ".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality]
        )
        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        response.headers["Cache-Control"] = "no-store"
        return response
    else:
        return make_response(
            jsonify({"success": False, "message": "Camera not found"}),
            404,
        )
|
2021-01-09 18:26:46 +01:00
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
2023-04-23 17:45:19 +02:00
|
|
|
@bp.route("/<camera_name>/recordings/<frame_time>/snapshot.png")
def get_snapshot_from_recording(camera_name: str, frame_time: str):
    """Extract a single PNG frame from the recording segment covering `frame_time`.

    `frame_time` is a unix timestamp; the matching segment is looked up in
    the Recordings table and ffmpeg decodes one frame at the offset within
    that segment.
    """
    if camera_name not in current_app.frigate_config.cameras:
        return "Camera named {} not found".format(camera_name), 404

    frame_time = float(frame_time)
    # segment whose [start_time, end_time] interval contains frame_time
    recording_query = (
        Recordings.select(
            Recordings.path,
            Recordings.start_time,
        )
        .where(
            (
                (frame_time >= Recordings.start_time)
                & (frame_time <= Recordings.end_time)
            )
        )
        .where(Recordings.camera == camera_name)
    )

    try:
        recording: Recordings = recording_query.get()
        # seek offset inside the segment file
        time_in_segment = frame_time - recording.start_time

        # decode exactly one frame at the offset and pipe it out as PNG
        ffmpeg_cmd = [
            "ffmpeg",
            "-hide_banner",
            "-loglevel",
            "warning",
            "-ss",
            f"00:00:{time_in_segment}",
            "-i",
            recording.path,
            "-frames:v",
            "1",
            "-c:v",
            "png",
            "-f",
            "image2pipe",
            "-",
        ]

        # NOTE(review): the ffmpeg exit code is not checked, so a decode
        # failure yields an empty 200 response — confirm this is acceptable.
        process = sp.run(
            ffmpeg_cmd,
            capture_output=True,
        )
        response = make_response(process.stdout)
        response.headers["Content-Type"] = "image/png"
        return response
    except DoesNotExist:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "Recording not found at {}".format(frame_time),
                }
            ),
            404,
        )
|
2023-04-23 17:45:19 +02:00
|
|
|
|
|
|
|
|
2022-11-30 02:59:56 +01:00
|
|
|
@bp.route("/recordings/storage", methods=["GET"])
def get_recordings_storage_usage():
    """Report per-camera recording storage usage including percent of total."""
    recording_stats = stats_snapshot(
        current_app.frigate_config,
        current_app.stats_tracking,
        current_app.hwaccel_errors,
    )["service"]["storage"][RECORD_DIR]

    if not recording_stats:
        return jsonify({})

    total_mb = recording_stats["total"]

    camera_usages: dict[
        str, dict
    ] = current_app.storage_maintainer.calculate_camera_usages()

    # Annotate each camera entry with its share of total storage.
    for usage_info in camera_usages.values():
        usage_mb = usage_info.get("usage")
        if usage_mb:
            usage_info["usage_percent"] = (usage_mb / total_mb) * 100

    return jsonify(camera_usages)
|
|
|
|
|
|
|
|
|
2022-05-10 14:48:29 +02:00
|
|
|
# return hourly summary for recordings of camera
@bp.route("/<camera_name>/recordings/summary")
def recordings_summary(camera_name):
    """Return a per-day / per-hour summary of recordings for a camera.

    Hours are bucketed in the requested timezone ("timezone" query arg,
    default utc). Each hour carries summed duration/motion/objects from
    the recordings table plus a count of clip-bearing events that started
    in that hour.
    """
    tz_name = request.args.get("timezone", default="utc", type=str)
    hour_modifier, minute_modifier = get_tz_modifiers(tz_name)
    recording_groups = (
        Recordings.select(
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(
                    Recordings.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ).alias("hour"),
            fn.SUM(Recordings.duration).alias("duration"),
            fn.SUM(Recordings.motion).alias("motion"),
            fn.SUM(Recordings.objects).alias("objects"),
        )
        .where(Recordings.camera == camera_name)
        .group_by(
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(
                    Recordings.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            )
        )
        .order_by(
            # FIX: was "%Y-%m-%d H" (literal H, missing %), which only
            # ordered by day; "%H" matches the select/group_by expression
            # so rows are ordered by hour descending as intended.
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(
                    Recordings.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ).desc()
        )
    )

    event_groups = (
        Event.select(
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(
                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ).alias("hour"),
            fn.COUNT(Event.id).alias("count"),
        )
        .where(Event.camera == camera_name, Event.has_clip)
        .group_by(
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(
                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ),
        )
        .objects()
    )

    # hour bucket -> number of clip-bearing events in that hour
    event_map = {g.hour: g.count for g in event_groups}

    days = {}

    for recording_group in recording_groups.objects():
        parts = recording_group.hour.split()
        hour = parts[1]
        day = parts[0]
        events_count = event_map.get(recording_group.hour, 0)
        hour_data = {
            "hour": hour,
            "events": events_count,
            "motion": recording_group.motion,
            "objects": recording_group.objects,
            "duration": round(recording_group.duration),
        }
        if day not in days:
            days[day] = {"events": events_count, "hours": [hour_data], "day": day}
        else:
            days[day]["events"] += events_count
            days[day]["hours"].append(hour_data)

    return jsonify(list(days.values()))
|
|
|
|
|
|
|
|
|
|
|
|
# return hour of recordings data for camera
@bp.route("/<camera_name>/recordings")
def recordings(camera_name):
    """List recording segments for a camera within a time window.

    Defaults to the last hour when "after"/"before" are not supplied.
    """
    after = request.args.get(
        "after", type=float, default=(datetime.now() - timedelta(hours=1)).timestamp()
    )
    before = request.args.get("before", type=float, default=datetime.now().timestamp())

    # Any segment overlapping the [after, before] window qualifies.
    query = (
        Recordings.select(
            Recordings.id,
            Recordings.start_time,
            Recordings.end_time,
            Recordings.segment_size,
            Recordings.motion,
            Recordings.objects,
        )
        .where(
            Recordings.camera == camera_name,
            Recordings.end_time >= after,
            Recordings.start_time <= before,
        )
        .order_by(Recordings.start_time)
    )

    return jsonify(list(query.dicts()))
|
2021-05-28 19:13:48 +02:00
|
|
|
|
|
|
|
|
2022-08-25 14:32:30 +02:00
|
|
|
@bp.route("/<camera_name>/start/<int:start_ts>/end/<int:end_ts>/clip.mp4")
@bp.route("/<camera_name>/start/<float:start_ts>/end/<float:end_ts>/clip.mp4")
def recording_clip(camera_name, start_ts, end_ts):
    """Build (and cache) an mp4 clip covering [start_ts, end_ts] for a camera.

    Concatenates the overlapping recording segments with ffmpeg's concat
    demuxer (stream copy, no re-encode) into CACHE_DIR, then returns a
    response that lets nginx serve the cached file via X-Accel-Redirect.
    """
    # NOTE(review): Flask's type=bool applies bool() to the raw query string,
    # so any non-empty value (including "false") is truthy — confirm intended.
    download = request.args.get("download", type=bool)

    # All segments that overlap the requested window: starting inside it,
    # ending inside it, or fully spanning it.
    recordings = (
        Recordings.select(
            Recordings.path,
            Recordings.start_time,
            Recordings.end_time,
        )
        .where(
            (Recordings.start_time.between(start_ts, end_ts))
            | (Recordings.end_time.between(start_ts, end_ts))
            | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
        )
        .where(Recordings.camera == camera_name)
        .order_by(Recordings.start_time.asc())
    )

    # Build a concat-demuxer playlist; inpoint/outpoint trim the first and
    # last segments to the exact requested bounds.
    playlist_lines = []
    clip: Recordings
    for clip in recordings:
        playlist_lines.append(f"file '{clip.path}'")
        # if this is the starting clip, add an inpoint
        if clip.start_time < start_ts:
            playlist_lines.append(f"inpoint {int(start_ts - clip.start_time)}")
        # if this is the ending clip, add an outpoint
        if clip.end_time > end_ts:
            playlist_lines.append(f"outpoint {int(end_ts - clip.start_time)}")

    file_name = f"clip_{camera_name}_{start_ts}-{end_ts}.mp4"
    path = os.path.join(CACHE_DIR, file_name)

    # Only render the clip once; later requests for the same range hit the cache.
    if not os.path.exists(path):
        ffmpeg_cmd = [
            "ffmpeg",
            "-hide_banner",
            "-y",
            "-protocol_whitelist",
            "pipe,file",
            "-f",
            "concat",
            "-safe",
            "0",
            "-i",
            "/dev/stdin",
            "-c",
            "copy",
            "-movflags",
            "+faststart",
            path,
        ]
        # Feed the playlist over stdin so no temporary file is needed.
        p = sp.run(
            ffmpeg_cmd,
            input="\n".join(playlist_lines),
            encoding="ascii",
            capture_output=True,
        )

        if p.returncode != 0:
            logger.error(p.stderr)
            return make_response(
                jsonify(
                    {
                        "success": False,
                        "message": "Could not create clip from recordings",
                    }
                ),
                500,
            )
    else:
        logger.debug(
            f"Ignoring subsequent request for {path} as it already exists in the cache."
        )

    # Body is served by nginx via X-Accel-Redirect, not by Flask.
    response = make_response()
    response.headers["Content-Description"] = "File Transfer"
    response.headers["Cache-Control"] = "no-cache"
    response.headers["Content-Type"] = "video/mp4"
    if download:
        response.headers["Content-Disposition"] = "attachment; filename=%s" % file_name
    response.headers["Content-Length"] = os.path.getsize(path)
    response.headers[
        "X-Accel-Redirect"
    ] = f"/cache/{file_name}"  # nginx: http://wiki.nginx.org/NginxXSendfile

    return response
|
|
|
|
|
|
|
|
|
2022-08-25 14:32:30 +02:00
|
|
|
@bp.route("/vod/<camera_name>/start/<int:start_ts>/end/<int:end_ts>")
@bp.route("/vod/<camera_name>/start/<float:start_ts>/end/<float:end_ts>")
def vod_ts(camera_name, start_ts, end_ts):
    """Return an nginx-vod-module mapping JSON for a camera's recordings
    between start_ts and end_ts (unix timestamps).

    Returns a 404 JSON error response when no usable segments exist.
    """
    # All segments that overlap the requested window: starting inside it,
    # ending inside it, or fully spanning it.
    recordings = (
        Recordings.select(Recordings.path, Recordings.duration, Recordings.end_time)
        .where(
            Recordings.start_time.between(start_ts, end_ts)
            | Recordings.end_time.between(start_ts, end_ts)
            | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
        )
        .where(Recordings.camera == camera_name)
        .order_by(Recordings.start_time.asc())
    )

    clips = []
    durations = []
    # Anything at or beyond this length is considered a corrupt segment.
    max_duration_ms = MAX_SEGMENT_DURATION * 1000

    recording: Recordings
    for recording in recordings:
        clip = {"type": "source", "path": recording.path}
        # vod module expects durations in milliseconds
        duration = int(recording.duration * 1000)

        # Determine if we need to end the last clip early
        if recording.end_time > end_ts:
            duration -= int((recording.end_time - end_ts) * 1000)

        # Skip zero/negative durations and implausibly long segments.
        if 0 < duration < max_duration_ms:
            clip["keyFrameDurations"] = [duration]
            clips.append(clip)
            durations.append(duration)
        else:
            logger.warning(f"Recording clip is missing or empty: {recording.path}")

    if not clips:
        logger.error("No recordings found for the requested time range")
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "No recordings found.",
                }
            ),
            404,
        )

    hour_ago = datetime.now() - timedelta(hours=1)
    return jsonify(
        {
            # Only allow caching for ranges that started over an hour ago —
            # recent ranges may still gain segments.
            "cache": hour_ago.timestamp() > start_ts,
            "discontinuity": False,
            "consistentSequenceMediaInfo": True,
            "durations": durations,
            "segment_duration": max(durations),
            "sequences": [{"clips": clips}],
        }
    )
|
|
|
|
|
|
|
|
|
2022-08-25 14:32:30 +02:00
|
|
|
@bp.route("/vod/<year_month>/<day>/<hour>/<camera_name>")
def vod_hour_no_timezone(year_month, day, hour, camera_name):
    """Legacy vod endpoint without a timezone: fall back to the server's
    local zone, encoded with "," in place of "/" for URL safety."""
    local_tz = get_localzone_name().replace("/", ",")
    return vod_hour(year_month, day, hour, camera_name, local_tz)
|
|
|
|
|
|
|
|
|
|
|
|
# TODO make this nicer when vod module is removed
@bp.route("/vod/<year_month>/<day>/<hour>/<camera_name>/<tz_name>")
def vod_hour(year_month, day, hour, camera_name, tz_name):
    """Return the vod mapping for one local-time hour of recordings.

    Args:
        year_month: "YYYY-MM".
        day: day of month (string from the URL).
        hour: hour of day (string from the URL).
        camera_name: camera to query.
        tz_name: IANA zone name with "/" replaced by "," (URL-safe).
    """
    parts = year_month.split("-")
    tz = pytz.timezone(tz_name.replace(",", "/"))
    naive_start = datetime(int(parts[0]), int(parts[1]), int(day), int(hour))
    # Use the zone's offset at the *requested* time (pytz localizes the naive
    # datetime), not the offset right now — otherwise hours on the far side of
    # a DST transition would map to the wrong UTC range.
    start_date = naive_start.replace(tzinfo=timezone.utc) - tz.utcoffset(naive_start)
    end_date = start_date + timedelta(hours=1) - timedelta(milliseconds=1)
    start_ts = start_date.timestamp()
    end_ts = end_date.timestamp()

    return vod_ts(camera_name, start_ts, end_ts)
|
2021-07-09 22:14:16 +02:00
|
|
|
|
|
|
|
|
|
|
|
@bp.route("/vod/event/<id>")
def vod_event(id):
    """Return the vod mapping for a single event's clip.

    Prefers the pre-saved clip file in CLIPS_DIR; when that file is missing,
    falls back to assembling the mapping from raw recordings via vod_ts().
    Returns 404 JSON errors for unknown events or events without clips.
    """
    try:
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        logger.error(f"Event not found: {id}")
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "Event not found.",
                }
            ),
            404,
        )

    if not event.has_clip:
        logger.error(f"Event does not have recordings: {id}")
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "Recordings not available.",
                }
            ),
            404,
        )

    clip_path = os.path.join(CLIPS_DIR, f"{event.camera}-{id}.mp4")

    if not os.path.isfile(clip_path):
        # In-progress events have no end time yet; treat "now" as the end.
        end_ts = (
            datetime.now().timestamp() if event.end_time is None else event.end_time
        )
        vod_response = vod_ts(event.camera, event.start_time, end_ts)
        # If the recordings are not found and the event started more than 5 minutes ago, set has_clip to false
        if (
            event.start_time < datetime.now().timestamp() - 300
            # Flask error responses from vod_ts are (body, status) tuples;
            # isinstance is the idiomatic type check here.
            and isinstance(vod_response, tuple)
            and len(vod_response) == 2
            and vod_response[1] == 404
        ):
            Event.update(has_clip=False).where(Event.id == id).execute()
        return vod_response

    duration = int((event.end_time - event.start_time) * 1000)
    return jsonify(
        {
            "cache": True,
            "discontinuity": False,
            "durations": [duration],
            "sequences": [{"clips": [{"type": "source", "path": clip_path}]}],
        }
    )
|
|
|
|
|
|
|
|
|
2023-07-29 14:05:17 +02:00
|
|
|
@bp.route(
    "/export/<camera_name>/start/<int:start_time>/end/<int:end_time>", methods=["POST"]
)
@bp.route(
    "/export/<camera_name>/start/<float:start_time>/end/<float:end_time>",
    methods=["POST"],
)
def export_recording(camera_name: str, start_time, end_time):
    """Kick off an asynchronous export of recordings for a camera/time range.

    Optional JSON body: {"playback": "<PlaybackFactorEnum member name>"} —
    unknown or missing values fall back to realtime.
    """
    if not camera_name or not current_app.frigate_config.cameras.get(camera_name):
        return make_response(
            jsonify(
                {"success": False, "message": f"{camera_name} is not a valid camera."}
            ),
            404,
        )

    # Named `body` (not `json`) so the stdlib json module is not shadowed.
    body: dict[str, any] = request.get_json(silent=True) or {}
    playback_factor = body.get("playback", "realtime")

    # Refuse to start an export when there is nothing to export: count any
    # segment that overlaps the requested window.
    recordings_count = (
        Recordings.select()
        .where(
            Recordings.start_time.between(start_time, end_time)
            | Recordings.end_time.between(start_time, end_time)
            | ((start_time > Recordings.start_time) & (end_time < Recordings.end_time))
        )
        .where(Recordings.camera == camera_name)
        .count()
    )

    if recordings_count <= 0:
        return make_response(
            jsonify(
                {"success": False, "message": "No recordings found for time range"}
            ),
            400,
        )

    exporter = RecordingExporter(
        current_app.frigate_config,
        camera_name,
        int(start_time),
        int(end_time),
        # __members__ keys are member *names*, which is what the
        # PlaybackFactorEnum[...] lookup indexes by.
        PlaybackFactorEnum[playback_factor]
        if playback_factor in PlaybackFactorEnum.__members__
        else PlaybackFactorEnum.realtime,
    )
    exporter.start()
    return make_response(
        jsonify(
            {
                "success": True,
                "message": "Starting export of recording.",
            }
        ),
        200,
    )
|
2023-06-08 13:32:35 +02:00
|
|
|
|
|
|
|
|
2023-09-21 12:20:57 +02:00
|
|
|
@bp.route("/export/<file_name>", methods=["DELETE"])
def export_delete(file_name: str):
    """Delete a previously exported recording from EXPORT_DIR.

    The name is sanitized with secure_filename so a crafted value cannot
    escape the export directory.
    """
    target = os.path.join(EXPORT_DIR, secure_filename(file_name))

    if not os.path.exists(target):
        return make_response(
            jsonify({"success": False, "message": f"{file_name} not found."}),
            404,
        )

    os.unlink(target)
    return make_response(
        jsonify(
            {
                "success": True,
                "message": "Successfully deleted file.",
            }
        ),
        200,
    )
|
2023-09-21 12:20:57 +02:00
|
|
|
|
|
|
|
|
2020-12-19 15:22:31 +01:00
|
|
|
def imagestream(detected_frames_processor, camera_name, fps, height, draw_options):
    """Yield an endless multipart/x-mixed-replace JPEG stream for a camera.

    Each iteration sleeps to cap the frame rate, grabs the latest frame
    (black 16:9 placeholder when none is available), scales it to the
    requested height, and yields one JPEG part.
    """
    frame_interval = 1 / fps
    while True:
        # max out at specified FPS
        time.sleep(frame_interval)
        raw = detected_frames_processor.get_current_frame(camera_name, draw_options)
        if raw is None:
            # no frame available yet: emit a black 16:9 placeholder
            raw = np.zeros((height, int(height * 16 / 9), 3), np.uint8)

        # scale to the requested height, preserving the aspect ratio
        scaled_width = int(height * raw.shape[1] / raw.shape[0])
        resized = cv2.resize(
            raw, dsize=(scaled_width, height), interpolation=cv2.INTER_LINEAR
        )

        _, jpg = cv2.imencode(".jpg", resized, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
        yield (
            b"--frame\r\n"
            b"Content-Type: image/jpeg\r\n\r\n" + jpg.tobytes() + b"\r\n\r\n"
        )
|
2022-11-13 19:48:14 +01:00
|
|
|
|
|
|
|
|
|
|
|
@bp.route("/ffprobe", methods=["GET"])
def ffprobe():
    """Run ffprobe against one or more stream paths and return the results.

    The "paths" query arg is either "camera:<name>" (probe every configured
    input of that camera) or one-or-more raw stream URLs separated by commas.
    """
    path_param = request.args.get("paths", "")

    if not path_param:
        return make_response(
            jsonify({"success": False, "message": "Path needs to be provided."}), 404
        )

    if path_param.startswith("camera"):
        # strip the "camera:" prefix to get the camera name
        camera = path_param[7:]

        if camera not in current_app.frigate_config.cameras.keys():
            return make_response(
                jsonify(
                    {"success": False, "message": f"{camera} is not a valid camera."}
                ),
                404,
            )

        if not current_app.frigate_config.cameras[camera].enabled:
            return make_response(
                jsonify({"success": False, "message": f"{camera} is not enabled."}), 404
            )

        paths = [
            stream_input.path
            for stream_input in current_app.frigate_config.cameras[
                camera
            ].ffmpeg.inputs
        ]
    elif "," in clean_camera_user_pass(path_param):
        paths = path_param.split(",")
    else:
        paths = [path_param]

    # user has multiple streams
    output = []

    for path in paths:
        probe = ffprobe_stream(path.strip())
        failed = probe.returncode != 0
        output.append(
            {
                "return_code": probe.returncode,
                # only one of stderr/stdout is populated, based on exit status
                "stderr": probe.stderr.decode("unicode_escape").strip()
                if failed
                else "",
                "stdout": ""
                if failed
                else json.loads(probe.stdout.decode("unicode_escape").strip()),
            }
        )

    return jsonify(output)
|
2022-11-29 02:24:20 +01:00
|
|
|
|
|
|
|
|
|
|
|
@bp.route("/vainfo", methods=["GET"])
def vainfo():
    """Run vainfo and report the host's VA-API hwaccel capabilities."""
    result = vainfo_hwaccel()
    success = result.returncode == 0
    return jsonify(
        {
            "return_code": result.returncode,
            # only one of stderr/stdout is populated, based on exit status
            "stderr": "" if success else result.stderr.decode("unicode_escape").strip(),
            "stdout": result.stdout.decode("unicode_escape").strip() if success else "",
        }
    )
|
2022-12-09 03:15:00 +01:00
|
|
|
|
|
|
|
|
|
|
|
@bp.route("/logs/<service>", methods=["GET"])
def logs(service: str):
    """Return the raw log contents for one of the managed services.

    Args:
        service: one of "frigate", "go2rtc", "nginx".

    Returns:
        404 JSON error for an unknown service, 500 when the log file is
        missing, otherwise the file contents with status 200.
    """
    log_locations = {
        "frigate": "/dev/shm/logs/frigate/current",
        "go2rtc": "/dev/shm/logs/go2rtc/current",
        "nginx": "/dev/shm/logs/nginx/current",
    }
    service_location = log_locations.get(service)

    if not service_location:
        return make_response(
            jsonify({"success": False, "message": "Not a valid service"}),
            404,
        )

    try:
        # context manager guarantees the handle is closed even if read() raises
        with open(service_location, "r") as file:
            contents = file.read()
        return contents, 200
    except FileNotFoundError as e:
        return make_response(
            jsonify({"success": False, "message": f"Could not find log file: {e}"}),
            500,
        )
|