mirror of https://github.com/blakeblackshear/frigate.git
Update flask and restructure into api folder with apis put into separate folders (#10193)
* Update flask
* Update flask and break apart different sections into different files
* formatting
* Fix test and add safety check
This commit is contained in:
parent 0022c1aad0
commit 3c4b1fb6f2
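The restructure leans on Flask's nested blueprints: each API area (events, media, previews, reviews) defines its own Blueprint, and app.py mounts them all on one parent. A minimal sketch of that composition pattern, using hypothetical names rather than Frigate's real modules:

# Minimal nested-blueprint sketch; "area"/"parent" names are hypothetical.
from flask import Blueprint, Flask, jsonify

AreaBp = Blueprint("area", __name__)  # stands in for EventBp/MediaBp/etc.


@AreaBp.route("/ping")
def ping():
    return jsonify({"pong": True})


parent = Blueprint("parent", __name__)
parent.register_blueprint(AreaBp)  # nesting blueprints requires Flask >= 2.0


def create_app() -> Flask:
    app = Flask(__name__)
    app.register_blueprint(parent)  # one call wires in every nested area
    return app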
@@ -1,5 +1,5 @@
 click == 8.1.*
-Flask == 2.3.*
+Flask == 3.0.*
 imutils == 0.5.*
 markupsafe == 2.1.*
 matplotlib == 3.7.*
0    frigate/api/__init__.py    Normal file
733  frigate/api/app.py         Normal file

@@ -0,0 +1,733 @@
"""Main api runner."""

import copy
import json
import logging
import os
import traceback
from collections import defaultdict
from datetime import datetime, timedelta
from functools import reduce

import numpy as np
import pandas as pd
import requests
from flask import (
    Blueprint,
    Flask,
    current_app,
    jsonify,
    make_response,
    request,
)
from markupsafe import escape
from peewee import operator
from playhouse.sqliteq import SqliteQueueDatabase

from frigate.api.event import EventBp
from frigate.api.media import MediaBp
from frigate.api.preview import PreviewBp
from frigate.api.review import ReviewBp
from frigate.config import FrigateConfig
from frigate.const import CONFIG_DIR
from frigate.events.external import ExternalEventProcessor
from frigate.models import Event, Recordings, Timeline
from frigate.plus import PlusApi
from frigate.ptz.onvif import OnvifController
from frigate.stats.emitter import StatsEmitter
from frigate.storage import StorageMaintainer
from frigate.util.builtin import (
    clean_camera_user_pass,
    get_tz_modifiers,
    update_yaml_from_url,
)
from frigate.util.services import ffprobe_stream, restart_frigate, vainfo_hwaccel
from frigate.version import VERSION

logger = logging.getLogger(__name__)


bp = Blueprint("frigate", __name__)
bp.register_blueprint(EventBp)
bp.register_blueprint(MediaBp)
bp.register_blueprint(PreviewBp)
bp.register_blueprint(ReviewBp)


def create_app(
    frigate_config,
    database: SqliteQueueDatabase,
    detected_frames_processor,
    storage_maintainer: StorageMaintainer,
    onvif: OnvifController,
    external_processor: ExternalEventProcessor,
    plus_api: PlusApi,
    stats_emitter: StatsEmitter,
):
    app = Flask(__name__)

    @app.before_request
    def check_csrf():
        if request.method in ["GET", "HEAD", "OPTIONS", "TRACE"]:
            pass
        if "origin" in request.headers and "x-csrf-token" not in request.headers:
            return jsonify({"success": False, "message": "Missing CSRF header"}), 401

    @app.before_request
    def _db_connect():
        if database.is_closed():
            database.connect()

    @app.teardown_request
    def _db_close(exc):
        if not database.is_closed():
            database.close()

    app.frigate_config = frigate_config
    app.detected_frames_processor = detected_frames_processor
    app.storage_maintainer = storage_maintainer
    app.onvif = onvif
    app.external_processor = external_processor
    app.plus_api = plus_api
    app.camera_error_image = None
    app.stats_emitter = stats_emitter

    app.register_blueprint(bp)

    return app


@bp.route("/")
def is_healthy():
    return "Frigate is running. Alive and healthy!"


@bp.route("/config/schema.json")
def config_schema():
    return current_app.response_class(
        current_app.frigate_config.schema_json(), mimetype="application/json"
    )


@bp.route("/go2rtc/streams")
def go2rtc_streams():
    r = requests.get("http://127.0.0.1:1984/api/streams")
    if not r.ok:
        logger.error("Failed to fetch streams from go2rtc")
        return make_response(
            jsonify({"success": False, "message": "Error fetching stream data"}),
            500,
        )
    stream_data = r.json()
    for data in stream_data.values():
        for producer in data.get("producers", []):
            producer["url"] = clean_camera_user_pass(producer.get("url", ""))
    return jsonify(stream_data)


@bp.route("/version")
def version():
    return VERSION


@bp.route("/stats")
def stats():
    return jsonify(current_app.stats_emitter.get_latest_stats())


@bp.route("/stats/history")
def stats_history():
    return jsonify(current_app.stats_emitter.get_stats_history())


@bp.route("/config")
def config():
    config = current_app.frigate_config.model_dump(mode="json", exclude_none=True)

    # remove the mqtt password
    config["mqtt"].pop("password", None)

    for camera_name, camera in current_app.frigate_config.cameras.items():
        camera_dict = config["cameras"][camera_name]

        # clean paths
        for input in camera_dict.get("ffmpeg", {}).get("inputs", []):
            input["path"] = clean_camera_user_pass(input["path"])

        # add clean ffmpeg_cmds
        camera_dict["ffmpeg_cmds"] = copy.deepcopy(camera.ffmpeg_cmds)
        for cmd in camera_dict["ffmpeg_cmds"]:
            cmd["cmd"] = clean_camera_user_pass(" ".join(cmd["cmd"]))

    config["plus"] = {"enabled": current_app.plus_api.is_active()}

    for detector, detector_config in config["detectors"].items():
        detector_config["model"]["labelmap"] = (
            current_app.frigate_config.model.merged_labelmap
        )

    return jsonify(config)


@bp.route("/config/raw")
def config_raw():
    config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

    # Check if we can use .yaml instead of .yml
    config_file_yaml = config_file.replace(".yml", ".yaml")

    if os.path.isfile(config_file_yaml):
        config_file = config_file_yaml

    if not os.path.isfile(config_file):
        return make_response(
            jsonify({"success": False, "message": "Could not find file"}), 404
        )

    with open(config_file, "r") as f:
        raw_config = f.read()
        f.close()

    return raw_config, 200


@bp.route("/config/save", methods=["POST"])
def config_save():
    save_option = request.args.get("save_option")

    new_config = request.get_data().decode()

    if not new_config:
        return make_response(
            jsonify(
                {"success": False, "message": "Config with body param is required"}
            ),
            400,
        )

    # Validate the config schema
    try:
        FrigateConfig.parse_raw(new_config)
    except Exception:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": f"\nConfig Error:\n\n{escape(str(traceback.format_exc()))}",
                }
            ),
            400,
        )

    # Save the config to file
    try:
        config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

        # Check if we can use .yaml instead of .yml
        config_file_yaml = config_file.replace(".yml", ".yaml")

        if os.path.isfile(config_file_yaml):
            config_file = config_file_yaml

        with open(config_file, "w") as f:
            f.write(new_config)
            f.close()
    except Exception:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "Could not write config file, be sure that Frigate has write permission on the config file.",
                }
            ),
            400,
        )

    if save_option == "restart":
        try:
            restart_frigate()
        except Exception as e:
            logging.error(f"Error restarting Frigate: {e}")
            return make_response(
                jsonify(
                    {
                        "success": True,
                        "message": "Config successfully saved, unable to restart Frigate",
                    }
                ),
                200,
            )

        return make_response(
            jsonify(
                {
                    "success": True,
                    "message": "Config successfully saved, restarting (this can take up to one minute)...",
                }
            ),
            200,
        )
    else:
        return make_response(
            jsonify({"success": True, "message": "Config successfully saved."}),
            200,
        )


@bp.route("/config/set", methods=["PUT"])
def config_set():
    config_file = os.environ.get("CONFIG_FILE", f"{CONFIG_DIR}/config.yml")

    # Check if we can use .yaml instead of .yml
    config_file_yaml = config_file.replace(".yml", ".yaml")

    if os.path.isfile(config_file_yaml):
        config_file = config_file_yaml

    with open(config_file, "r") as f:
        old_raw_config = f.read()
        f.close()

    try:
        update_yaml_from_url(config_file, request.url)
        with open(config_file, "r") as f:
            new_raw_config = f.read()
            f.close()
        # Validate the config schema
        try:
            FrigateConfig.parse_raw(new_raw_config)
        except Exception:
            with open(config_file, "w") as f:
                f.write(old_raw_config)
                f.close()
            logger.error(f"\nConfig Error:\n\n{str(traceback.format_exc())}")
            return make_response(
                jsonify(
                    {
                        "success": False,
                        "message": "Error parsing config. Check logs for error message.",
                    }
                ),
                400,
            )
    except Exception as e:
        logging.error(f"Error updating config: {e}")
        return make_response(
            jsonify({"success": False, "message": "Error updating config"}),
            500,
        )

    return make_response(
        jsonify(
            {
                "success": True,
                "message": "Config successfully updated, restart to apply",
            }
        ),
        200,
    )


@bp.route("/ffprobe", methods=["GET"])
def ffprobe():
    path_param = request.args.get("paths", "")

    if not path_param:
        return make_response(
            jsonify({"success": False, "message": "Path needs to be provided."}), 404
        )

    if path_param.startswith("camera"):
        camera = path_param[7:]

        if camera not in current_app.frigate_config.cameras.keys():
            return make_response(
                jsonify(
                    {"success": False, "message": f"{camera} is not a valid camera."}
                ),
                404,
            )

        if not current_app.frigate_config.cameras[camera].enabled:
            return make_response(
                jsonify({"success": False, "message": f"{camera} is not enabled."}), 404
            )

        paths = map(
            lambda input: input.path,
            current_app.frigate_config.cameras[camera].ffmpeg.inputs,
        )
    elif "," in clean_camera_user_pass(path_param):
        paths = path_param.split(",")
    else:
        paths = [path_param]

    # user has multiple streams
    output = []

    for path in paths:
        ffprobe = ffprobe_stream(path.strip())
        output.append(
            {
                "return_code": ffprobe.returncode,
                "stderr": (
                    ffprobe.stderr.decode("unicode_escape").strip()
                    if ffprobe.returncode != 0
                    else ""
                ),
                "stdout": (
                    json.loads(ffprobe.stdout.decode("unicode_escape").strip())
                    if ffprobe.returncode == 0
                    else ""
                ),
            }
        )

    return jsonify(output)


@bp.route("/vainfo", methods=["GET"])
def vainfo():
    vainfo = vainfo_hwaccel()
    return jsonify(
        {
            "return_code": vainfo.returncode,
            "stderr": (
                vainfo.stderr.decode("unicode_escape").strip()
                if vainfo.returncode != 0
                else ""
            ),
            "stdout": (
                vainfo.stdout.decode("unicode_escape").strip()
                if vainfo.returncode == 0
                else ""
            ),
        }
    )


@bp.route("/logs/<service>", methods=["GET"])
def logs(service: str):
    log_locations = {
        "frigate": "/dev/shm/logs/frigate/current",
        "go2rtc": "/dev/shm/logs/go2rtc/current",
        "nginx": "/dev/shm/logs/nginx/current",
    }
    service_location = log_locations.get(service)

    if not service_location:
        return make_response(
            jsonify({"success": False, "message": "Not a valid service"}),
            404,
        )

    try:
        file = open(service_location, "r")
        contents = file.read()
        file.close()
        return contents, 200
    except FileNotFoundError as e:
        logger.error(e)
        return make_response(
            jsonify({"success": False, "message": "Could not find log file"}),
            500,
        )


@bp.route("/restart", methods=["POST"])
def restart():
    try:
        restart_frigate()
    except Exception as e:
        logging.error(f"Error restarting Frigate: {e}")
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "Unable to restart Frigate.",
                }
            ),
            500,
        )

    return make_response(
        jsonify(
            {
                "success": True,
                "message": "Restarting (this can take up to one minute)...",
            }
        ),
        200,
    )


@bp.route("/labels")
def get_labels():
    camera = request.args.get("camera", type=str, default="")

    try:
        if camera:
            events = Event.select(Event.label).where(Event.camera == camera).distinct()
        else:
            events = Event.select(Event.label).distinct()
    except Exception as e:
        logger.error(e)
        return make_response(
            jsonify({"success": False, "message": "Failed to get labels"}), 404
        )

    labels = sorted([e.label for e in events])
    return jsonify(labels)


@bp.route("/sub_labels")
def get_sub_labels():
    split_joined = request.args.get("split_joined", type=int)

    try:
        events = Event.select(Event.sub_label).distinct()
    except Exception:
        return make_response(
            jsonify({"success": False, "message": "Failed to get sub_labels"}),
            404,
        )

    sub_labels = [e.sub_label for e in events]

    if None in sub_labels:
        sub_labels.remove(None)

    if split_joined:
        original_labels = sub_labels.copy()

        for label in original_labels:
            if "," in label:
                sub_labels.remove(label)
                parts = label.split(",")

                for part in parts:
                    if part.strip() not in sub_labels:
                        sub_labels.append(part.strip())

    sub_labels.sort()
    return jsonify(sub_labels)


@bp.route("/timeline")
def timeline():
    camera = request.args.get("camera", "all")
    source_id = request.args.get("source_id", type=str)
    limit = request.args.get("limit", 100)

    clauses = []

    selected_columns = [
        Timeline.timestamp,
        Timeline.camera,
        Timeline.source,
        Timeline.source_id,
        Timeline.class_type,
        Timeline.data,
    ]

    if camera != "all":
        clauses.append((Timeline.camera == camera))

    if source_id:
        clauses.append((Timeline.source_id == source_id))

    if len(clauses) == 0:
        clauses.append((True))

    timeline = (
        Timeline.select(*selected_columns)
        .where(reduce(operator.and_, clauses))
        .order_by(Timeline.timestamp.asc())
        .limit(limit)
        .dicts()
    )

    return jsonify([t for t in timeline])


@bp.route("/timeline/hourly")
def hourly_timeline():
    """Get hourly summary for timeline."""
    cameras = request.args.get("cameras", "all")
    labels = request.args.get("labels", "all")
    before = request.args.get("before", type=float)
    after = request.args.get("after", type=float)
    limit = request.args.get("limit", 200)
    tz_name = request.args.get("timezone", default="utc", type=str)

    _, minute_modifier, _ = get_tz_modifiers(tz_name)
    minute_offset = int(minute_modifier.split(" ")[0])

    clauses = []

    if cameras != "all":
        camera_list = cameras.split(",")
        clauses.append((Timeline.camera << camera_list))

    if labels != "all":
        label_list = labels.split(",")
        clauses.append((Timeline.data["label"] << label_list))

    if before:
        clauses.append((Timeline.timestamp < before))

    if after:
        clauses.append((Timeline.timestamp > after))

    if len(clauses) == 0:
        clauses.append((True))

    timeline = (
        Timeline.select(
            Timeline.camera,
            Timeline.timestamp,
            Timeline.data,
            Timeline.class_type,
            Timeline.source_id,
            Timeline.source,
        )
        .where(reduce(operator.and_, clauses))
        .order_by(Timeline.timestamp.desc())
        .limit(limit)
        .dicts()
        .iterator()
    )

    count = 0
    start = 0
    end = 0
    hours: dict[str, list[dict[str, any]]] = {}

    for t in timeline:
        if count == 0:
            start = t["timestamp"]
        else:
            end = t["timestamp"]

        count += 1

        hour = (
            datetime.fromtimestamp(t["timestamp"]).replace(
                minute=0, second=0, microsecond=0
            )
            + timedelta(
                minutes=minute_offset,
            )
        ).timestamp()
        if hour not in hours:
            hours[hour] = [t]
        else:
            hours[hour].insert(0, t)

    return jsonify(
        {
            "start": start,
            "end": end,
            "count": count,
            "hours": hours,
        }
    )


@bp.route("/<camera_name>/recording/hourly/activity")
def hourly_timeline_activity(camera_name: str):
    """Get hourly summary for timeline."""
    if camera_name not in current_app.frigate_config.cameras:
        return make_response(
            jsonify({"success": False, "message": "Camera not found"}),
            404,
        )

    before = request.args.get("before", type=float, default=datetime.now())
    after = request.args.get(
        "after", type=float, default=datetime.now() - timedelta(hours=1)
    )
    tz_name = request.args.get("timezone", default="utc", type=str)

    _, minute_modifier, _ = get_tz_modifiers(tz_name)
    minute_offset = int(minute_modifier.split(" ")[0])

    all_recordings: list[Recordings] = (
        Recordings.select(
            Recordings.start_time,
            Recordings.duration,
            Recordings.objects,
            Recordings.motion,
        )
        .where(Recordings.camera == camera_name)
        .where(Recordings.motion > 0)
        .where((Recordings.start_time > after) & (Recordings.end_time < before))
        .order_by(Recordings.start_time.asc())
        .iterator()
    )

    # data format is ex:
    # {timestamp: [{ date: 1, count: 1, type: motion }]}] }}
    hours: dict[int, list[dict[str, any]]] = defaultdict(list)

    key = datetime.fromtimestamp(after).replace(second=0, microsecond=0) + timedelta(
        minutes=minute_offset
    )
    check = (key + timedelta(hours=1)).timestamp()

    # set initial start so data is representative of full hour
    hours[int(key.timestamp())].append(
        [
            key.timestamp(),
            0,
            False,
        ]
    )

    for recording in all_recordings:
        if recording.start_time > check:
            hours[int(key.timestamp())].append(
                [
                    (key + timedelta(minutes=59, seconds=59)).timestamp(),
                    0,
                    False,
                ]
            )
            key = key + timedelta(hours=1)
            check = (key + timedelta(hours=1)).timestamp()
            hours[int(key.timestamp())].append(
                [
                    key.timestamp(),
                    0,
                    False,
                ]
            )

        data_type = recording.objects > 0
        count = recording.motion + recording.objects
        hours[int(key.timestamp())].append(
            [
                recording.start_time + (recording.duration / 2),
                0 if count == 0 else np.log2(count),
                data_type,
            ]
        )

    # resample data using pandas to get activity on minute to minute basis
    for key, data in hours.items():
        df = pd.DataFrame(data, columns=["date", "count", "hasObjects"])

        # set date as datetime index
        df["date"] = pd.to_datetime(df["date"], unit="s")
        df.set_index(["date"], inplace=True)

        # normalize data
        df = df.resample("T").mean().fillna(0)

        # change types for output
        df.index = df.index.astype(int) // (10**9)
        df["count"] = df["count"].astype(int)
        df["hasObjects"] = df["hasObjects"].astype(bool)
        hours[key] = df.reset_index().to_dict("records")

    return jsonify(hours)
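A quick way to exercise these top-level routes from Python; the base URL (and whether a fronting proxy adds an /api prefix) depends on the deployment, so treat both as assumptions:

# Smoke test for the routes above; BASE is an assumed local deployment.
import requests

BASE = "http://localhost:5000"

print(requests.get(f"{BASE}/version").text)           # bare version string
print(requests.get(f"{BASE}/stats").json())           # latest stats snapshot
print(requests.get(f"{BASE}/config").json()["plus"])  # e.g. {"enabled": False}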
688  frigate/api/event.py    Normal file

@@ -0,0 +1,688 @@
"""Event apis."""

import logging
import os
from datetime import datetime
from functools import reduce
from pathlib import Path
from urllib.parse import unquote

import cv2
from flask import (
    Blueprint,
    current_app,
    jsonify,
    make_response,
    request,
)
from peewee import DoesNotExist, fn, operator
from playhouse.shortcuts import model_to_dict

from frigate.const import (
    CLIPS_DIR,
)
from frigate.models import Event, Timeline
from frigate.object_processing import TrackedObject
from frigate.util.builtin import (
    get_tz_modifiers,
)

logger = logging.getLogger(__name__)

EventBp = Blueprint("events", __name__)

DEFAULT_TIME_RANGE = "00:00,24:00"


@EventBp.route("/events")
def events():
    camera = request.args.get("camera", "all")
    cameras = request.args.get("cameras", "all")

    # handle old camera arg
    if cameras == "all" and camera != "all":
        cameras = camera

    label = unquote(request.args.get("label", "all"))
    labels = request.args.get("labels", "all")

    # handle old label arg
    if labels == "all" and label != "all":
        labels = label

    sub_label = request.args.get("sub_label", "all")
    sub_labels = request.args.get("sub_labels", "all")

    # handle old sub_label arg
    if sub_labels == "all" and sub_label != "all":
        sub_labels = sub_label

    zone = request.args.get("zone", "all")
    zones = request.args.get("zones", "all")

    # handle old label arg
    if zones == "all" and zone != "all":
        zones = zone

    limit = request.args.get("limit", 100)
    after = request.args.get("after", type=float)
    before = request.args.get("before", type=float)
    time_range = request.args.get("time_range", DEFAULT_TIME_RANGE)
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)
    in_progress = request.args.get("in_progress", type=int)
    include_thumbnails = request.args.get("include_thumbnails", default=1, type=int)
    favorites = request.args.get("favorites", type=int)
    min_score = request.args.get("min_score", type=float)
    max_score = request.args.get("max_score", type=float)
    is_submitted = request.args.get("is_submitted", type=int)
    min_length = request.args.get("min_length", type=float)
    max_length = request.args.get("max_length", type=float)

    clauses = []

    selected_columns = [
        Event.id,
        Event.camera,
        Event.label,
        Event.zones,
        Event.start_time,
        Event.end_time,
        Event.has_clip,
        Event.has_snapshot,
        Event.plus_id,
        Event.retain_indefinitely,
        Event.sub_label,
        Event.top_score,
        Event.false_positive,
        Event.box,
        Event.data,
    ]

    if camera != "all":
        clauses.append((Event.camera == camera))

    if cameras != "all":
        camera_list = cameras.split(",")
        clauses.append((Event.camera << camera_list))

    if labels != "all":
        label_list = labels.split(",")
        clauses.append((Event.label << label_list))

    if sub_labels != "all":
        # use matching so joined sub labels are included
        # for example a sub label 'bob' would get events
        # with sub labels 'bob' and 'bob, john'
        sub_label_clauses = []
        filtered_sub_labels = sub_labels.split(",")

        if "None" in filtered_sub_labels:
            filtered_sub_labels.remove("None")
            sub_label_clauses.append((Event.sub_label.is_null()))

        for label in filtered_sub_labels:
            sub_label_clauses.append(
                (Event.sub_label.cast("text") == label)
            )  # include exact matches

            # include this label when part of a list
            sub_label_clauses.append((Event.sub_label.cast("text") % f"*{label},*"))
            sub_label_clauses.append((Event.sub_label.cast("text") % f"*, {label}*"))

        sub_label_clause = reduce(operator.or_, sub_label_clauses)
        clauses.append((sub_label_clause))

    if zones != "all":
        # use matching so events with multiple zones
        # still match on a search where any zone matches
        zone_clauses = []
        filtered_zones = zones.split(",")

        if "None" in filtered_zones:
            filtered_zones.remove("None")
            zone_clauses.append((Event.zones.length() == 0))

        for zone in filtered_zones:
            zone_clauses.append((Event.zones.cast("text") % f'*"{zone}"*'))

        zone_clause = reduce(operator.or_, zone_clauses)
        clauses.append((zone_clause))

    if after:
        clauses.append((Event.start_time > after))

    if before:
        clauses.append((Event.start_time < before))

    if time_range != DEFAULT_TIME_RANGE:
        # get timezone arg to ensure browser times are used
        tz_name = request.args.get("timezone", default="utc", type=str)
        hour_modifier, minute_modifier, _ = get_tz_modifiers(tz_name)

        times = time_range.split(",")
        time_after = times[0]
        time_before = times[1]

        start_hour_fun = fn.strftime(
            "%H:%M",
            fn.datetime(Event.start_time, "unixepoch", hour_modifier, minute_modifier),
        )

        # cases where user wants events overnight, ex: from 20:00 to 06:00
        # should use or operator
        if time_after > time_before:
            clauses.append(
                (
                    reduce(
                        operator.or_,
                        [(start_hour_fun > time_after), (start_hour_fun < time_before)],
                    )
                )
            )
        # all other cases should be and operator
        else:
            clauses.append((start_hour_fun > time_after))
            clauses.append((start_hour_fun < time_before))

    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    if in_progress is not None:
        clauses.append((Event.end_time.is_null(in_progress)))

    if include_thumbnails:
        selected_columns.append(Event.thumbnail)

    if favorites:
        clauses.append((Event.retain_indefinitely == favorites))

    if max_score is not None:
        clauses.append((Event.data["score"] <= max_score))

    if min_score is not None:
        clauses.append((Event.data["score"] >= min_score))

    if min_length is not None:
        clauses.append(((Event.end_time - Event.start_time) >= min_length))

    if max_length is not None:
        clauses.append(((Event.end_time - Event.start_time) <= max_length))

    if is_submitted is not None:
        if is_submitted == 0:
            clauses.append((Event.plus_id.is_null()))
        elif is_submitted > 0:
            clauses.append((Event.plus_id != ""))

    if len(clauses) == 0:
        clauses.append((True))

    events = (
        Event.select(*selected_columns)
        .where(reduce(operator.and_, clauses))
        .order_by(Event.start_time.desc())
        .limit(limit)
        .dicts()
        .iterator()
    )

    return jsonify(list(events))


@EventBp.route("/events/summary")
def events_summary():
    tz_name = request.args.get("timezone", default="utc", type=str)
    hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(tz_name)
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)

    clauses = []

    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    if len(clauses) == 0:
        clauses.append((True))

    groups = (
        Event.select(
            Event.camera,
            Event.label,
            Event.sub_label,
            fn.strftime(
                "%Y-%m-%d",
                fn.datetime(
                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
                ),
            ).alias("day"),
            Event.zones,
            fn.COUNT(Event.id).alias("count"),
        )
        .where(reduce(operator.and_, clauses))
        .group_by(
            Event.camera,
            Event.label,
            Event.sub_label,
            (Event.start_time + seconds_offset).cast("int") / (3600 * 24),
            Event.zones,
        )
    )

    return jsonify([e for e in groups.dicts()])


@EventBp.route("/events/<id>", methods=("GET",))
def event(id):
    try:
        return model_to_dict(Event.get(Event.id == id))
    except DoesNotExist:
        return "Event not found", 404


@EventBp.route("/events/<id>/retain", methods=("POST",))
def set_retain(id):
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    event.retain_indefinitely = True
    event.save()

    return make_response(
        jsonify({"success": True, "message": "Event " + id + " retained"}), 200
    )


@EventBp.route("/events/<id>/plus", methods=("POST",))
def send_to_plus(id):
    if not current_app.plus_api.is_active():
        message = "PLUS_API_KEY environment variable is not set"
        logger.error(message)
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": message,
                }
            ),
            400,
        )

    include_annotation = (
        request.json.get("include_annotation") if request.is_json else None
    )

    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        message = f"Event {id} not found"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 404)

    # events from before the conversion to relative dimensions cant include annotations
    if event.data.get("box") is None:
        include_annotation = None

    if event.end_time is None:
        logger.error(f"Unable to load clean png for in-progress event: {event.id}")
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "Unable to load clean png for in-progress event",
                }
            ),
            400,
        )

    if event.plus_id:
        message = "Already submitted to plus"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 400)

    # load clean.png
    try:
        filename = f"{event.camera}-{event.id}-clean.png"
        image = cv2.imread(os.path.join(CLIPS_DIR, filename))
    except Exception:
        logger.error(f"Unable to load clean png for event: {event.id}")
        return make_response(
            jsonify(
                {"success": False, "message": "Unable to load clean png for event"}
            ),
            400,
        )

    if image is None or image.size == 0:
        logger.error(f"Unable to load clean png for event: {event.id}")
        return make_response(
            jsonify(
                {"success": False, "message": "Unable to load clean png for event"}
            ),
            400,
        )

    try:
        plus_id = current_app.plus_api.upload_image(image, event.camera)
    except Exception as ex:
        logger.exception(ex)
        return make_response(
            jsonify({"success": False, "message": "Error uploading image"}),
            400,
        )

    # store image id in the database
    event.plus_id = plus_id
    event.save()

    if include_annotation is not None:
        box = event.data["box"]

        try:
            current_app.plus_api.add_annotation(
                event.plus_id,
                box,
                event.label,
            )
        except ValueError:
            message = "Error uploading annotation, unsupported label provided."
            logger.error(message)
            return make_response(
                jsonify({"success": False, "message": message}),
                400,
            )
        except Exception as ex:
            logger.exception(ex)
            return make_response(
                jsonify({"success": False, "message": "Error uploading annotation"}),
                400,
            )

    return make_response(jsonify({"success": True, "plus_id": plus_id}), 200)


@EventBp.route("/events/<id>/false_positive", methods=("PUT",))
def false_positive(id):
    if not current_app.plus_api.is_active():
        message = "PLUS_API_KEY environment variable is not set"
        logger.error(message)
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": message,
                }
            ),
            400,
        )

    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        message = f"Event {id} not found"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 404)

    # events from before the conversion to relative dimensions cant include annotations
    if event.data.get("box") is None:
        message = "Events prior to 0.13 cannot be submitted as false positives"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 400)

    if event.false_positive:
        message = "False positive already submitted to Frigate+"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 400)

    if not event.plus_id:
        plus_response = send_to_plus(id)
        if plus_response.status_code != 200:
            return plus_response
        # need to refetch the event now that it has a plus_id
        event = Event.get(Event.id == id)

    region = event.data["region"]
    box = event.data["box"]

    # provide top score if score is unavailable
    score = (
        (event.data["top_score"] if event.data["top_score"] else event.top_score)
        if event.data["score"] is None
        else event.data["score"]
    )

    try:
        current_app.plus_api.add_false_positive(
            event.plus_id,
            region,
            box,
            score,
            event.label,
            event.model_hash,
            event.model_type,
            event.detector_type,
        )
    except ValueError:
        message = "Error uploading false positive, unsupported label provided."
        logger.error(message)
        return make_response(
            jsonify({"success": False, "message": message}),
            400,
        )
    except Exception as ex:
        logger.exception(ex)
        return make_response(
            jsonify({"success": False, "message": "Error uploading false positive"}),
            400,
        )

    event.false_positive = True
    event.save()

    return make_response(jsonify({"success": True, "plus_id": event.plus_id}), 200)


@EventBp.route("/events/<id>/retain", methods=("DELETE",))
def delete_retain(id):
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    event.retain_indefinitely = False
    event.save()

    return make_response(
        jsonify({"success": True, "message": "Event " + id + " un-retained"}), 200
    )


@EventBp.route("/events/<id>/sub_label", methods=("POST",))
def set_sub_label(id):
    try:
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    json: dict[str, any] = request.get_json(silent=True) or {}
    new_sub_label = json.get("subLabel")
    new_score = json.get("subLabelScore")

    if new_sub_label is None:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "A sub label must be supplied",
                }
            ),
            400,
        )

    if new_sub_label and len(new_sub_label) > 100:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": new_sub_label
                    + " exceeds the 100 character limit for sub_label",
                }
            ),
            400,
        )

    if new_score is not None and (new_score > 1.0 or new_score < 0):
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": new_score
                    + " does not fit within the expected bounds 0 <= score <= 1.0",
                }
            ),
            400,
        )

    if not event.end_time:
        # update tracked object
        tracked_obj: TrackedObject = (
            current_app.detected_frames_processor.camera_states[
                event.camera
            ].tracked_objects.get(event.id)
        )

        if tracked_obj:
            tracked_obj.obj_data["sub_label"] = (new_sub_label, new_score)

    # update timeline items
    Timeline.update(
        data=Timeline.data.update({"sub_label": (new_sub_label, new_score)})
    ).where(Timeline.source_id == id).execute()

    event.sub_label = new_sub_label

    if new_score:
        data = event.data
        data["sub_label_score"] = new_score
        event.data = data

    event.save()
    return make_response(
        jsonify(
            {
                "success": True,
                "message": "Event " + id + " sub label set to " + new_sub_label,
            }
        ),
        200,
    )


@EventBp.route("/events/<id>", methods=("DELETE",))
def delete_event(id):
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    media_name = f"{event.camera}-{event.id}"
    if event.has_snapshot:
        media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
        media.unlink(missing_ok=True)
        media = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.png")
        media.unlink(missing_ok=True)
    if event.has_clip:
        media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4")
        media.unlink(missing_ok=True)

    event.delete_instance()
    Timeline.delete().where(Timeline.source_id == id).execute()
    return make_response(
        jsonify({"success": True, "message": "Event " + id + " deleted"}), 200
    )


@EventBp.route("/events/<camera_name>/<label>/create", methods=["POST"])
def create_event(camera_name, label):
    if not camera_name or not current_app.frigate_config.cameras.get(camera_name):
        return make_response(
            jsonify(
                {"success": False, "message": f"{camera_name} is not a valid camera."}
            ),
            404,
        )

    if not label:
        return make_response(
            jsonify({"success": False, "message": f"{label} must be set."}), 404
        )

    json: dict[str, any] = request.get_json(silent=True) or {}

    try:
        frame = current_app.detected_frames_processor.get_current_frame(camera_name)

        event_id = current_app.external_processor.create_manual_event(
            camera_name,
            label,
            json.get("source_type", "api"),
            json.get("sub_label", None),
            json.get("score", 0),
            json.get("duration", 30),
            json.get("include_recording", True),
            json.get("draw", {}),
            frame,
        )
    except Exception as e:
        logger.error(e)
        return make_response(
            jsonify({"success": False, "message": "An unknown error occurred"}),
            500,
        )

    return make_response(
        jsonify(
            {
                "success": True,
                "message": "Successfully created event.",
                "event_id": event_id,
            }
        ),
        200,
    )


@EventBp.route("/events/<event_id>/end", methods=["PUT"])
def end_event(event_id):
    json: dict[str, any] = request.get_json(silent=True) or {}

    try:
        end_time = json.get("end_time", datetime.now().timestamp())
        current_app.external_processor.finish_manual_event(event_id, end_time)
    except Exception:
        return make_response(
            jsonify(
                {"success": False, "message": f"{event_id} must be set and valid."}
            ),
            404,
        )

    return make_response(
        jsonify({"success": True, "message": "Event successfully ended."}), 200
    )
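The manual event endpoints above can be driven end to end like this; the camera name, label, and base URL are hypothetical placeholders:

# Create then end a manual event; "front_door", "person", and BASE are assumptions.
import requests

BASE = "http://localhost:5000"

resp = requests.post(
    f"{BASE}/events/front_door/person/create",
    json={"score": 0.8, "duration": 30, "include_recording": True},
)
event_id = resp.json()["event_id"]

# end it early rather than waiting out the duration
requests.put(f"{BASE}/events/{event_id}/end", json={})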
1272  frigate/api/media.py    Normal file

File diff suppressed because it is too large.
116  frigate/api/preview.py    Normal file

@@ -0,0 +1,116 @@
"""Preview apis."""

import logging
import os
from datetime import datetime, timedelta, timezone

import pytz
from flask import (
    Blueprint,
    jsonify,
    make_response,
)

from frigate.const import CACHE_DIR
from frigate.models import Previews

logger = logging.getLogger(__name__)

PreviewBp = Blueprint("previews", __name__)


@PreviewBp.route("/preview/<camera_name>/start/<int:start_ts>/end/<int:end_ts>")
@PreviewBp.route("/preview/<camera_name>/start/<float:start_ts>/end/<float:end_ts>")
def preview_ts(camera_name, start_ts, end_ts):
    """Get all mp4 previews relevant for time period."""
    if camera_name != "all":
        camera_clause = Previews.camera == camera_name
    else:
        camera_clause = True

    previews = (
        Previews.select(
            Previews.camera,
            Previews.path,
            Previews.duration,
            Previews.start_time,
            Previews.end_time,
        )
        .where(
            Previews.start_time.between(start_ts, end_ts)
            | Previews.end_time.between(start_ts, end_ts)
            | ((start_ts > Previews.start_time) & (end_ts < Previews.end_time))
        )
        .where(camera_clause)
        .order_by(Previews.start_time.asc())
        .dicts()
        .iterator()
    )

    clips = []

    preview: Previews
    for preview in previews:
        clips.append(
            {
                "camera": preview["camera"],
                "src": preview["path"].replace("/media/frigate", ""),
                "type": "video/mp4",
                "start": preview["start_time"],
                "end": preview["end_time"],
            }
        )

    if not clips:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "No previews found.",
                }
            ),
            404,
        )

    return make_response(jsonify(clips), 200)


@PreviewBp.route("/preview/<year_month>/<day>/<hour>/<camera_name>/<tz_name>")
def preview_hour(year_month, day, hour, camera_name, tz_name):
    parts = year_month.split("-")
    start_date = (
        datetime(int(parts[0]), int(parts[1]), int(day), int(hour), tzinfo=timezone.utc)
        - datetime.now(pytz.timezone(tz_name.replace(",", "/"))).utcoffset()
    )
    end_date = start_date + timedelta(hours=1) - timedelta(milliseconds=1)
    start_ts = start_date.timestamp()
    end_ts = end_date.timestamp()

    return preview_ts(camera_name, start_ts, end_ts)


@PreviewBp.route("/preview/<camera_name>/start/<int:start_ts>/end/<int:end_ts>/frames")
@PreviewBp.route(
    "/preview/<camera_name>/start/<float:start_ts>/end/<float:end_ts>/frames"
)
def get_preview_frames_from_cache(camera_name: str, start_ts, end_ts):
    """Get list of cached preview frames"""
    preview_dir = os.path.join(CACHE_DIR, "preview_frames")
    file_start = f"preview_{camera_name}"
    start_file = f"{file_start}-{start_ts}.jpg"
    end_file = f"{file_start}-{end_ts}.jpg"
    selected_previews = []

    for file in sorted(os.listdir(preview_dir)):
        if not file.startswith(file_start):
            continue

        if file < start_file:
            continue

        if file > end_file:
            break

        selected_previews.append(file)

    return jsonify(selected_previews)
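Because preview_ts is registered under both an int and a float URL converter, callers can pass epoch seconds in either form; the camera name and base URL below are placeholders:

# Both route variants resolve to preview_ts; values here are placeholder assumptions.
import requests

BASE = "http://localhost:5000"

requests.get(f"{BASE}/preview/front_door/start/1700000000/end/1700003600")
requests.get(f"{BASE}/preview/front_door/start/1700000000.0/end/1700003600.0")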
136  frigate/api/review.py    Normal file

@@ -0,0 +1,136 @@
"""Review apis."""

import logging
from datetime import datetime, timedelta
from functools import reduce

from flask import (
    Blueprint,
    jsonify,
    make_response,
    request,
)
from peewee import DoesNotExist, operator

from frigate.models import ReviewSegment

logger = logging.getLogger(__name__)

ReviewBp = Blueprint("reviews", __name__)


@ReviewBp.route("/review")
def review():
    cameras = request.args.get("cameras", "all")
    labels = request.args.get("labels", "all")
    reviewed = request.args.get("reviewed", type=int, default=0)
    limit = request.args.get("limit", 100)
    severity = request.args.get("severity", None)

    before = request.args.get("before", type=float, default=datetime.now().timestamp())
    after = request.args.get(
        "after", type=float, default=(datetime.now() - timedelta(hours=18)).timestamp()
    )

    clauses = [((ReviewSegment.start_time > after) & (ReviewSegment.end_time < before))]

    if cameras != "all":
        camera_list = cameras.split(",")
        clauses.append((ReviewSegment.camera << camera_list))

    if labels != "all":
        # use matching so segments with multiple labels
        # still match on a search where any label matches
        label_clauses = []
        filtered_labels = labels.split(",")

        for label in filtered_labels:
            label_clauses.append(
                (ReviewSegment.data["objects"].cast("text") % f'*"{label}"*')
            )

        label_clause = reduce(operator.or_, label_clauses)
        clauses.append((label_clause))

    if reviewed == 0:
        clauses.append((ReviewSegment.has_been_reviewed == False))

    if severity:
        clauses.append((ReviewSegment.severity == severity))

    review = (
        ReviewSegment.select()
        .where(reduce(operator.and_, clauses))
        .order_by(ReviewSegment.severity.asc())
        .order_by(ReviewSegment.start_time.desc())
        .limit(limit)
        .dicts()
    )

    return jsonify([r for r in review])


@ReviewBp.route("/review/<id>/viewed", methods=("POST",))
def set_reviewed(id):
    try:
        review: ReviewSegment = ReviewSegment.get(ReviewSegment.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Review " + id + " not found"}), 404
        )

    review.has_been_reviewed = True
    review.save()

    return make_response(
        jsonify({"success": True, "message": "Reviewed " + id + " viewed"}), 200
    )


@ReviewBp.route("/reviews/<ids>/viewed", methods=("POST",))
def set_multiple_reviewed(ids: str):
    list_of_ids = ids.split(",")

    if not list_of_ids or len(list_of_ids) == 0:
        return make_response(
            jsonify({"success": False, "message": "Not a valid list of ids"}), 404
        )

    ReviewSegment.update(has_been_reviewed=True).where(
        ReviewSegment.id << list_of_ids
    ).execute()

    return make_response(
        jsonify({"success": True, "message": "Reviewed multiple items"}), 200
    )


@ReviewBp.route("/review/<id>/viewed", methods=("DELETE",))
def set_not_reviewed(id):
    try:
        review: ReviewSegment = ReviewSegment.get(ReviewSegment.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Review " + id + " not found"}), 404
        )

    review.has_been_reviewed = False
    review.save()

    return make_response(
        jsonify({"success": True, "message": "Reviewed " + id + " not viewed"}), 200
    )


@ReviewBp.route("/reviews/<ids>", methods=("DELETE",))
def delete_reviews(ids: str):
    list_of_ids = ids.split(",")

    if not list_of_ids or len(list_of_ids) == 0:
        return make_response(
            jsonify({"success": False, "message": "Not a valid list of ids"}), 404
        )

    ReviewSegment.delete().where(ReviewSegment.id << list_of_ids).execute()

    return make_response(jsonify({"success": True, "message": "Delete reviews"}), 200)
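Fetching unreviewed segments and bulk-marking them viewed composes naturally from the routes above; the base URL is again an assumption:

# List unreviewed segments, then mark the first few viewed; BASE is assumed.
import requests

BASE = "http://localhost:5000"

segments = requests.get(f"{BASE}/review", params={"reviewed": 0}).json()

if segments:
    ids = ",".join(s["id"] for s in segments[:3])
    requests.post(f"{BASE}/reviews/{ids}/viewed")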
@@ -17,6 +17,7 @@ from peewee_migrate import Router
 from playhouse.sqlite_ext import SqliteExtDatabase
 from playhouse.sqliteq import SqliteQueueDatabase
 
+from frigate.api.app import create_app
 from frigate.comms.config_updater import ConfigPublisher
 from frigate.comms.detections_updater import DetectionProxy
 from frigate.comms.dispatcher import Communicator, Dispatcher
@@ -37,7 +38,6 @@ from frigate.events.audio import listen_to_audio
 from frigate.events.cleanup import EventCleanup
 from frigate.events.external import ExternalEventProcessor
 from frigate.events.maintainer import EventProcessor
-from frigate.http import create_app
 from frigate.log import log_process, root_configurer
 from frigate.models import (
     Event,
@@ -151,7 +151,6 @@ class MqttConfig(FrigateBaseModel):
 
     @field_validator("password")
     def user_requires_pass(cls, v, info: ValidationInfo):
-        print(f"doing a check where {v} is None and {info.data['user']} is None")
         if (v is None) != (info.data["user"] is None):
             raise ValueError("Password must be provided with username.")
         return v
2841  frigate/http.py

File diff suppressed because it is too large.
@@ -10,8 +10,8 @@ from playhouse.shortcuts import model_to_dict
 from playhouse.sqlite_ext import SqliteExtDatabase
 from playhouse.sqliteq import SqliteQueueDatabase
 
+from frigate.api.app import create_app
 from frigate.config import FrigateConfig
-from frigate.http import create_app
 from frigate.models import Event, Recordings
 from frigate.plus import PlusApi
 from frigate.stats.emitter import StatsEmitter
@@ -122,6 +122,9 @@ def clean_camera_user_pass(line: str) -> str:
 
 def escape_special_characters(path: str) -> str:
     """Cleans reserved characters to encodings for ffmpeg."""
+    if len(path) > 1000:
+        return ValueError("Input too long to check")
+
     try:
         found = re.search(REGEX_RTSP_CAMERA_USER_PASS, path).group(0)[3:-1]
         pw = found[(found.index(":") + 1) :]