From 3c4b1fb6f2c5b98278c26b9fbcd42eae70cb55ae Mon Sep 17 00:00:00 2001 From: Nicolas Mowen Date: Sat, 2 Mar 2024 15:10:37 -0700 Subject: [PATCH] Update flask and restructure into api folder with apis put into separate folders (#10193) * Update flask * Update flask and break apart different sections into different files * formatting * Fix test and add safety check --- docker/main/requirements-wheels.txt | 2 +- frigate/api/__init__.py | 0 frigate/api/app.py | 733 +++++++ frigate/api/event.py | 688 +++++++ frigate/api/media.py | 1272 ++++++++++++ frigate/api/preview.py | 116 ++ frigate/api/review.py | 136 ++ frigate/app.py | 2 +- frigate/config.py | 1 - frigate/http.py | 2841 --------------------------- frigate/test/test_http.py | 2 +- frigate/util/builtin.py | 3 + 12 files changed, 2951 insertions(+), 2845 deletions(-) create mode 100644 frigate/api/__init__.py create mode 100644 frigate/api/app.py create mode 100644 frigate/api/event.py create mode 100644 frigate/api/media.py create mode 100644 frigate/api/preview.py create mode 100644 frigate/api/review.py delete mode 100644 frigate/http.py diff --git a/docker/main/requirements-wheels.txt b/docker/main/requirements-wheels.txt index 1bbf2fab6..8466d9f13 100644 --- a/docker/main/requirements-wheels.txt +++ b/docker/main/requirements-wheels.txt @@ -1,5 +1,5 @@ click == 8.1.* -Flask == 2.3.* +Flask == 3.0.* imutils == 0.5.* markupsafe == 2.1.* matplotlib == 3.7.* diff --git a/frigate/api/__init__.py b/frigate/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/frigate/api/app.py b/frigate/api/app.py new file mode 100644 index 000000000..321bd5758 --- /dev/null +++ b/frigate/api/app.py @@ -0,0 +1,733 @@ +"""Main api runner.""" + +import copy +import json +import logging +import os +import traceback +from collections import defaultdict +from datetime import datetime, timedelta +from functools import reduce + +import numpy as np +import pandas as pd +import requests +from flask import ( + Blueprint, + Flask, + current_app, + jsonify, + make_response, + request, +) +from markupsafe import escape +from peewee import operator +from playhouse.sqliteq import SqliteQueueDatabase + +from frigate.api.event import EventBp +from frigate.api.media import MediaBp +from frigate.api.preview import PreviewBp +from frigate.api.review import ReviewBp +from frigate.config import FrigateConfig +from frigate.const import CONFIG_DIR +from frigate.events.external import ExternalEventProcessor +from frigate.models import Event, Recordings, Timeline +from frigate.plus import PlusApi +from frigate.ptz.onvif import OnvifController +from frigate.stats.emitter import StatsEmitter +from frigate.storage import StorageMaintainer +from frigate.util.builtin import ( + clean_camera_user_pass, + get_tz_modifiers, + update_yaml_from_url, +) +from frigate.util.services import ffprobe_stream, restart_frigate, vainfo_hwaccel +from frigate.version import VERSION + +logger = logging.getLogger(__name__) + + +bp = Blueprint("frigate", __name__) +bp.register_blueprint(EventBp) +bp.register_blueprint(MediaBp) +bp.register_blueprint(PreviewBp) +bp.register_blueprint(ReviewBp) + + +def create_app( + frigate_config, + database: SqliteQueueDatabase, + detected_frames_processor, + storage_maintainer: StorageMaintainer, + onvif: OnvifController, + external_processor: ExternalEventProcessor, + plus_api: PlusApi, + stats_emitter: StatsEmitter, +): + app = Flask(__name__) + + @app.before_request + def check_csrf(): + if request.method in ["GET", "HEAD", "OPTIONS", "TRACE"]: 
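+            # Safe methods are enumerated here, but execution falls through to
+            # the Origin/x-csrf-token check below, so GET/HEAD/OPTIONS/TRACE
+            # requests are not actually exempted from the CSRF header check.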
+ pass + if "origin" in request.headers and "x-csrf-token" not in request.headers: + return jsonify({"success": False, "message": "Missing CSRF header"}), 401 + + @app.before_request + def _db_connect(): + if database.is_closed(): + database.connect() + + @app.teardown_request + def _db_close(exc): + if not database.is_closed(): + database.close() + + app.frigate_config = frigate_config + app.detected_frames_processor = detected_frames_processor + app.storage_maintainer = storage_maintainer + app.onvif = onvif + app.external_processor = external_processor + app.plus_api = plus_api + app.camera_error_image = None + app.stats_emitter = stats_emitter + + app.register_blueprint(bp) + + return app + + +@bp.route("/") +def is_healthy(): + return "Frigate is running. Alive and healthy!" + + +@bp.route("/config/schema.json") +def config_schema(): + return current_app.response_class( + current_app.frigate_config.schema_json(), mimetype="application/json" + ) + + +@bp.route("/go2rtc/streams") +def go2rtc_streams(): + r = requests.get("http://127.0.0.1:1984/api/streams") + if not r.ok: + logger.error("Failed to fetch streams from go2rtc") + return make_response( + jsonify({"success": False, "message": "Error fetching stream data"}), + 500, + ) + stream_data = r.json() + for data in stream_data.values(): + for producer in data.get("producers", []): + producer["url"] = clean_camera_user_pass(producer.get("url", "")) + return jsonify(stream_data) + + +@bp.route("/version") +def version(): + return VERSION + + +@bp.route("/stats") +def stats(): + return jsonify(current_app.stats_emitter.get_latest_stats()) + + +@bp.route("/stats/history") +def stats_history(): + return jsonify(current_app.stats_emitter.get_stats_history()) + + +@bp.route("/config") +def config(): + config = current_app.frigate_config.model_dump(mode="json", exclude_none=True) + + # remove the mqtt password + config["mqtt"].pop("password", None) + + for camera_name, camera in current_app.frigate_config.cameras.items(): + camera_dict = config["cameras"][camera_name] + + # clean paths + for input in camera_dict.get("ffmpeg", {}).get("inputs", []): + input["path"] = clean_camera_user_pass(input["path"]) + + # add clean ffmpeg_cmds + camera_dict["ffmpeg_cmds"] = copy.deepcopy(camera.ffmpeg_cmds) + for cmd in camera_dict["ffmpeg_cmds"]: + cmd["cmd"] = clean_camera_user_pass(" ".join(cmd["cmd"])) + + config["plus"] = {"enabled": current_app.plus_api.is_active()} + + for detector, detector_config in config["detectors"].items(): + detector_config["model"]["labelmap"] = ( + current_app.frigate_config.model.merged_labelmap + ) + + return jsonify(config) + + +@bp.route("/config/raw") +def config_raw(): + config_file = os.environ.get("CONFIG_FILE", "/config/config.yml") + + # Check if we can use .yaml instead of .yml + config_file_yaml = config_file.replace(".yml", ".yaml") + + if os.path.isfile(config_file_yaml): + config_file = config_file_yaml + + if not os.path.isfile(config_file): + return make_response( + jsonify({"success": False, "message": "Could not find file"}), 404 + ) + + with open(config_file, "r") as f: + raw_config = f.read() + f.close() + + return raw_config, 200 + + +@bp.route("/config/save", methods=["POST"]) +def config_save(): + save_option = request.args.get("save_option") + + new_config = request.get_data().decode() + + if not new_config: + return make_response( + jsonify( + {"success": False, "message": "Config with body param is required"} + ), + 400, + ) + + # Validate the config schema + try: + 
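+        # Parse the submitted YAML into a FrigateConfig model first; any
+        # validation failure is caught and returned to the caller as a 400
+        # carrying the escaped traceback.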
FrigateConfig.parse_raw(new_config) + except Exception: + return make_response( + jsonify( + { + "success": False, + "message": f"\nConfig Error:\n\n{escape(str(traceback.format_exc()))}", + } + ), + 400, + ) + + # Save the config to file + try: + config_file = os.environ.get("CONFIG_FILE", "/config/config.yml") + + # Check if we can use .yaml instead of .yml + config_file_yaml = config_file.replace(".yml", ".yaml") + + if os.path.isfile(config_file_yaml): + config_file = config_file_yaml + + with open(config_file, "w") as f: + f.write(new_config) + f.close() + except Exception: + return make_response( + jsonify( + { + "success": False, + "message": "Could not write config file, be sure that Frigate has write permission on the config file.", + } + ), + 400, + ) + + if save_option == "restart": + try: + restart_frigate() + except Exception as e: + logging.error(f"Error restarting Frigate: {e}") + return make_response( + jsonify( + { + "success": True, + "message": "Config successfully saved, unable to restart Frigate", + } + ), + 200, + ) + + return make_response( + jsonify( + { + "success": True, + "message": "Config successfully saved, restarting (this can take up to one minute)...", + } + ), + 200, + ) + else: + return make_response( + jsonify({"success": True, "message": "Config successfully saved."}), + 200, + ) + + +@bp.route("/config/set", methods=["PUT"]) +def config_set(): + config_file = os.environ.get("CONFIG_FILE", f"{CONFIG_DIR}/config.yml") + + # Check if we can use .yaml instead of .yml + config_file_yaml = config_file.replace(".yml", ".yaml") + + if os.path.isfile(config_file_yaml): + config_file = config_file_yaml + + with open(config_file, "r") as f: + old_raw_config = f.read() + f.close() + + try: + update_yaml_from_url(config_file, request.url) + with open(config_file, "r") as f: + new_raw_config = f.read() + f.close() + # Validate the config schema + try: + FrigateConfig.parse_raw(new_raw_config) + except Exception: + with open(config_file, "w") as f: + f.write(old_raw_config) + f.close() + logger.error(f"\nConfig Error:\n\n{str(traceback.format_exc())}") + return make_response( + jsonify( + { + "success": False, + "message": "Error parsing config. 
Check logs for error message.", + } + ), + 400, + ) + except Exception as e: + logging.error(f"Error updating config: {e}") + return make_response( + jsonify({"success": False, "message": "Error updating config"}), + 500, + ) + + return make_response( + jsonify( + { + "success": True, + "message": "Config successfully updated, restart to apply", + } + ), + 200, + ) + + +@bp.route("/ffprobe", methods=["GET"]) +def ffprobe(): + path_param = request.args.get("paths", "") + + if not path_param: + return make_response( + jsonify({"success": False, "message": "Path needs to be provided."}), 404 + ) + + if path_param.startswith("camera"): + camera = path_param[7:] + + if camera not in current_app.frigate_config.cameras.keys(): + return make_response( + jsonify( + {"success": False, "message": f"{camera} is not a valid camera."} + ), + 404, + ) + + if not current_app.frigate_config.cameras[camera].enabled: + return make_response( + jsonify({"success": False, "message": f"{camera} is not enabled."}), 404 + ) + + paths = map( + lambda input: input.path, + current_app.frigate_config.cameras[camera].ffmpeg.inputs, + ) + elif "," in clean_camera_user_pass(path_param): + paths = path_param.split(",") + else: + paths = [path_param] + + # user has multiple streams + output = [] + + for path in paths: + ffprobe = ffprobe_stream(path.strip()) + output.append( + { + "return_code": ffprobe.returncode, + "stderr": ( + ffprobe.stderr.decode("unicode_escape").strip() + if ffprobe.returncode != 0 + else "" + ), + "stdout": ( + json.loads(ffprobe.stdout.decode("unicode_escape").strip()) + if ffprobe.returncode == 0 + else "" + ), + } + ) + + return jsonify(output) + + +@bp.route("/vainfo", methods=["GET"]) +def vainfo(): + vainfo = vainfo_hwaccel() + return jsonify( + { + "return_code": vainfo.returncode, + "stderr": ( + vainfo.stderr.decode("unicode_escape").strip() + if vainfo.returncode != 0 + else "" + ), + "stdout": ( + vainfo.stdout.decode("unicode_escape").strip() + if vainfo.returncode == 0 + else "" + ), + } + ) + + +@bp.route("/logs/", methods=["GET"]) +def logs(service: str): + log_locations = { + "frigate": "/dev/shm/logs/frigate/current", + "go2rtc": "/dev/shm/logs/go2rtc/current", + "nginx": "/dev/shm/logs/nginx/current", + } + service_location = log_locations.get(service) + + if not service_location: + return make_response( + jsonify({"success": False, "message": "Not a valid service"}), + 404, + ) + + try: + file = open(service_location, "r") + contents = file.read() + file.close() + return contents, 200 + except FileNotFoundError as e: + logger.error(e) + return make_response( + jsonify({"success": False, "message": "Could not find log file"}), + 500, + ) + + +@bp.route("/restart", methods=["POST"]) +def restart(): + try: + restart_frigate() + except Exception as e: + logging.error(f"Error restarting Frigate: {e}") + return make_response( + jsonify( + { + "success": False, + "message": "Unable to restart Frigate.", + } + ), + 500, + ) + + return make_response( + jsonify( + { + "success": True, + "message": "Restarting (this can take up to one minute)...", + } + ), + 200, + ) + + +@bp.route("/labels") +def get_labels(): + camera = request.args.get("camera", type=str, default="") + + try: + if camera: + events = Event.select(Event.label).where(Event.camera == camera).distinct() + else: + events = Event.select(Event.label).distinct() + except Exception as e: + logger.error(e) + return make_response( + jsonify({"success": False, "message": "Failed to get labels"}), 404 + ) + + labels = sorted([e.label 
for e in events]) + return jsonify(labels) + + +@bp.route("/sub_labels") +def get_sub_labels(): + split_joined = request.args.get("split_joined", type=int) + + try: + events = Event.select(Event.sub_label).distinct() + except Exception: + return make_response( + jsonify({"success": False, "message": "Failed to get sub_labels"}), + 404, + ) + + sub_labels = [e.sub_label for e in events] + + if None in sub_labels: + sub_labels.remove(None) + + if split_joined: + original_labels = sub_labels.copy() + + for label in original_labels: + if "," in label: + sub_labels.remove(label) + parts = label.split(",") + + for part in parts: + if part.strip() not in sub_labels: + sub_labels.append(part.strip()) + + sub_labels.sort() + return jsonify(sub_labels) + + +@bp.route("/timeline") +def timeline(): + camera = request.args.get("camera", "all") + source_id = request.args.get("source_id", type=str) + limit = request.args.get("limit", 100) + + clauses = [] + + selected_columns = [ + Timeline.timestamp, + Timeline.camera, + Timeline.source, + Timeline.source_id, + Timeline.class_type, + Timeline.data, + ] + + if camera != "all": + clauses.append((Timeline.camera == camera)) + + if source_id: + clauses.append((Timeline.source_id == source_id)) + + if len(clauses) == 0: + clauses.append((True)) + + timeline = ( + Timeline.select(*selected_columns) + .where(reduce(operator.and_, clauses)) + .order_by(Timeline.timestamp.asc()) + .limit(limit) + .dicts() + ) + + return jsonify([t for t in timeline]) + + +@bp.route("/timeline/hourly") +def hourly_timeline(): + """Get hourly summary for timeline.""" + cameras = request.args.get("cameras", "all") + labels = request.args.get("labels", "all") + before = request.args.get("before", type=float) + after = request.args.get("after", type=float) + limit = request.args.get("limit", 200) + tz_name = request.args.get("timezone", default="utc", type=str) + + _, minute_modifier, _ = get_tz_modifiers(tz_name) + minute_offset = int(minute_modifier.split(" ")[0]) + + clauses = [] + + if cameras != "all": + camera_list = cameras.split(",") + clauses.append((Timeline.camera << camera_list)) + + if labels != "all": + label_list = labels.split(",") + clauses.append((Timeline.data["label"] << label_list)) + + if before: + clauses.append((Timeline.timestamp < before)) + + if after: + clauses.append((Timeline.timestamp > after)) + + if len(clauses) == 0: + clauses.append((True)) + + timeline = ( + Timeline.select( + Timeline.camera, + Timeline.timestamp, + Timeline.data, + Timeline.class_type, + Timeline.source_id, + Timeline.source, + ) + .where(reduce(operator.and_, clauses)) + .order_by(Timeline.timestamp.desc()) + .limit(limit) + .dicts() + .iterator() + ) + + count = 0 + start = 0 + end = 0 + hours: dict[str, list[dict[str, any]]] = {} + + for t in timeline: + if count == 0: + start = t["timestamp"] + else: + end = t["timestamp"] + + count += 1 + + hour = ( + datetime.fromtimestamp(t["timestamp"]).replace( + minute=0, second=0, microsecond=0 + ) + + timedelta( + minutes=minute_offset, + ) + ).timestamp() + if hour not in hours: + hours[hour] = [t] + else: + hours[hour].insert(0, t) + + return jsonify( + { + "start": start, + "end": end, + "count": count, + "hours": hours, + } + ) + + +@bp.route("//recording/hourly/activity") +def hourly_timeline_activity(camera_name: str): + """Get hourly summary for timeline.""" + if camera_name not in current_app.frigate_config.cameras: + return make_response( + jsonify({"success": False, "message": "Camera not found"}), + 404, + ) + + before = 
request.args.get("before", type=float, default=datetime.now()) + after = request.args.get( + "after", type=float, default=datetime.now() - timedelta(hours=1) + ) + tz_name = request.args.get("timezone", default="utc", type=str) + + _, minute_modifier, _ = get_tz_modifiers(tz_name) + minute_offset = int(minute_modifier.split(" ")[0]) + + all_recordings: list[Recordings] = ( + Recordings.select( + Recordings.start_time, + Recordings.duration, + Recordings.objects, + Recordings.motion, + ) + .where(Recordings.camera == camera_name) + .where(Recordings.motion > 0) + .where((Recordings.start_time > after) & (Recordings.end_time < before)) + .order_by(Recordings.start_time.asc()) + .iterator() + ) + + # data format is ex: + # {timestamp: [{ date: 1, count: 1, type: motion }]}] }} + hours: dict[int, list[dict[str, any]]] = defaultdict(list) + + key = datetime.fromtimestamp(after).replace(second=0, microsecond=0) + timedelta( + minutes=minute_offset + ) + check = (key + timedelta(hours=1)).timestamp() + + # set initial start so data is representative of full hour + hours[int(key.timestamp())].append( + [ + key.timestamp(), + 0, + False, + ] + ) + + for recording in all_recordings: + if recording.start_time > check: + hours[int(key.timestamp())].append( + [ + (key + timedelta(minutes=59, seconds=59)).timestamp(), + 0, + False, + ] + ) + key = key + timedelta(hours=1) + check = (key + timedelta(hours=1)).timestamp() + hours[int(key.timestamp())].append( + [ + key.timestamp(), + 0, + False, + ] + ) + + data_type = recording.objects > 0 + count = recording.motion + recording.objects + hours[int(key.timestamp())].append( + [ + recording.start_time + (recording.duration / 2), + 0 if count == 0 else np.log2(count), + data_type, + ] + ) + + # resample data using pandas to get activity on minute to minute basis + for key, data in hours.items(): + df = pd.DataFrame(data, columns=["date", "count", "hasObjects"]) + + # set date as datetime index + df["date"] = pd.to_datetime(df["date"], unit="s") + df.set_index(["date"], inplace=True) + + # normalize data + df = df.resample("T").mean().fillna(0) + + # change types for output + df.index = df.index.astype(int) // (10**9) + df["count"] = df["count"].astype(int) + df["hasObjects"] = df["hasObjects"].astype(bool) + hours[key] = df.reset_index().to_dict("records") + + return jsonify(hours) diff --git a/frigate/api/event.py b/frigate/api/event.py new file mode 100644 index 000000000..32aa42146 --- /dev/null +++ b/frigate/api/event.py @@ -0,0 +1,688 @@ +"""Event apis.""" + +import logging +import os +from datetime import datetime +from functools import reduce +from pathlib import Path +from urllib.parse import unquote + +import cv2 +from flask import ( + Blueprint, + current_app, + jsonify, + make_response, + request, +) +from peewee import DoesNotExist, fn, operator +from playhouse.shortcuts import model_to_dict + +from frigate.const import ( + CLIPS_DIR, +) +from frigate.models import Event, Timeline +from frigate.object_processing import TrackedObject +from frigate.util.builtin import ( + get_tz_modifiers, +) + +logger = logging.getLogger(__name__) + +EventBp = Blueprint("events", __name__) + +DEFAULT_TIME_RANGE = "00:00,24:00" + + +@EventBp.route("/events") +def events(): + camera = request.args.get("camera", "all") + cameras = request.args.get("cameras", "all") + + # handle old camera arg + if cameras == "all" and camera != "all": + cameras = camera + + label = unquote(request.args.get("label", "all")) + labels = request.args.get("labels", "all") + + # handle 
old label arg + if labels == "all" and label != "all": + labels = label + + sub_label = request.args.get("sub_label", "all") + sub_labels = request.args.get("sub_labels", "all") + + # handle old sub_label arg + if sub_labels == "all" and sub_label != "all": + sub_labels = sub_label + + zone = request.args.get("zone", "all") + zones = request.args.get("zones", "all") + + # handle old label arg + if zones == "all" and zone != "all": + zones = zone + + limit = request.args.get("limit", 100) + after = request.args.get("after", type=float) + before = request.args.get("before", type=float) + time_range = request.args.get("time_range", DEFAULT_TIME_RANGE) + has_clip = request.args.get("has_clip", type=int) + has_snapshot = request.args.get("has_snapshot", type=int) + in_progress = request.args.get("in_progress", type=int) + include_thumbnails = request.args.get("include_thumbnails", default=1, type=int) + favorites = request.args.get("favorites", type=int) + min_score = request.args.get("min_score", type=float) + max_score = request.args.get("max_score", type=float) + is_submitted = request.args.get("is_submitted", type=int) + min_length = request.args.get("min_length", type=float) + max_length = request.args.get("max_length", type=float) + + clauses = [] + + selected_columns = [ + Event.id, + Event.camera, + Event.label, + Event.zones, + Event.start_time, + Event.end_time, + Event.has_clip, + Event.has_snapshot, + Event.plus_id, + Event.retain_indefinitely, + Event.sub_label, + Event.top_score, + Event.false_positive, + Event.box, + Event.data, + ] + + if camera != "all": + clauses.append((Event.camera == camera)) + + if cameras != "all": + camera_list = cameras.split(",") + clauses.append((Event.camera << camera_list)) + + if labels != "all": + label_list = labels.split(",") + clauses.append((Event.label << label_list)) + + if sub_labels != "all": + # use matching so joined sub labels are included + # for example a sub label 'bob' would get events + # with sub labels 'bob' and 'bob, john' + sub_label_clauses = [] + filtered_sub_labels = sub_labels.split(",") + + if "None" in filtered_sub_labels: + filtered_sub_labels.remove("None") + sub_label_clauses.append((Event.sub_label.is_null())) + + for label in filtered_sub_labels: + sub_label_clauses.append( + (Event.sub_label.cast("text") == label) + ) # include exact matches + + # include this label when part of a list + sub_label_clauses.append((Event.sub_label.cast("text") % f"*{label},*")) + sub_label_clauses.append((Event.sub_label.cast("text") % f"*, {label}*")) + + sub_label_clause = reduce(operator.or_, sub_label_clauses) + clauses.append((sub_label_clause)) + + if zones != "all": + # use matching so events with multiple zones + # still match on a search where any zone matches + zone_clauses = [] + filtered_zones = zones.split(",") + + if "None" in filtered_zones: + filtered_zones.remove("None") + zone_clauses.append((Event.zones.length() == 0)) + + for zone in filtered_zones: + zone_clauses.append((Event.zones.cast("text") % f'*"{zone}"*')) + + zone_clause = reduce(operator.or_, zone_clauses) + clauses.append((zone_clause)) + + if after: + clauses.append((Event.start_time > after)) + + if before: + clauses.append((Event.start_time < before)) + + if time_range != DEFAULT_TIME_RANGE: + # get timezone arg to ensure browser times are used + tz_name = request.args.get("timezone", default="utc", type=str) + hour_modifier, minute_modifier, _ = get_tz_modifiers(tz_name) + + times = time_range.split(",") + time_after = times[0] + time_before = 
times[1] + + start_hour_fun = fn.strftime( + "%H:%M", + fn.datetime(Event.start_time, "unixepoch", hour_modifier, minute_modifier), + ) + + # cases where user wants events overnight, ex: from 20:00 to 06:00 + # should use or operator + if time_after > time_before: + clauses.append( + ( + reduce( + operator.or_, + [(start_hour_fun > time_after), (start_hour_fun < time_before)], + ) + ) + ) + # all other cases should be and operator + else: + clauses.append((start_hour_fun > time_after)) + clauses.append((start_hour_fun < time_before)) + + if has_clip is not None: + clauses.append((Event.has_clip == has_clip)) + + if has_snapshot is not None: + clauses.append((Event.has_snapshot == has_snapshot)) + + if in_progress is not None: + clauses.append((Event.end_time.is_null(in_progress))) + + if include_thumbnails: + selected_columns.append(Event.thumbnail) + + if favorites: + clauses.append((Event.retain_indefinitely == favorites)) + + if max_score is not None: + clauses.append((Event.data["score"] <= max_score)) + + if min_score is not None: + clauses.append((Event.data["score"] >= min_score)) + + if min_length is not None: + clauses.append(((Event.end_time - Event.start_time) >= min_length)) + + if max_length is not None: + clauses.append(((Event.end_time - Event.start_time) <= max_length)) + + if is_submitted is not None: + if is_submitted == 0: + clauses.append((Event.plus_id.is_null())) + elif is_submitted > 0: + clauses.append((Event.plus_id != "")) + + if len(clauses) == 0: + clauses.append((True)) + + events = ( + Event.select(*selected_columns) + .where(reduce(operator.and_, clauses)) + .order_by(Event.start_time.desc()) + .limit(limit) + .dicts() + .iterator() + ) + + return jsonify(list(events)) + + +@EventBp.route("/events/summary") +def events_summary(): + tz_name = request.args.get("timezone", default="utc", type=str) + hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(tz_name) + has_clip = request.args.get("has_clip", type=int) + has_snapshot = request.args.get("has_snapshot", type=int) + + clauses = [] + + if has_clip is not None: + clauses.append((Event.has_clip == has_clip)) + + if has_snapshot is not None: + clauses.append((Event.has_snapshot == has_snapshot)) + + if len(clauses) == 0: + clauses.append((True)) + + groups = ( + Event.select( + Event.camera, + Event.label, + Event.sub_label, + fn.strftime( + "%Y-%m-%d", + fn.datetime( + Event.start_time, "unixepoch", hour_modifier, minute_modifier + ), + ).alias("day"), + Event.zones, + fn.COUNT(Event.id).alias("count"), + ) + .where(reduce(operator.and_, clauses)) + .group_by( + Event.camera, + Event.label, + Event.sub_label, + (Event.start_time + seconds_offset).cast("int") / (3600 * 24), + Event.zones, + ) + ) + + return jsonify([e for e in groups.dicts()]) + + +@EventBp.route("/events/", methods=("GET",)) +def event(id): + try: + return model_to_dict(Event.get(Event.id == id)) + except DoesNotExist: + return "Event not found", 404 + + +@EventBp.route("/events//retain", methods=("POST",)) +def set_retain(id): + try: + event = Event.get(Event.id == id) + except DoesNotExist: + return make_response( + jsonify({"success": False, "message": "Event " + id + " not found"}), 404 + ) + + event.retain_indefinitely = True + event.save() + + return make_response( + jsonify({"success": True, "message": "Event " + id + " retained"}), 200 + ) + + +@EventBp.route("/events//plus", methods=("POST",)) +def send_to_plus(id): + if not current_app.plus_api.is_active(): + message = "PLUS_API_KEY environment variable is not set" + 
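+        # Frigate+ uploads need an active PLUS_API_KEY, so fail fast with a
+        # 400 before loading the event or its snapshot.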
logger.error(message) + return make_response( + jsonify( + { + "success": False, + "message": message, + } + ), + 400, + ) + + include_annotation = ( + request.json.get("include_annotation") if request.is_json else None + ) + + try: + event = Event.get(Event.id == id) + except DoesNotExist: + message = f"Event {id} not found" + logger.error(message) + return make_response(jsonify({"success": False, "message": message}), 404) + + # events from before the conversion to relative dimensions cant include annotations + if event.data.get("box") is None: + include_annotation = None + + if event.end_time is None: + logger.error(f"Unable to load clean png for in-progress event: {event.id}") + return make_response( + jsonify( + { + "success": False, + "message": "Unable to load clean png for in-progress event", + } + ), + 400, + ) + + if event.plus_id: + message = "Already submitted to plus" + logger.error(message) + return make_response(jsonify({"success": False, "message": message}), 400) + + # load clean.png + try: + filename = f"{event.camera}-{event.id}-clean.png" + image = cv2.imread(os.path.join(CLIPS_DIR, filename)) + except Exception: + logger.error(f"Unable to load clean png for event: {event.id}") + return make_response( + jsonify( + {"success": False, "message": "Unable to load clean png for event"} + ), + 400, + ) + + if image is None or image.size == 0: + logger.error(f"Unable to load clean png for event: {event.id}") + return make_response( + jsonify( + {"success": False, "message": "Unable to load clean png for event"} + ), + 400, + ) + + try: + plus_id = current_app.plus_api.upload_image(image, event.camera) + except Exception as ex: + logger.exception(ex) + return make_response( + jsonify({"success": False, "message": "Error uploading image"}), + 400, + ) + + # store image id in the database + event.plus_id = plus_id + event.save() + + if include_annotation is not None: + box = event.data["box"] + + try: + current_app.plus_api.add_annotation( + event.plus_id, + box, + event.label, + ) + except ValueError: + message = "Error uploading annotation, unsupported label provided." 
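+            # A ValueError from add_annotation means the label is outside the
+            # set Frigate+ supports; it is reported separately from the
+            # generic upload failure handled below.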
+ logger.error(message) + return make_response( + jsonify({"success": False, "message": message}), + 400, + ) + except Exception as ex: + logger.exception(ex) + return make_response( + jsonify({"success": False, "message": "Error uploading annotation"}), + 400, + ) + + return make_response(jsonify({"success": True, "plus_id": plus_id}), 200) + + +@EventBp.route("/events//false_positive", methods=("PUT",)) +def false_positive(id): + if not current_app.plus_api.is_active(): + message = "PLUS_API_KEY environment variable is not set" + logger.error(message) + return make_response( + jsonify( + { + "success": False, + "message": message, + } + ), + 400, + ) + + try: + event = Event.get(Event.id == id) + except DoesNotExist: + message = f"Event {id} not found" + logger.error(message) + return make_response(jsonify({"success": False, "message": message}), 404) + + # events from before the conversion to relative dimensions cant include annotations + if event.data.get("box") is None: + message = "Events prior to 0.13 cannot be submitted as false positives" + logger.error(message) + return make_response(jsonify({"success": False, "message": message}), 400) + + if event.false_positive: + message = "False positive already submitted to Frigate+" + logger.error(message) + return make_response(jsonify({"success": False, "message": message}), 400) + + if not event.plus_id: + plus_response = send_to_plus(id) + if plus_response.status_code != 200: + return plus_response + # need to refetch the event now that it has a plus_id + event = Event.get(Event.id == id) + + region = event.data["region"] + box = event.data["box"] + + # provide top score if score is unavailable + score = ( + (event.data["top_score"] if event.data["top_score"] else event.top_score) + if event.data["score"] is None + else event.data["score"] + ) + + try: + current_app.plus_api.add_false_positive( + event.plus_id, + region, + box, + score, + event.label, + event.model_hash, + event.model_type, + event.detector_type, + ) + except ValueError: + message = "Error uploading false positive, unsupported label provided." 
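+        # Same contract as add_annotation: an unsupported label raises
+        # ValueError and is reported separately from other upload failures.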
+ logger.error(message) + return make_response( + jsonify({"success": False, "message": message}), + 400, + ) + except Exception as ex: + logger.exception(ex) + return make_response( + jsonify({"success": False, "message": "Error uploading false positive"}), + 400, + ) + + event.false_positive = True + event.save() + + return make_response(jsonify({"success": True, "plus_id": event.plus_id}), 200) + + +@EventBp.route("/events//retain", methods=("DELETE",)) +def delete_retain(id): + try: + event = Event.get(Event.id == id) + except DoesNotExist: + return make_response( + jsonify({"success": False, "message": "Event " + id + " not found"}), 404 + ) + + event.retain_indefinitely = False + event.save() + + return make_response( + jsonify({"success": True, "message": "Event " + id + " un-retained"}), 200 + ) + + +@EventBp.route("/events//sub_label", methods=("POST",)) +def set_sub_label(id): + try: + event: Event = Event.get(Event.id == id) + except DoesNotExist: + return make_response( + jsonify({"success": False, "message": "Event " + id + " not found"}), 404 + ) + + json: dict[str, any] = request.get_json(silent=True) or {} + new_sub_label = json.get("subLabel") + new_score = json.get("subLabelScore") + + if new_sub_label is None: + return make_response( + jsonify( + { + "success": False, + "message": "A sub label must be supplied", + } + ), + 400, + ) + + if new_sub_label and len(new_sub_label) > 100: + return make_response( + jsonify( + { + "success": False, + "message": new_sub_label + + " exceeds the 100 character limit for sub_label", + } + ), + 400, + ) + + if new_score is not None and (new_score > 1.0 or new_score < 0): + return make_response( + jsonify( + { + "success": False, + "message": new_score + + " does not fit within the expected bounds 0 <= score <= 1.0", + } + ), + 400, + ) + + if not event.end_time: + # update tracked object + tracked_obj: TrackedObject = ( + current_app.detected_frames_processor.camera_states[ + event.camera + ].tracked_objects.get(event.id) + ) + + if tracked_obj: + tracked_obj.obj_data["sub_label"] = (new_sub_label, new_score) + + # update timeline items + Timeline.update( + data=Timeline.data.update({"sub_label": (new_sub_label, new_score)}) + ).where(Timeline.source_id == id).execute() + + event.sub_label = new_sub_label + + if new_score: + data = event.data + data["sub_label_score"] = new_score + event.data = data + + event.save() + return make_response( + jsonify( + { + "success": True, + "message": "Event " + id + " sub label set to " + new_sub_label, + } + ), + 200, + ) + + +@EventBp.route("/events/", methods=("DELETE",)) +def delete_event(id): + try: + event = Event.get(Event.id == id) + except DoesNotExist: + return make_response( + jsonify({"success": False, "message": "Event " + id + " not found"}), 404 + ) + + media_name = f"{event.camera}-{event.id}" + if event.has_snapshot: + media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg") + media.unlink(missing_ok=True) + media = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.png") + media.unlink(missing_ok=True) + if event.has_clip: + media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4") + media.unlink(missing_ok=True) + + event.delete_instance() + Timeline.delete().where(Timeline.source_id == id).execute() + return make_response( + jsonify({"success": True, "message": "Event " + id + " deleted"}), 200 + ) + + +@EventBp.route("/events//
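
Example client usage of the restructured API (a minimal sketch, not part of the patch itself; it assumes a Frigate instance reachable at the hypothetical address http://localhost:5000):

import requests

BASE = "http://localhost:5000"  # hypothetical Frigate address

# Requests that carry no Origin header are unaffected by the new CSRF check.
print(requests.get(f"{BASE}/version").text)

# Browser-style requests that include an Origin header must also include
# x-csrf-token, otherwise check_csrf in create_app returns 401.
resp = requests.post(
    f"{BASE}/restart",
    headers={"origin": BASE, "x-csrf-token": "1"},
)
print(resp.json())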