blakeblackshear.frigate/frigate/http.py

960 lines
29 KiB
Python
Raw Normal View History

2020-11-25 03:36:56 +01:00
import base64
2021-05-28 19:13:48 +02:00
from collections import OrderedDict
from datetime import datetime, timedelta
2021-11-09 02:05:39 +01:00
import copy
2020-11-09 14:32:30 +01:00
import logging
2020-11-01 15:06:15 +01:00
import os
2021-07-09 22:14:16 +02:00
import subprocess as sp
2020-11-01 22:37:51 +01:00
import time
2020-11-16 14:27:56 +01:00
from functools import reduce
from pathlib import Path
from urllib.parse import unquote
2020-11-01 22:37:51 +01:00
import cv2
2021-06-14 14:31:13 +02:00
2020-11-01 22:37:51 +01:00
import numpy as np
2021-02-17 14:23:32 +01:00
from flask import (
Blueprint,
Flask,
Response,
current_app,
jsonify,
make_response,
request,
)
2021-06-14 14:31:13 +02:00
from peewee import SqliteDatabase, operator, fn, DoesNotExist
2020-11-01 15:06:15 +01:00
from playhouse.shortcuts import model_to_dict
2022-07-22 14:56:28 +02:00
from frigate.const import CLIPS_DIR
from frigate.models import Event, Recordings
from frigate.object_processing import TrackedObject, TrackedObjectProcessor
from frigate.stats import stats_snapshot
from frigate.util import clean_camera_user_pass
2020-12-19 13:51:10 +01:00
from frigate.version import VERSION
2020-11-01 15:06:15 +01:00
2020-11-25 17:37:41 +01:00
logger = logging.getLogger(__name__)
2021-02-17 14:23:32 +01:00
bp = Blueprint("frigate", __name__)
2021-02-13 16:33:32 +01:00
2021-02-17 14:23:32 +01:00
def create_app(
    frigate_config,
    database: SqliteDatabase,
    stats_tracking,
    detected_frames_processor,
    plus_api,
):
    """Build and configure the Flask application serving the Frigate HTTP API."""
    app = Flask(__name__)

    @app.before_request
    def _db_connect():
        # Make sure each request has a live database connection.
        if database.is_closed():
            database.connect()

    @app.teardown_request
    def _db_close(exc):
        # Release the connection once the request is done.
        if not database.is_closed():
            database.close()

    # Shared state exposed to route handlers via current_app.
    app.frigate_config = frigate_config
    app.stats_tracking = stats_tracking
    app.detected_frames_processor = detected_frames_processor
    app.plus_api = plus_api

    app.register_blueprint(bp)

    return app
2021-02-17 14:23:32 +01:00
@bp.route("/")
def is_healthy():
    """Simple liveness probe endpoint."""
    return "Frigate is running. Alive and healthy!"
2021-02-17 14:23:32 +01:00
@bp.route("/events/summary")
def events_summary():
    """Return event counts grouped by camera, label, local day, and zones.

    Optional query params ``has_clip`` / ``has_snapshot`` (int) filter the
    events included in the summary.
    """
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)

    clauses = []

    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    # peewee requires at least one clause for reduce(); a bare True matches all
    if len(clauses) == 0:
        clauses.append((True))

    groups = (
        Event.select(
            Event.camera,
            Event.label,
            fn.strftime(
                "%Y-%m-%d", fn.datetime(Event.start_time, "unixepoch", "localtime")
            ).alias("day"),
            Event.zones,
            fn.COUNT(Event.id).alias("count"),
        )
        .where(reduce(operator.and_, clauses))
        .group_by(
            Event.camera,
            Event.label,
            fn.strftime(
                "%Y-%m-%d", fn.datetime(Event.start_time, "unixepoch", "localtime")
            ),
            Event.zones,
        )
    )

    return jsonify([e for e in groups.dicts()])
2021-02-17 14:23:32 +01:00
@bp.route("/events/<id>", methods=("GET",))
def event(id):
    """Fetch a single event by id; 404 when it does not exist."""
    try:
        return model_to_dict(Event.get(Event.id == id))
    except DoesNotExist:
        return "Event not found", 404
2021-05-18 07:52:08 +02:00
@bp.route("/events/<id>/retain", methods=("POST",))
def set_retain(id):
    """Flag an event so retention cleanup never deletes it."""
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    event.retain_indefinitely = True
    event.save()

    return make_response(
        jsonify({"success": True, "message": "Event " + id + " retained"}), 200
    )
2022-04-03 22:00:11 +02:00
@bp.route("/events/<id>/plus", methods=("POST",))
def send_to_plus(id):
    """Upload the event's clean snapshot to Frigate+ and store the returned id."""
    # Frigate+ must be configured before anything can be uploaded.
    if not current_app.plus_api.is_active():
        message = "PLUS_API_KEY environment variable is not set"
        logger.error(message)
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": message,
                }
            ),
            400,
        )

    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        message = f"Event {id} not found"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 404)

    # Never upload the same event twice.
    if event.plus_id:
        message = "Already submitted to plus"
        logger.error(message)
        return make_response(jsonify({"success": False, "message": message}), 400)

    # load clean.png
    try:
        filename = f"{event.camera}-{event.id}-clean.png"
        image = cv2.imread(os.path.join(CLIPS_DIR, filename))
    except Exception:
        logger.error(f"Unable to load clean png for event: {event.id}")
        return make_response(
            jsonify(
                {"success": False, "message": "Unable to load clean png for event"}
            ),
            400,
        )

    try:
        plus_id = current_app.plus_api.upload_image(image, event.camera)
    except Exception as ex:
        logger.exception(ex)
        return make_response(
            jsonify({"success": False, "message": str(ex)}),
            400,
        )

    # store image id in the database
    event.plus_id = plus_id
    event.save()

    return make_response(jsonify({"success": True, "plus_id": plus_id}), 200)
2022-04-03 22:00:11 +02:00
@bp.route("/events/<id>/retain", methods=("DELETE",))
def delete_retain(id):
    """Remove the indefinite-retention flag from an event."""
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    event.retain_indefinitely = False
    event.save()

    return make_response(
        jsonify({"success": True, "message": "Event " + id + " un-retained"}), 200
    )
2022-04-10 15:09:41 +02:00
@bp.route("/events/<id>/sub_label", methods=("POST",))
def set_sub_label(id):
    """Set (or clear) the sub label of an event.

    Body: JSON with an optional ``subLabel`` key; a missing/None value clears
    the sub label. Values longer than 20 characters are rejected with 400.
    """
    try:
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    if request.json:
        new_sub_label = request.json.get("subLabel")
    else:
        new_sub_label = None

    if new_sub_label and len(new_sub_label) > 20:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": new_sub_label
                    + " exceeds the 20 character limit for sub_label",
                }
            ),
            400,
        )

    # For in-progress events, also update the live tracked object so the
    # change is reflected immediately.
    if not event.end_time:
        tracked_obj: TrackedObject = (
            current_app.detected_frames_processor.camera_states[
                event.camera
            ].tracked_objects.get(event.id)
        )

        if tracked_obj:
            tracked_obj.obj_data["sub_label"] = new_sub_label

    event.sub_label = new_sub_label
    event.save()

    # Bug fix: concatenating None into the message raised a TypeError (HTTP
    # 500) whenever the sub label was being cleared; fall back to "".
    return make_response(
        jsonify(
            {
                "success": True,
                "message": "Event "
                + id
                + " sub label set to "
                + (new_sub_label if new_sub_label is not None else ""),
            }
        ),
        200,
    )
2022-04-10 15:09:41 +02:00
@bp.route("/sub_labels")
def get_sub_labels():
    """Return the list of distinct, non-null sub labels across all events."""
    try:
        events = Event.select(Event.sub_label).distinct()
    except Exception as e:
        # Bug fix: the original passed "404" as a second positional argument
        # to jsonify(), which serialized it into the body and returned HTTP
        # 200; use make_response to actually set the status code.
        return make_response(
            jsonify({"success": False, "message": f"Failed to get sub_labels: {e}"}),
            404,
        )

    # NULL sub labels are not meaningful to callers; drop them.
    sub_labels = [e.sub_label for e in events if e.sub_label is not None]

    return jsonify(sub_labels)
2021-05-18 07:52:08 +02:00
@bp.route("/events/<id>", methods=("DELETE",))
def delete_event(id):
    """Delete an event row plus any snapshot/clip media it owns on disk."""
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    media_name = f"{event.camera}-{event.id}"

    # Media files may already be gone; missing_ok keeps deletion idempotent.
    if event.has_snapshot:
        media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
        media.unlink(missing_ok=True)
        media = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.png")
        media.unlink(missing_ok=True)
    if event.has_clip:
        media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4")
        media.unlink(missing_ok=True)

    event.delete_instance()

    return make_response(
        jsonify({"success": True, "message": "Event " + id + " deleted"}), 200
    )
2021-02-17 14:23:32 +01:00
2021-05-18 07:52:08 +02:00
@bp.route("/events/<id>/thumbnail.jpg")
def event_thumbnail(id, max_cache_age=2592000):
    """Serve the event thumbnail jpeg.

    Falls back to the live tracked object when the event has not been
    persisted yet. ``format=android`` pads the image to a 2:1 ratio.
    Completed events get an aggressive Cache-Control of ``max_cache_age``.
    """
    format = request.args.get("format", "ios")
    thumbnail_bytes = None
    event_complete = False
    try:
        event = Event.get(Event.id == id)
        if event.end_time is not None:
            event_complete = True
        thumbnail_bytes = base64.b64decode(event.thumbnail)
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            camera_states = current_app.detected_frames_processor.camera_states.values()
            for camera_state in camera_states:
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    if tracked_obj is not None:
                        thumbnail_bytes = tracked_obj.get_thumbnail()
        # narrowed from a bare except: so KeyboardInterrupt/SystemExit propagate
        except Exception:
            return "Event not found", 404

    if thumbnail_bytes is None:
        return "Event not found", 404

    # android notifications prefer a 2:1 ratio
    if format == "android":
        jpg_as_np = np.frombuffer(thumbnail_bytes, dtype=np.uint8)
        img = cv2.imdecode(jpg_as_np, flags=1)
        thumbnail = cv2.copyMakeBorder(
            img,
            0,
            0,
            int(img.shape[1] * 0.5),
            int(img.shape[1] * 0.5),
            cv2.BORDER_CONSTANT,
            (0, 0, 0),
        )
        ret, jpg = cv2.imencode(".jpg", thumbnail, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
        thumbnail_bytes = jpg.tobytes()

    response = make_response(thumbnail_bytes)
    response.headers["Content-Type"] = "image/jpeg"
    # finished events never change, so they can be cached; in-progress ones not
    if event_complete:
        response.headers["Cache-Control"] = f"private, max-age={max_cache_age}"
    else:
        response.headers["Cache-Control"] = "no-store"
    return response
2020-11-24 22:44:59 +01:00
2022-04-03 22:00:11 +02:00
@bp.route("/<camera_name>/<label>/best.jpg")
@bp.route("/<camera_name>/<label>/thumbnail.jpg")
def label_thumbnail(camera_name, label):
    """Serve the thumbnail of the most recent event for a camera/label.

    ``label == "any"`` matches events of any label; a blank placeholder jpeg
    is returned when no event exists.
    """
    label = unquote(label)
    if label == "any":
        event_query = (
            Event.select()
            .where(Event.camera == camera_name)
            .order_by(Event.start_time.desc())
        )
    else:
        event_query = (
            Event.select()
            .where(Event.camera == camera_name)
            .where(Event.label == label)
            .order_by(Event.start_time.desc())
        )

    try:
        event = event_query.get()

        return event_thumbnail(event.id, 60)
    except DoesNotExist:
        # no matching event: serve a black placeholder, never cached
        frame = np.zeros((175, 175, 3), np.uint8)
        ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])

        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        response.headers["Cache-Control"] = "no-store"
        return response
2021-02-17 14:23:32 +01:00
@bp.route("/events/<id>/snapshot.jpg")
def event_snapshot(id):
    """Serve the event snapshot jpeg from disk, or render one from the live
    tracked object when the event has not finished yet.

    Supports ``timestamp``/``bbox``/``crop``/``h``/``quality`` query params
    for live renders and ``download`` to force an attachment disposition.
    """
    download = request.args.get("download", type=bool)
    event_complete = False
    jpg_bytes = None
    try:
        event = Event.get(Event.id == id, Event.end_time != None)
        event_complete = True
        if not event.has_snapshot:
            return "Snapshot not available", 404
        # read snapshot from disk
        with open(
            os.path.join(CLIPS_DIR, f"{event.camera}-{id}.jpg"), "rb"
        ) as image_file:
            jpg_bytes = image_file.read()
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            camera_states = current_app.detected_frames_processor.camera_states.values()
            for camera_state in camera_states:
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    if tracked_obj is not None:
                        jpg_bytes = tracked_obj.get_jpg_bytes(
                            timestamp=request.args.get("timestamp", type=int),
                            bounding_box=request.args.get("bbox", type=int),
                            crop=request.args.get("crop", type=int),
                            height=request.args.get("h", type=int),
                            quality=request.args.get("quality", default=70, type=int),
                        )
        # narrowed from bare except: so KeyboardInterrupt/SystemExit propagate
        except Exception:
            return "Event not found", 404
    except Exception:
        return "Event not found", 404

    if jpg_bytes is None:
        return "Event not found", 404

    response = make_response(jpg_bytes)
    response.headers["Content-Type"] = "image/jpeg"
    # finished snapshots are immutable and safe to cache for a year
    if event_complete:
        response.headers["Cache-Control"] = "private, max-age=31536000"
    else:
        response.headers["Cache-Control"] = "no-store"
    if download:
        response.headers[
            "Content-Disposition"
        ] = f"attachment; filename=snapshot-{id}.jpg"
    return response
2022-04-03 22:00:11 +02:00
@bp.route("/<camera_name>/<label>/snapshot.jpg")
def label_snapshot(camera_name, label):
    """Serve the snapshot of the most recent event with a snapshot for a
    camera/label; ``label == "any"`` matches any label. Falls back to a black
    placeholder frame when nothing matches.
    """
    label = unquote(label)
    if label == "any":
        event_query = (
            Event.select()
            .where(Event.camera == camera_name)
            .where(Event.has_snapshot == True)
            .order_by(Event.start_time.desc())
        )
    else:
        event_query = (
            Event.select()
            .where(Event.camera == camera_name)
            .where(Event.label == label)
            .where(Event.has_snapshot == True)
            .order_by(Event.start_time.desc())
        )

    try:
        event = event_query.get()
        return event_snapshot(event.id)
    except DoesNotExist:
        frame = np.zeros((720, 1280, 3), np.uint8)
        ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])

        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        return response
2021-02-17 14:23:32 +01:00
2021-07-09 22:14:16 +02:00
@bp.route("/events/<id>/clip.mp4")
def event_clip(id):
    """Serve an event's saved clip, or assemble one from recordings when the
    file is not on disk. Delivery is delegated to nginx via X-Accel-Redirect.
    """
    download = request.args.get("download", type=bool)

    try:
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        return "Event not found.", 404

    if not event.has_clip:
        return "Clip not available", 404

    file_name = f"{event.camera}-{id}.mp4"
    clip_path = os.path.join(CLIPS_DIR, file_name)

    if not os.path.isfile(clip_path):
        # no saved clip; build one on the fly from the recording segments
        end_ts = (
            datetime.now().timestamp() if event.end_time is None else event.end_time
        )
        return recording_clip(event.camera, event.start_time, end_ts)

    response = make_response()
    response.headers["Content-Description"] = "File Transfer"
    response.headers["Cache-Control"] = "no-cache"
    response.headers["Content-Type"] = "video/mp4"
    if download:
        response.headers["Content-Disposition"] = "attachment; filename=%s" % file_name
    response.headers["Content-Length"] = os.path.getsize(clip_path)
    response.headers[
        "X-Accel-Redirect"
    ] = f"/clips/{file_name}"  # nginx: http://wiki.nginx.org/NginxXSendfile

    return response
2021-07-09 22:14:16 +02:00
2021-02-17 14:23:32 +01:00
@bp.route("/events")
def events():
    """List events filtered by query string params.

    Filters: ``camera``, ``label``, ``sub_label``, ``zone`` (default "all"),
    ``after``/``before`` (epoch floats), ``has_clip``/``has_snapshot`` (int),
    ``include_thumbnails`` (default 1), ``limit`` (default 100).
    """
    limit = request.args.get("limit", 100)
    camera = request.args.get("camera", "all")
    label = unquote(request.args.get("label", "all"))
    sub_label = request.args.get("sub_label", "all")
    zone = request.args.get("zone", "all")
    after = request.args.get("after", type=float)
    before = request.args.get("before", type=float)
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)
    include_thumbnails = request.args.get("include_thumbnails", default=1, type=int)

    clauses = []
    excluded_fields = []

    # thumbnail is large, so it is only selected when explicitly requested
    selected_columns = [
        Event.id,
        Event.camera,
        Event.label,
        Event.zones,
        Event.start_time,
        Event.end_time,
        Event.has_clip,
        Event.has_snapshot,
        Event.plus_id,
        Event.retain_indefinitely,
        Event.sub_label,
        Event.top_score,
    ]

    if camera != "all":
        clauses.append((Event.camera == camera))

    if label != "all":
        clauses.append((Event.label == label))

    if sub_label != "all":
        clauses.append((Event.sub_label == sub_label))

    if zone != "all":
        # zones is stored as JSON text; glob-match the quoted zone name
        clauses.append((Event.zones.cast("text") % f'*"{zone}"*'))

    if after:
        clauses.append((Event.start_time > after))

    if before:
        clauses.append((Event.start_time < before))

    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    if not include_thumbnails:
        excluded_fields.append(Event.thumbnail)
    else:
        selected_columns.append(Event.thumbnail)

    if len(clauses) == 0:
        clauses.append((True))

    events = (
        Event.select(*selected_columns)
        .where(reduce(operator.and_, clauses))
        .order_by(Event.start_time.desc())
        .limit(limit)
    )

    return jsonify([model_to_dict(e, exclude=excluded_fields) for e in events])
2020-11-01 15:06:15 +01:00
2021-02-17 14:23:32 +01:00
@bp.route("/config")
def config():
    """Return the running config with credential-sanitized ffmpeg commands."""
    config = current_app.frigate_config.dict()

    # add in the ffmpeg_cmds
    for camera_name, camera in current_app.frigate_config.cameras.items():
        camera_dict = config["cameras"][camera_name]
        # deep copy so stripping credentials never mutates the live config
        camera_dict["ffmpeg_cmds"] = copy.deepcopy(camera.ffmpeg_cmds)
        for cmd in camera_dict["ffmpeg_cmds"]:
            cmd["cmd"] = clean_camera_user_pass(" ".join(cmd["cmd"]))

    config["plus"] = {"enabled": current_app.plus_api.is_active()}

    return jsonify(config)
2021-06-24 07:45:27 +02:00
@bp.route("/config/schema")
def config_schema():
    """Return the JSON schema describing the config model."""
    return current_app.response_class(
        current_app.frigate_config.schema_json(), mimetype="application/json"
    )
2020-11-18 04:11:19 +01:00
2021-02-17 14:23:32 +01:00
@bp.route("/version")
def version():
    """Return the Frigate version string."""
    return VERSION
2021-02-17 14:23:32 +01:00
@bp.route("/stats")
def stats():
    """Return a point-in-time snapshot of runtime statistics."""
    return jsonify(stats_snapshot(current_app.stats_tracking))
2021-02-17 14:23:32 +01:00
@bp.route("/<camera_name>")
def mjpeg_feed(camera_name):
    """Stream a camera's annotated frames as an MJPEG multipart response.

    Query params: ``fps`` (default 3), ``h`` (height, default 360), plus int
    toggles for the overlay draw options.
    """
    fps = int(request.args.get("fps", "3"))
    height = int(request.args.get("h", "360"))
    draw_options = {
        "bounding_boxes": request.args.get("bbox", type=int),
        "timestamp": request.args.get("timestamp", type=int),
        "zones": request.args.get("zones", type=int),
        "mask": request.args.get("mask", type=int),
        "motion_boxes": request.args.get("motion", type=int),
        "regions": request.args.get("regions", type=int),
    }
    if camera_name not in current_app.frigate_config.cameras:
        return "Camera named {} not found".format(camera_name), 404

    # return a multipart response
    return Response(
        imagestream(
            current_app.detected_frames_processor,
            camera_name,
            fps,
            height,
            draw_options,
        ),
        mimetype="multipart/x-mixed-replace; boundary=frame",
    )
2021-02-17 14:23:32 +01:00
@bp.route("/<camera_name>/latest.jpg")
def latest_frame(camera_name):
    """Serve the camera's most recent frame as a jpeg.

    Honors the overlay draw-option toggles plus ``h`` (output height) and
    ``quality`` (jpeg quality, default 70).
    """
    draw_options = {
        "bounding_boxes": request.args.get("bbox", type=int),
        "timestamp": request.args.get("timestamp", type=int),
        "zones": request.args.get("zones", type=int),
        "mask": request.args.get("mask", type=int),
        "motion_boxes": request.args.get("motion", type=int),
        "regions": request.args.get("regions", type=int),
    }
    resize_quality = request.args.get("quality", default=70, type=int)

    if camera_name not in current_app.frigate_config.cameras:
        return "Camera named {} not found".format(camera_name), 404

    frame = current_app.detected_frames_processor.get_current_frame(
        camera_name, draw_options
    )
    if frame is None:
        # no frame available yet; serve a black 720p placeholder
        frame = np.zeros((720, 1280, 3), np.uint8)

    height = int(request.args.get("h", str(frame.shape[0])))
    width = int(height * frame.shape[1] / frame.shape[0])

    frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)

    ret, jpg = cv2.imencode(
        ".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality]
    )
    response = make_response(jpg.tobytes())
    response.headers["Content-Type"] = "image/jpeg"
    response.headers["Cache-Control"] = "no-store"
    return response
2021-01-09 18:26:46 +01:00
2021-02-17 14:23:32 +01:00
2022-05-10 14:48:29 +02:00
# return hourly summary for recordings of camera
@bp.route("/<camera_name>/recordings/summary")
def recordings_summary(camera_name):
    """Summarize a camera's recordings per local-time hour, grouped by day.

    Each hour carries total duration, motion, objects, and the count of
    events that have clips in that hour.
    """
    recording_groups = (
        Recordings.select(
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(Recordings.start_time, "unixepoch", "localtime"),
            ).alias("hour"),
            fn.SUM(Recordings.duration).alias("duration"),
            fn.SUM(Recordings.motion).alias("motion"),
            fn.SUM(Recordings.objects).alias("objects"),
        )
        .where(Recordings.camera == camera_name)
        .group_by(
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(Recordings.start_time, "unixepoch", "localtime"),
            )
        )
        .order_by(
            # Bug fix: the format string was "%Y-%m-%d H" (literal "H" instead
            # of "%H"), so the sort key dropped the hour and ordering within a
            # day was effectively undefined. Now matches the group_by key.
            fn.strftime(
                "%Y-%m-%d %H",
                fn.datetime(Recordings.start_time, "unixepoch", "localtime"),
            ).desc()
        )
    )

    event_groups = (
        Event.select(
            fn.strftime(
                "%Y-%m-%d %H", fn.datetime(Event.start_time, "unixepoch", "localtime")
            ).alias("hour"),
            fn.COUNT(Event.id).alias("count"),
        )
        .where(Event.camera == camera_name, Event.has_clip)
        .group_by(
            fn.strftime(
                "%Y-%m-%d %H", fn.datetime(Event.start_time, "unixepoch", "localtime")
            ),
        )
        .objects()
    )

    # hour string -> number of events with clips in that hour
    event_map = {g.hour: g.count for g in event_groups}

    days = {}

    for recording_group in recording_groups.objects():
        parts = recording_group.hour.split()
        hour = parts[1]
        day = parts[0]
        events_count = event_map.get(recording_group.hour, 0)
        hour_data = {
            "hour": hour,
            "events": events_count,
            "motion": recording_group.motion,
            "objects": recording_group.objects,
            "duration": round(recording_group.duration),
        }
        if day not in days:
            days[day] = {"events": events_count, "hours": [hour_data], "day": day}
        else:
            days[day]["events"] += events_count
            days[day]["hours"].append(hour_data)

    return jsonify(list(days.values()))
# return hour of recordings data for camera
@bp.route("/<camera_name>/recordings")
def recordings(camera_name):
    """List a camera's recording segments overlapping [after, before].

    Defaults to the last hour when the bounds are not supplied.
    """
    after = request.args.get(
        "after", type=float, default=(datetime.now() - timedelta(hours=1)).timestamp()
    )
    before = request.args.get("before", type=float, default=datetime.now().timestamp())

    recordings = (
        Recordings.select(
            Recordings.id,
            Recordings.start_time,
            Recordings.end_time,
            Recordings.segment_size,
            Recordings.motion,
            Recordings.objects,
        )
        .where(
            Recordings.camera == camera_name,
            Recordings.end_time >= after,
            Recordings.start_time <= before,
        )
        .order_by(Recordings.start_time)
    )

    return jsonify([e for e in recordings.dicts()])
2021-05-28 19:13:48 +02:00
@bp.route("/<camera_name>/start/<int:start_ts>/end/<int:end_ts>/clip.mp4")
@bp.route("/<camera_name>/start/<float:start_ts>/end/<float:end_ts>/clip.mp4")
def recording_clip(camera_name, start_ts, end_ts):
    """Assemble (or reuse from cache) an mp4 spanning [start_ts, end_ts]
    by concatenating the overlapping recording segments with ffmpeg, then
    hand delivery to nginx via X-Accel-Redirect.
    """
    download = request.args.get("download", type=bool)

    # every segment that overlaps the requested window, in playback order
    recordings = (
        Recordings.select()
        .where(
            (Recordings.start_time.between(start_ts, end_ts))
            | (Recordings.end_time.between(start_ts, end_ts))
            | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
        )
        .where(Recordings.camera == camera_name)
        .order_by(Recordings.start_time.asc())
    )

    playlist_lines = []
    clip: Recordings
    for clip in recordings:
        playlist_lines.append(f"file '{clip.path}'")
        # if this is the starting clip, add an inpoint
        if clip.start_time < start_ts:
            playlist_lines.append(f"inpoint {int(start_ts - clip.start_time)}")
        # if this is the ending clip, add an outpoint
        if clip.end_time > end_ts:
            playlist_lines.append(f"outpoint {int(end_ts - clip.start_time)}")

    file_name = f"clip_{camera_name}_{start_ts}-{end_ts}.mp4"
    path = f"/tmp/cache/{file_name}"

    if not os.path.exists(path):
        # stream-copy concat: no re-encode, playlist fed over stdin
        ffmpeg_cmd = [
            "ffmpeg",
            "-y",
            "-protocol_whitelist",
            "pipe,file",
            "-f",
            "concat",
            "-safe",
            "0",
            "-i",
            "/dev/stdin",
            "-c",
            "copy",
            "-movflags",
            "+faststart",
            path,
        ]

        p = sp.run(
            ffmpeg_cmd,
            input="\n".join(playlist_lines),
            encoding="ascii",
            capture_output=True,
        )

        if p.returncode != 0:
            logger.error(p.stderr)
            return f"Could not create clip from recordings for {camera_name}.", 500
    else:
        logger.debug(
            f"Ignoring subsequent request for {path} as it already exists in the cache."
        )

    response = make_response()
    response.headers["Content-Description"] = "File Transfer"
    response.headers["Cache-Control"] = "no-cache"
    response.headers["Content-Type"] = "video/mp4"
    if download:
        response.headers["Content-Disposition"] = "attachment; filename=%s" % file_name
    response.headers["Content-Length"] = os.path.getsize(path)
    response.headers[
        "X-Accel-Redirect"
    ] = f"/cache/{file_name}"  # nginx: http://wiki.nginx.org/NginxXSendfile

    return response
@bp.route("/vod/<camera_name>/start/<int:start_ts>/end/<int:end_ts>")
@bp.route("/vod/<camera_name>/start/<float:start_ts>/end/<float:end_ts>")
def vod_ts(camera_name, start_ts, end_ts):
    """Return a VOD playlist description covering [start_ts, end_ts].

    Each overlapping recording segment becomes one clip; the final clip's
    duration is trimmed when it extends beyond end_ts.
    """
    recordings = (
        Recordings.select()
        .where(
            Recordings.start_time.between(start_ts, end_ts)
            | Recordings.end_time.between(start_ts, end_ts)
            | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
        )
        .where(Recordings.camera == camera_name)
        .order_by(Recordings.start_time.asc())
    )

    clips = []
    durations = []

    recording: Recordings
    for recording in recordings:
        clip = {"type": "source", "path": recording.path}
        duration = int(recording.duration * 1000)

        # Determine if we need to end the last clip early
        if recording.end_time > end_ts:
            duration -= int((recording.end_time - end_ts) * 1000)

        if duration > 0:
            clip["keyFrameDurations"] = [duration]
            clips.append(clip)
            durations.append(duration)
        else:
            logger.warning(f"Recording clip is missing or empty: {recording.path}")

    if not clips:
        logger.error("No recordings found for the requested time range")
        return "No recordings found.", 404

    # ranges starting more than an hour ago are immutable and cacheable
    hour_ago = datetime.now() - timedelta(hours=1)
    return jsonify(
        {
            "cache": hour_ago.timestamp() > start_ts,
            "discontinuity": False,
            "durations": durations,
            "sequences": [{"clips": clips}],
        }
    )
@bp.route("/vod/<year_month>/<day>/<hour>/<camera_name>")
def vod_hour(year_month, day, hour, camera_name):
    """VOD playlist for one local-time hour of a camera."""
    start_date = datetime.strptime(f"{year_month}-{day} {hour}", "%Y-%m-%d %H")
    # end just before the next hour boundary so hours never overlap
    end_date = start_date + timedelta(hours=1) - timedelta(milliseconds=1)

    return vod_ts(camera_name, start_date.timestamp(), end_date.timestamp())
2021-07-09 22:14:16 +02:00
@bp.route("/vod/event/<id>")
def vod_event(id):
    """VOD playlist for a single event.

    Uses the saved clip when it exists on disk; otherwise delegates to
    vod_ts over the event's time range, clearing has_clip when the
    recordings turn out to be missing.
    """
    try:
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        logger.error(f"Event not found: {id}")
        return "Event not found.", 404

    if not event.has_clip:
        logger.error(f"Event does not have recordings: {id}")
        return "Recordings not available", 404

    clip_path = os.path.join(CLIPS_DIR, f"{event.camera}-{id}.mp4")

    if not os.path.isfile(clip_path):
        end_ts = (
            datetime.now().timestamp() if event.end_time is None else event.end_time
        )
        vod_response = vod_ts(event.camera, event.start_time, end_ts)
        # If the recordings are not found, set has_clip to false
        if (
            # isinstance instead of type(...) == tuple: idiomatic and
            # robust to tuple subclasses
            isinstance(vod_response, tuple)
            and len(vod_response) == 2
            and vod_response[1] == 404
        ):
            Event.update(has_clip=False).where(Event.id == id).execute()
        return vod_response

    duration = int((event.end_time - event.start_time) * 1000)
    return jsonify(
        {
            "cache": True,
            "discontinuity": False,
            "durations": [duration],
            "sequences": [{"clips": [{"type": "source", "path": clip_path}]}],
        }
    )
def imagestream(detected_frames_processor, camera_name, fps, height, draw_options):
    """Generator yielding jpeg frames forever as multipart/x-mixed-replace
    chunks, throttled to at most ``fps`` frames per second.
    """
    while True:
        # max out at specified FPS
        time.sleep(1 / fps)
        frame = detected_frames_processor.get_current_frame(camera_name, draw_options)
        if frame is None:
            # no frame yet: emit a black 16:9 placeholder at the target height
            frame = np.zeros((height, int(height * 16 / 9), 3), np.uint8)

        width = int(height * frame.shape[1] / frame.shape[0])
        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_LINEAR)

        ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
        yield (
            b"--frame\r\n"
            b"Content-Type: image/jpeg\r\n\r\n" + jpg.tobytes() + b"\r\n\r\n"
        )