2020-11-25 03:36:56 +01:00
|
|
|
import base64
|
2021-05-28 19:13:48 +02:00
|
|
|
from collections import OrderedDict
|
|
|
|
from datetime import datetime, timedelta
|
2021-11-09 02:05:39 +01:00
|
|
|
import copy
|
2021-02-13 16:33:32 +01:00
|
|
|
import json
|
2021-05-18 07:52:08 +02:00
|
|
|
import glob
|
2020-11-09 14:32:30 +01:00
|
|
|
import logging
|
2020-11-01 15:06:15 +01:00
|
|
|
import os
|
2021-05-28 19:13:48 +02:00
|
|
|
import re
|
2021-07-09 22:14:16 +02:00
|
|
|
import subprocess as sp
|
2020-11-01 22:37:51 +01:00
|
|
|
import time
|
2020-11-16 14:27:56 +01:00
|
|
|
from functools import reduce
|
2021-05-12 17:19:02 +02:00
|
|
|
from pathlib import Path
|
2020-11-01 22:37:51 +01:00
|
|
|
|
|
|
|
import cv2
|
2021-07-09 22:14:16 +02:00
|
|
|
from flask.helpers import send_file
|
2021-06-14 14:31:13 +02:00
|
|
|
|
2020-11-01 22:37:51 +01:00
|
|
|
import numpy as np
|
2021-02-17 14:23:32 +01:00
|
|
|
from flask import (
|
|
|
|
Blueprint,
|
|
|
|
Flask,
|
|
|
|
Response,
|
|
|
|
current_app,
|
|
|
|
jsonify,
|
|
|
|
make_response,
|
|
|
|
request,
|
|
|
|
)
|
2021-06-14 14:31:13 +02:00
|
|
|
|
2021-06-03 05:20:07 +02:00
|
|
|
from peewee import SqliteDatabase, operator, fn, DoesNotExist, Value
|
2020-11-01 15:06:15 +01:00
|
|
|
from playhouse.shortcuts import model_to_dict
|
|
|
|
|
2022-04-03 22:00:11 +02:00
|
|
|
from frigate.const import CLIPS_DIR, PLUS_ENV_VAR
|
2021-06-07 03:24:36 +02:00
|
|
|
from frigate.models import Event, Recordings
|
2021-01-04 00:35:58 +01:00
|
|
|
from frigate.stats import stats_snapshot
|
2020-12-19 15:48:34 +01:00
|
|
|
from frigate.util import calculate_region
|
2020-12-19 13:51:10 +01:00
|
|
|
from frigate.version import VERSION
|
2020-11-01 15:06:15 +01:00
|
|
|
|
2020-11-25 17:37:41 +01:00
|
|
|
# Module-level logger for this file's request handlers.
logger = logging.getLogger(__name__)


# All HTTP routes below are registered on this blueprint, which is attached
# to the Flask app in create_app().
bp = Blueprint("frigate", __name__)
|
2021-02-13 16:33:32 +01:00
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
def create_app(
    frigate_config,
    database: SqliteDatabase,
    stats_tracking,
    detected_frames_processor,
    plus_api,
):
    """Build and configure the Flask application serving the HTTP API.

    Shared state (config, stats tracker, frame processor, plus client) is
    attached to the app object so route handlers can reach it through
    ``current_app``.
    """
    app = Flask(__name__)

    @app.before_request
    def _open_db():
        # Lazily (re)open the database connection for each request.
        if database.is_closed():
            database.connect()

    @app.teardown_request
    def _close_db(exc):
        # Always release the connection once the request finishes.
        if not database.is_closed():
            database.close()

    # Expose shared state to the request handlers via the app object.
    app.frigate_config = frigate_config
    app.stats_tracking = stats_tracking
    app.detected_frames_processor = detected_frames_processor
    app.plus_api = plus_api

    app.register_blueprint(bp)

    return app
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/")
def is_healthy():
    """Liveness probe: confirms the API server is up and responding."""
    message = "Frigate is running. Alive and healthy!"
    return message
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/events/summary")
def events_summary():
    """Return event counts grouped by camera, label, day and zones.

    Query params (both optional, 0/1 ints):
      has_clip     -- only count events with/without a saved clip
      has_snapshot -- only count events with/without a saved snapshot
    """
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)

    clauses = []

    # `x is not None` (fixed from the non-idiomatic `not x is None`):
    # 0 is a meaningful filter value, so truthiness checks won't do.
    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    if len(clauses) == 0:
        # No filters supplied: match everything.
        clauses.append((True))

    groups = (
        Event.select(
            Event.camera,
            Event.label,
            fn.strftime(
                "%Y-%m-%d", fn.datetime(Event.start_time, "unixepoch", "localtime")
            ).alias("day"),
            Event.zones,
            fn.COUNT(Event.id).alias("count"),
        )
        .where(reduce(operator.and_, clauses))
        .group_by(
            Event.camera,
            Event.label,
            fn.strftime(
                "%Y-%m-%d", fn.datetime(Event.start_time, "unixepoch", "localtime")
            ),
            Event.zones,
        )
    )

    return jsonify([e for e in groups.dicts()])
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
2021-05-12 17:19:02 +02:00
|
|
|
@bp.route("/events/<id>", methods=("GET",))
def event(id):
    """Fetch a single event by id as a dict, or 404 if it does not exist."""
    try:
        db_event = Event.get(Event.id == id)
    except DoesNotExist:
        return "Event not found", 404
    return model_to_dict(db_event)
|
|
|
|
|
2021-05-18 07:52:08 +02:00
|
|
|
|
2022-02-22 05:03:01 +01:00
|
|
|
@bp.route("/events/<id>/retain", methods=("POST",))
def set_retain(id):
    """Mark an event as retained indefinitely (exempt from cleanup)."""
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        event = None

    if event is None:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    event.retain_indefinitely = True
    event.save()

    return make_response(
        jsonify({"success": True, "message": "Event " + id + " retained"}), 200
    )
|
|
|
|
|
|
|
|
|
2022-04-03 22:00:11 +02:00
|
|
|
@bp.route("/events/<id>/plus", methods=("POST",))
def send_to_plus(id):
    """Upload the event's clean snapshot image to Frigate+ and record the
    returned id on the event.

    Responds 400 when plus is not configured, the clean image cannot be
    loaded, the upload fails, or the event was already submitted; 404 when
    the event does not exist.
    """
    if current_app.plus_api is None:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": "PLUS_API_KEY environment variable is not set",
                }
            ),
            400,
        )

    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        # FIX: message previously read "Event<id> not found" (missing space),
        # inconsistent with every other event endpoint.
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    if event.plus_id:
        return make_response(
            jsonify({"success": False, "message": "Already submitted to plus"}), 400
        )

    # load clean.png
    try:
        filename = f"{event.camera}-{event.id}-clean.png"
        image = cv2.imread(os.path.join(CLIPS_DIR, filename))
    except Exception:
        image = None

    # FIX: cv2.imread does not raise on a missing/corrupt file -- it returns
    # None -- so the failure must be checked explicitly.
    if image is None:
        return make_response(
            jsonify(
                {"success": False, "message": "Unable to load clean png for event"}
            ),
            400,
        )

    try:
        plus_id = current_app.plus_api.upload_image(image, event.camera)
    except Exception as ex:
        return make_response(
            jsonify({"success": False, "message": str(ex)}),
            400,
        )

    # store image id in the database
    event.plus_id = plus_id
    event.save()

    return make_response(jsonify({"success": True, "plus_id": plus_id}), 200)
|
2022-04-03 22:00:11 +02:00
|
|
|
|
|
|
|
|
2022-02-22 05:03:01 +01:00
|
|
|
@bp.route("/events/<id>/retain", methods=("DELETE",))
def delete_retain(id):
    """Clear the indefinite-retention flag on an event."""
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )
    else:
        event.retain_indefinitely = False
        event.save()
        return make_response(
            jsonify({"success": True, "message": "Event " + id + " un-retained"}), 200
        )
|
|
|
|
|
2022-04-10 15:09:41 +02:00
|
|
|
|
2022-03-17 13:18:43 +01:00
|
|
|
@bp.route("/events/<id>/sub_label", methods=("POST",))
def set_sub_label(id):
    """Set (or clear) the sub label on an event.

    Body: optional JSON {"subLabel": <str>}. Omitting the body or sending
    null clears the sub label. Labels over 20 characters are rejected (400).
    """
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    if request.json:
        new_sub_label = request.json.get("subLabel")
    else:
        new_sub_label = None

    if new_sub_label and len(new_sub_label) > 20:
        return make_response(
            jsonify(
                {
                    "success": False,
                    "message": new_sub_label
                    + " exceeds the 20 character limit for sub_label",
                }
            ),
            400,
        )

    event.sub_label = new_sub_label
    event.save()
    return make_response(
        jsonify(
            {
                "success": True,
                # FIX: clearing the sub label (new_sub_label is None) used to
                # raise TypeError here by concatenating None into the message.
                "message": "Event "
                + id
                + " sub label set to "
                + (new_sub_label or ""),
            }
        ),
        200,
    )
|
2022-02-22 05:03:01 +01:00
|
|
|
|
2022-04-10 15:09:41 +02:00
|
|
|
|
2021-05-18 07:52:08 +02:00
|
|
|
@bp.route("/events/<id>", methods=("DELETE",))
def delete_event(id):
    """Delete an event row along with any snapshot/clip media it owns."""
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
        )

    media_name = f"{event.camera}-{event.id}"
    if event.has_snapshot:
        # remove both the annotated snapshot and the clean png
        for suffix in (".jpg", "-clean.png"):
            Path(os.path.join(CLIPS_DIR, media_name) + suffix).unlink(missing_ok=True)
    if event.has_clip:
        Path(os.path.join(CLIPS_DIR, media_name) + ".mp4").unlink(missing_ok=True)

    event.delete_instance()
    return make_response(
        jsonify({"success": True, "message": "Event " + id + " deleted"}), 200
    )
|
2021-05-12 17:19:02 +02:00
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
2021-05-18 07:52:08 +02:00
|
|
|
@bp.route("/events/<id>/thumbnail.jpg")
def event_thumbnail(id):
    """Return the event thumbnail as a JPEG.

    Falls back to the in-memory thumbnail of a currently tracked object when
    the event has not been persisted yet. ``?format=android`` pads the image
    to the 2:1 ratio preferred by android notifications.
    """
    format = request.args.get("format", "ios")
    thumbnail_bytes = None
    event_complete = False
    try:
        event = Event.get(Event.id == id)
        # `is not None` fixed from the non-idiomatic `not x is None`
        if event.end_time is not None:
            event_complete = True
        thumbnail_bytes = base64.b64decode(event.thumbnail)
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            camera_states = current_app.detected_frames_processor.camera_states.values()
            for camera_state in camera_states:
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    if tracked_obj is not None:
                        thumbnail_bytes = tracked_obj.get_thumbnail()
        # FIX: narrowed from a bare `except:` which would also swallow
        # SystemExit/KeyboardInterrupt.
        except Exception:
            return "Event not found", 404

    if thumbnail_bytes is None:
        return "Event not found", 404

    # android notifications prefer a 2:1 ratio
    if format == "android":
        jpg_as_np = np.frombuffer(thumbnail_bytes, dtype=np.uint8)
        img = cv2.imdecode(jpg_as_np, flags=1)
        # pad left/right with black to double the width
        thumbnail = cv2.copyMakeBorder(
            img,
            0,
            0,
            int(img.shape[1] * 0.5),
            int(img.shape[1] * 0.5),
            cv2.BORDER_CONSTANT,
            (0, 0, 0),
        )
        ret, jpg = cv2.imencode(".jpg", thumbnail, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
        thumbnail_bytes = jpg.tobytes()

    response = make_response(thumbnail_bytes)
    response.headers["Content-Type"] = "image/jpeg"
    if event_complete:
        # a finished event's thumbnail never changes; allow long client caching
        response.headers["Cache-Control"] = "private, max-age=31536000"
    return response
|
2020-11-24 22:44:59 +01:00
|
|
|
|
2022-04-03 22:00:11 +02:00
|
|
|
|
2022-03-11 14:56:39 +01:00
|
|
|
@bp.route("/<camera_name>/<label>/best.jpg")
@bp.route("/<camera_name>/<label>/thumbnail.jpg")
def label_thumbnail(camera_name, label):
    """Thumbnail for the most recent snapshot-backed event on a camera,
    optionally restricted to one label ("any" matches all labels)."""
    # Build the base query once; the label filter is only added when needed.
    event_query = (
        Event.select()
        .where(Event.camera == camera_name)
        .where(Event.has_snapshot == True)
        .order_by(Event.start_time.desc())
    )
    if label != "any":
        event_query = event_query.where(Event.label == label)

    try:
        event = event_query.get()

        return event_thumbnail(event.id)
    except DoesNotExist:
        # no matching event: serve a small black placeholder
        frame = np.zeros((175, 175, 3), np.uint8)
        ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])

        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        return response
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/events/<id>/snapshot.jpg")
def event_snapshot(id):
    """Return the snapshot JPEG for a finished event, or render one on the
    fly for an object that is still being tracked.

    Query params for in-progress objects: timestamp, bbox, crop, h, quality.
    ``?download=1`` adds a Content-Disposition attachment header.
    """
    download = request.args.get("download", type=bool)
    jpg_bytes = None
    try:
        # only events with a recorded end_time have a snapshot on disk
        event = Event.get(Event.id == id, Event.end_time != None)
        if not event.has_snapshot:
            return "Snapshot not available", 404
        # read snapshot from disk
        with open(
            os.path.join(CLIPS_DIR, f"{event.camera}-{id}.jpg"), "rb"
        ) as image_file:
            jpg_bytes = image_file.read()
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            camera_states = current_app.detected_frames_processor.camera_states.values()
            for camera_state in camera_states:
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    # `is not None` fixed from `not x is None`
                    if tracked_obj is not None:
                        jpg_bytes = tracked_obj.get_jpg_bytes(
                            timestamp=request.args.get("timestamp", type=int),
                            bounding_box=request.args.get("bbox", type=int),
                            crop=request.args.get("crop", type=int),
                            height=request.args.get("h", type=int),
                            quality=request.args.get("quality", default=70, type=int),
                        )
        # FIX: narrowed from a bare `except:` which would also swallow
        # SystemExit/KeyboardInterrupt.
        except Exception:
            return "Event not found", 404
    except Exception:
        # FIX: narrowed from a bare `except:` (covers e.g. missing file)
        return "Event not found", 404

    if jpg_bytes is None:
        return "Event not found", 404

    response = make_response(jpg_bytes)
    response.headers["Content-Type"] = "image/jpeg"
    if download:
        response.headers[
            "Content-Disposition"
        ] = f"attachment; filename=snapshot-{id}.jpg"
    return response
|
|
|
|
|
2022-04-03 22:00:11 +02:00
|
|
|
|
2022-03-11 14:56:39 +01:00
|
|
|
@bp.route("/<camera_name>/<label>/snapshot.jpg")
def label_snapshot(camera_name, label):
    """Snapshot of the most recent snapshot-backed event on a camera,
    optionally restricted to one label ("any" matches all labels)."""
    # Base query, with the label clause appended only when filtering.
    event_query = (
        Event.select()
        .where(Event.camera == camera_name)
        .where(Event.has_snapshot == True)
        .order_by(Event.start_time.desc())
    )
    if label != "any":
        event_query = event_query.where(Event.label == label)

    try:
        event = event_query.get()
        return event_snapshot(event.id)
    except DoesNotExist:
        # no matching event: serve a black 720p placeholder
        frame = np.zeros((720, 1280, 3), np.uint8)
        ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])

        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        return response
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
2021-07-09 22:14:16 +02:00
|
|
|
@bp.route("/events/<id>/clip.mp4")
def event_clip(id):
    """Serve an event's clip, falling back to an on-demand clip assembled
    from recordings when the pre-generated file is missing."""
    download = request.args.get("download", type=bool)

    try:
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        return "Event not found.", 404

    if not event.has_clip:
        return "Clip not available", 404

    file_name = f"{event.camera}-{id}.mp4"
    clip_path = os.path.join(CLIPS_DIR, file_name)

    if not os.path.isfile(clip_path):
        # No saved clip yet; build one from the recordings instead.
        if event.end_time is None:
            end_ts = datetime.now().timestamp()
        else:
            end_ts = event.end_time
        return recording_clip(event.camera, event.start_time, end_ts)

    response = make_response()
    response.headers["Content-Description"] = "File Transfer"
    response.headers["Cache-Control"] = "no-cache"
    response.headers["Content-Type"] = "video/mp4"
    if download:
        response.headers["Content-Disposition"] = "attachment; filename=%s" % file_name
    response.headers["Content-Length"] = os.path.getsize(clip_path)
    # delegate the actual file transfer to nginx
    response.headers[
        "X-Accel-Redirect"
    ] = f"/clips/{file_name}"  # nginx: http://wiki.nginx.org/NginxXSendfile

    return response
|
2021-07-09 22:14:16 +02:00
|
|
|
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
@bp.route("/events")
def events():
    """List events, newest first.

    Query params: limit (default 100), camera/label/zone (default "all"),
    after/before (unix timestamps), has_clip/has_snapshot (0/1),
    include_thumbnails (default 1; 0 drops the base64 thumbnail field).
    """
    limit = request.args.get("limit", 100)
    camera = request.args.get("camera", "all")
    label = request.args.get("label", "all")
    zone = request.args.get("zone", "all")
    after = request.args.get("after", type=float)
    before = request.args.get("before", type=float)
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)
    include_thumbnails = request.args.get("include_thumbnails", default=1, type=int)

    clauses = []
    excluded_fields = []

    if camera != "all":
        clauses.append((Event.camera == camera))

    if label != "all":
        clauses.append((Event.label == label))

    if zone != "all":
        # zones is stored as JSON; glob-match the zone name inside it
        clauses.append((Event.zones.cast("text") % f'*"{zone}"*'))

    if after:
        clauses.append((Event.start_time > after))

    if before:
        clauses.append((Event.start_time < before))

    # `is not None` (fixed from the non-idiomatic `not x is None`):
    # 0 is a valid filter value, so truthiness checks won't do.
    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    if not include_thumbnails:
        excluded_fields.append(Event.thumbnail)

    if len(clauses) == 0:
        # No filters supplied: match everything.
        clauses.append((True))

    events = (
        Event.select()
        .where(reduce(operator.and_, clauses))
        .order_by(Event.start_time.desc())
        .limit(limit)
    )

    return jsonify([model_to_dict(e, exclude=excluded_fields) for e in events])
|
2020-11-01 15:06:15 +01:00
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/config")
def config():
    """Return the running configuration as JSON, with ffmpeg commands
    flattened into printable strings and a Frigate+ enablement flag."""
    full_config = current_app.frigate_config.dict()

    # add in the ffmpeg_cmds
    for camera_name, camera in current_app.frigate_config.cameras.items():
        camera_dict = full_config["cameras"][camera_name]
        # deep-copy so the live config object is never mutated
        camera_dict["ffmpeg_cmds"] = copy.deepcopy(camera.ffmpeg_cmds)
        for cmd in camera_dict["ffmpeg_cmds"]:
            cmd["cmd"] = " ".join(cmd["cmd"])

    full_config["plus"] = {"enabled": PLUS_ENV_VAR in os.environ}

    return jsonify(full_config)
|
2021-06-24 07:45:27 +02:00
|
|
|
|
|
|
|
|
|
|
|
@bp.route("/config/schema")
def config_schema():
    """Serve the JSON schema describing the configuration format."""
    schema = current_app.frigate_config.schema_json()
    return current_app.response_class(schema, mimetype="application/json")
|
2020-11-18 04:11:19 +01:00
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/version")
def version():
    """Return the running Frigate version string."""
    return VERSION
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/stats")
def stats():
    """Return a point-in-time snapshot of runtime statistics as JSON."""
    return jsonify(stats_snapshot(current_app.stats_tracking))
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/<camera_name>")
def mjpeg_feed(camera_name):
    """Stream a camera as multipart MJPEG, with optional overlay options
    (bbox, timestamp, zones, mask, motion, regions) and fps/h controls."""
    fps = int(request.args.get("fps", "3"))
    height = int(request.args.get("h", "360"))
    # map draw-option names to their query-string parameter names
    option_params = {
        "bounding_boxes": "bbox",
        "timestamp": "timestamp",
        "zones": "zones",
        "mask": "mask",
        "motion_boxes": "motion",
        "regions": "regions",
    }
    draw_options = {
        name: request.args.get(arg, type=int) for name, arg in option_params.items()
    }
    if camera_name not in current_app.frigate_config.cameras:
        return "Camera named {} not found".format(camera_name), 404

    # return a multipart response
    return Response(
        imagestream(
            current_app.detected_frames_processor,
            camera_name,
            fps,
            height,
            draw_options,
        ),
        mimetype="multipart/x-mixed-replace; boundary=frame",
    )
|
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
|
|
|
@bp.route("/<camera_name>/latest.jpg")
def latest_frame(camera_name):
    """Return the most recent frame for a camera as a JPEG, optionally
    annotated (bbox, timestamp, zones, mask, motion, regions) and resized
    via ``h``/``quality`` query parameters."""
    # map draw-option names to their query-string parameter names
    option_params = {
        "bounding_boxes": "bbox",
        "timestamp": "timestamp",
        "zones": "zones",
        "mask": "mask",
        "motion_boxes": "motion",
        "regions": "regions",
    }
    draw_options = {
        name: request.args.get(arg, type=int) for name, arg in option_params.items()
    }
    resize_quality = request.args.get("quality", default=70, type=int)

    if camera_name not in current_app.frigate_config.cameras:
        return "Camera named {} not found".format(camera_name), 404

    frame = current_app.detected_frames_processor.get_current_frame(
        camera_name, draw_options
    )
    if frame is None:
        # no frame available yet: fall back to a black 720p image
        frame = np.zeros((720, 1280, 3), np.uint8)

    # scale to the requested height, preserving aspect ratio
    height = int(request.args.get("h", str(frame.shape[0])))
    width = int(height * frame.shape[1] / frame.shape[0])

    frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)

    ret, jpg = cv2.imencode(
        ".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality]
    )
    response = make_response(jpg.tobytes())
    response.headers["Content-Type"] = "image/jpeg"
    return response
|
2021-01-09 18:26:46 +01:00
|
|
|
|
2021-02-17 14:23:32 +01:00
|
|
|
|
2021-05-28 19:13:48 +02:00
|
|
|
@bp.route("/<camera_name>/recordings")
def recordings(camera_name):
    """Summarize a camera's recordings per day and hour.

    For each hour the response carries "delay" gaps (seconds-into-hour ->
    gap length, for rendering holes in the timeline) and the packed list of
    events overlapping that hour.
    """
    # day -> hour -> {"delay": {...}, "events": [...]}, insertion-ordered
    dates = OrderedDict()

    # Retrieve all recordings for this camera
    recordings = (
        Recordings.select()
        .where(Recordings.camera == camera_name)
        .order_by(Recordings.start_time.asc())
    )

    # end timestamp of the previously processed recording segment
    last_end = 0
    recording: Recordings
    for recording in recordings:
        date = datetime.fromtimestamp(recording.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")

        # Create Day Record
        if key not in dates:
            dates[key] = OrderedDict()

        # Create Hour Record
        if hour not in dates[key]:
            dates[key][hour] = {"delay": {}, "events": []}

        # Check for delay
        the_hour = datetime.strptime(f"{key} {hour}", "%Y-%m-%d %H").timestamp()
        # diff current recording start time and the greater of the previous end time or top of the hour
        diff = recording.start_time - max(last_end, the_hour)
        # Determine seconds into recording
        seconds = 0
        if datetime.fromtimestamp(last_end).strftime("%H") == hour:
            seconds = int(last_end - the_hour)
        # Determine the delay
        delay = min(int(diff), 3600 - seconds)
        if delay > 1:
            # Add an offset for any delay greater than a second
            dates[key][hour]["delay"][seconds] = delay

        last_end = recording.end_time

    # Packing intervals to return all events with same label and overlapping times as one row.
    # See: https://blogs.solidq.com/en/sqlserver/packing-intervals/
    # NOTE: end_time is padded by 15s so near-adjacent events merge; events
    # still in progress (end_time IS NULL) are appended unpacked.
    events = Event.raw(
        """WITH C1 AS
        (
        SELECT id, label, camera, top_score, start_time AS ts, +1 AS type, 1 AS sub
        FROM event
        WHERE camera = ?
        UNION ALL
        SELECT id, label, camera, top_score, end_time + 15 AS ts, -1 AS type, 0 AS sub
        FROM event
        WHERE camera = ?
        ),
        C2 AS
        (
        SELECT C1.*,
        SUM(type) OVER(PARTITION BY label ORDER BY ts, type DESC
        ROWS BETWEEN UNBOUNDED PRECEDING
        AND CURRENT ROW) - sub AS cnt
        FROM C1
        ),
        C3 AS
        (
        SELECT id, label, camera, top_score, ts,
        (ROW_NUMBER() OVER(PARTITION BY label ORDER BY ts) - 1) / 2 + 1
        AS grpnum
        FROM C2
        WHERE cnt = 0
        )
        SELECT id, label, camera, top_score, start_time, end_time
        FROM event
        WHERE camera = ? AND end_time IS NULL
        UNION ALL
        SELECT MIN(id) as id, label, camera, MAX(top_score) as top_score, MIN(ts) AS start_time, max(ts) AS end_time
        FROM C3
        GROUP BY label, grpnum
        ORDER BY start_time;""",
        camera_name,
        camera_name,
        camera_name,
    )

    # attach each packed event to the day/hour bucket it started in
    event: Event
    for event in events:
        date = datetime.fromtimestamp(event.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")
        if key in dates and hour in dates[key]:
            dates[key][hour]["events"].append(
                model_to_dict(
                    event,
                    exclude=[
                        Event.false_positive,
                        Event.zones,
                        Event.thumbnail,
                        Event.has_clip,
                        Event.has_snapshot,
                    ],
                )
            )

    return jsonify(
        [
            {
                "date": date,
                "events": sum([len(value["events"]) for value in hours.values()]),
                "recordings": [
                    {"hour": hour, "delay": value["delay"], "events": value["events"]}
                    for hour, value in hours.items()
                ],
            }
            for date, hours in dates.items()
        ]
    )
|
|
|
|
|
|
|
|
|
2021-07-09 22:14:16 +02:00
|
|
|
@bp.route("/<camera>/start/<int:start_ts>/end/<int:end_ts>/clip.mp4")
@bp.route("/<camera>/start/<float:start_ts>/end/<float:end_ts>/clip.mp4")
def recording_clip(camera, start_ts, end_ts):
    """Concatenate the recording segments overlapping [start_ts, end_ts]
    into a single mp4 in the cache dir and serve it via nginx
    X-Accel-Redirect.

    Pass ?download=... to force a download (Content-Disposition: attachment).
    """
    download = request.args.get("download", type=bool)

    # All segments that overlap the requested window: those starting inside
    # it, ending inside it, or fully containing it.
    recordings = (
        Recordings.select()
        .where(
            (Recordings.start_time.between(start_ts, end_ts))
            | (Recordings.end_time.between(start_ts, end_ts))
            | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
        )
        .where(Recordings.camera == camera)
        .order_by(Recordings.start_time.asc())
    )

    playlist_lines = []
    clip: Recordings
    for clip in recordings:
        playlist_lines.append(f"file '{clip.path}'")
        # if this is the starting clip, add an inpoint
        if clip.start_time < start_ts:
            playlist_lines.append(f"inpoint {int(start_ts - clip.start_time)}")
        # if this is the ending clip, add an outpoint
        if clip.end_time > end_ts:
            playlist_lines.append(f"outpoint {int(end_ts - clip.start_time)}")

    # Fail fast instead of handing ffmpeg an empty concat playlist, which
    # would otherwise produce an empty/broken mp4 served with a 200.
    if not playlist_lines:
        logger.error(
            f"No recordings found for {camera} between {start_ts} and {end_ts}"
        )
        return "No recordings found.", 404

    file_name = f"clip_{camera}_{start_ts}-{end_ts}.mp4"
    path = f"/tmp/cache/{file_name}"

    # Stream-copy (no re-encode); +faststart relocates the moov atom so
    # playback can start before the download completes.
    ffmpeg_cmd = [
        "ffmpeg",
        "-y",
        "-protocol_whitelist",
        "pipe,file",
        "-f",
        "concat",
        "-safe",
        "0",
        "-i",
        "/dev/stdin",
        "-c",
        "copy",
        "-movflags",
        "+faststart",
        path,
    ]

    p = sp.run(
        ffmpeg_cmd,
        input="\n".join(playlist_lines),
        encoding="ascii",
        capture_output=True,
    )
    if p.returncode != 0:
        logger.error(p.stderr)
        return f"Could not create clip from recordings for {camera}.", 500

    response = make_response()
    response.headers["Content-Description"] = "File Transfer"
    response.headers["Cache-Control"] = "no-cache"
    response.headers["Content-Type"] = "video/mp4"
    if download:
        response.headers["Content-Disposition"] = "attachment; filename=%s" % file_name
    response.headers["Content-Length"] = os.path.getsize(path)
    response.headers[
        "X-Accel-Redirect"
    ] = f"/cache/{file_name}"  # nginx: http://wiki.nginx.org/NginxXSendfile

    return response
|
|
|
|
|
|
|
|
|
|
|
|
@bp.route("/vod/<camera>/start/<int:start_ts>/end/<int:end_ts>")
@bp.route("/vod/<camera>/start/<float:start_ts>/end/<float:end_ts>")
def vod_ts(camera, start_ts, end_ts):
    """Build an nginx-vod-module JSON mapping for the recording segments
    of `camera` that overlap the [start_ts, end_ts] window."""
    # Segments that start inside, end inside, or fully span the window.
    recordings = (
        Recordings.select()
        .where(
            Recordings.start_time.between(start_ts, end_ts)
            | Recordings.end_time.between(start_ts, end_ts)
            | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
        )
        .where(Recordings.camera == camera)
        .order_by(Recordings.start_time.asc())
    )

    clips = []
    durations = []

    recording: Recordings
    for recording in recordings:
        segment = {"type": "source", "path": recording.path}
        remaining_ms = int(recording.duration * 1000)

        # Trim the head of a segment that begins before the window.
        if recording.start_time < start_ts:
            head_trim_ms = int((start_ts - recording.start_time) * 1000)
            segment["clipFrom"] = head_trim_ms
            remaining_ms -= head_trim_ms

        # Trim the tail of a segment that runs past the window.
        if recording.end_time > end_ts:
            remaining_ms -= int((recording.end_time - end_ts) * 1000)

        if remaining_ms > 0:
            clips.append(segment)
            durations.append(remaining_ms)
        else:
            logger.warning(f"Recording clip is missing or empty: {recording.path}")

    if not clips:
        logger.error("No recordings found for the requested time range")
        return "No recordings found.", 404

    # Segments older than an hour are assumed settled and cacheable.
    hour_ago = datetime.now() - timedelta(hours=1)
    return jsonify(
        {
            "cache": hour_ago.timestamp() > start_ts,
            "discontinuity": False,
            "durations": durations,
            "sequences": [{"clips": clips}],
        }
    )
|
|
|
|
|
|
|
|
|
2021-07-09 22:14:16 +02:00
|
|
|
@bp.route("/vod/<year_month>/<day>/<hour>/<camera>")
def vod_hour(year_month, day, hour, camera):
    """Delegate a one-hour VOD request to vod_ts by converting the URL
    date components into a timestamp window."""
    hour_start = datetime.strptime(f"{year_month}-{day} {hour}", "%Y-%m-%d %H")
    # End just shy of the next hour so adjacent hour windows don't overlap.
    hour_end = hour_start + timedelta(hours=1) - timedelta(milliseconds=1)

    return vod_ts(camera, hour_start.timestamp(), hour_end.timestamp())
|
|
|
|
|
|
|
|
|
|
|
|
@bp.route("/vod/event/<id>")
def vod_event(id):
    """Serve a VOD mapping for one event: use the exported clip file when
    it exists, otherwise fall back to the raw recording segments."""
    try:
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        logger.error(f"Event not found: {id}")
        return "Event not found.", 404

    if not event.has_clip:
        logger.error(f"Event does not have recordings: {id}")
        return "Recordings not available", 404

    clip_path = os.path.join(CLIPS_DIR, f"{event.camera}-{id}.mp4")

    if not os.path.isfile(clip_path):
        # No exported clip on disk — build the mapping from recordings.
        # In-progress events have no end time yet, so clamp to "now".
        end_ts = (
            datetime.now().timestamp() if event.end_time is None else event.end_time
        )
        mapping = vod_ts(event.camera, event.start_time, end_ts)
        # vod_ts signals "no recordings" with a (body, 404) tuple; record
        # that so the event stops advertising a clip it doesn't have.
        not_found = (
            isinstance(mapping, tuple) and len(mapping) == 2 and mapping[1] == 404
        )
        if not_found:
            Event.update(has_clip=False).where(Event.id == id).execute()
        return mapping

    # Exported clip exists, so the event has ended and end_time is set.
    duration = int((event.end_time - event.start_time) * 1000)
    return jsonify(
        {
            "cache": True,
            "discontinuity": False,
            "durations": [duration],
            "sequences": [{"clips": [{"type": "source", "path": clip_path}]}],
        }
    )
|
|
|
|
|
|
|
|
|
2020-12-19 15:22:31 +01:00
|
|
|
def imagestream(detected_frames_processor, camera_name, fps, height, draw_options):
    """Yield an endless multipart/x-mixed-replace JPEG stream of the
    camera's current frame, throttled to roughly `fps` frames/second."""
    frame_interval = 1 / fps
    while True:
        # max out at specified FPS
        time.sleep(frame_interval)
        frame = detected_frames_processor.get_current_frame(camera_name, draw_options)
        if frame is None:
            # No frame available yet: emit a black 16:9 placeholder.
            frame = np.zeros((height, int(height * 16 / 9), 3), np.uint8)

        # Scale to the requested height while preserving aspect ratio.
        scaled_width = int(height * frame.shape[1] / frame.shape[0])
        frame = cv2.resize(
            frame, dsize=(scaled_width, height), interpolation=cv2.INTER_LINEAR
        )

        _, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
        yield (
            b"--frame\r\n"
            b"Content-Type: image/jpeg\r\n\r\n" + jpg.tobytes() + b"\r\n\r\n"
        )
|