import base64
import glob
import json
import logging
import os
import re
from collections import OrderedDict
from datetime import datetime, timedelta
from functools import reduce
from pathlib import Path

import cv2
import gevent
import numpy as np
from flask import (
    Blueprint,
    Flask,
    Response,
    current_app,
    jsonify,
    make_response,
    request,
)
from flask_sockets import Sockets
from peewee import SqliteDatabase, operator, fn, DoesNotExist
from playhouse.shortcuts import model_to_dict

from frigate.const import CLIPS_DIR, RECORD_DIR
from frigate.models import Event
from frigate.stats import stats_snapshot
from frigate.util import calculate_region
from frigate.version import VERSION

logger = logging.getLogger(__name__)

bp = Blueprint("frigate", __name__)
ws = Blueprint("ws", __name__)


class MqttBackend:
|
2021-02-13 16:33:32 +01:00
|
|
|
"""Interface for registering and updating WebSocket clients."""
|
|
|
|
|
|
|
|
def __init__(self, mqtt_client, topic_prefix):
|
|
|
|
self.clients = list()
|
|
|
|
self.mqtt_client = mqtt_client
|
|
|
|
self.topic_prefix = topic_prefix
|
|
|
|
|
|
|
|
def register(self, client):
|
|
|
|
"""Register a WebSocket connection for Mqtt updates."""
|
|
|
|
self.clients.append(client)
|
|
|
|
|
2021-02-13 16:54:20 +01:00
|
|
|
def publish(self, message):
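        """Parse a websocket message as json and publish it to mqtt."""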
        try:
            json_message = json.loads(message)
            json_message = {
                "topic": f"{self.topic_prefix}/{json_message['topic']}",
                "payload": json_message["payload"],
                "retain": json_message.get("retain", False),
            }
        except Exception:
            logger.warning("Unable to parse websocket message as valid json.")
            return

        logger.debug(
            f"Publishing mqtt message from websockets at {json_message['topic']}."
        )
        self.mqtt_client.publish(
            json_message["topic"],
            json_message["payload"],
            retain=json_message["retain"],
        )

    def run(self):
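        """Attach an mqtt callback that relays messages to websocket clients."""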
        def send(client, userdata, message):
            """Sends mqtt messages to clients."""
            try:
                logger.debug(f"Received mqtt message on {message.topic}.")
                ws_message = json.dumps(
                    {
                        "topic": message.topic.replace(f"{self.topic_prefix}/", ""),
                        "payload": message.payload.decode(),
                    }
                )
            except Exception:
                # if the payload can't be decoded, don't relay it to clients
                logger.debug(
                    f"MQTT payload for {message.topic} wasn't text. Skipping..."
                )
                return

            # iterate over a copy so stale clients can be removed mid-loop
            for client in list(self.clients):
                try:
                    client.send(ws_message)
                except Exception:
                    logger.debug(
                        "Removing websocket client due to a closed connection."
                    )
                    self.clients.remove(client)

        self.mqtt_client.message_callback_add(f"{self.topic_prefix}/#", send)

    def start(self):
        """Maintains mqtt subscription in the background."""
        gevent.spawn(self.run)


def create_app(
    frigate_config,
    database: SqliteDatabase,
    stats_tracking,
    detected_frames_processor,
    mqtt_client,
):
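    """Create the Flask app and wire up database hooks, blueprints, and the mqtt relay."""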
    app = Flask(__name__)
    sockets = Sockets(app)

    @app.before_request
    def _db_connect():
        database.connect()

    @app.teardown_request
    def _db_close(exc):
        if not database.is_closed():
            database.close()

    app.frigate_config = frigate_config
    app.stats_tracking = stats_tracking
    app.detected_frames_processor = detected_frames_processor

    app.register_blueprint(bp)
    sockets.register_blueprint(ws)

    app.mqtt_backend = MqttBackend(mqtt_client, frigate_config.mqtt.topic_prefix)
    app.mqtt_backend.start()

    return app


@bp.route("/")
def is_healthy():
    return "Frigate is running. Alive and healthy!"


@bp.route("/events/summary")
def events_summary():
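    """Summarize event counts grouped by camera, label, day, and zones."""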
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)

    clauses = []

    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    if len(clauses) == 0:
        # no filters were supplied, so match everything
        clauses.append((1 == 1))

    groups = (
        Event.select(
            Event.camera,
            Event.label,
            fn.strftime(
                "%Y-%m-%d", fn.datetime(Event.start_time, "unixepoch", "localtime")
            ).alias("day"),
            Event.zones,
            fn.COUNT(Event.id).alias("count"),
        )
        .where(reduce(operator.and_, clauses))
        .group_by(
            Event.camera,
            Event.label,
            fn.strftime(
                "%Y-%m-%d", fn.datetime(Event.start_time, "unixepoch", "localtime")
            ),
            Event.zones,
        )
    )

    return jsonify([e for e in groups.dicts()])


@bp.route("/events/<id>", methods=("GET",))
def event(id):
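    """Return a single event by id."""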
    try:
        return model_to_dict(Event.get(Event.id == id))
    except DoesNotExist:
        return "Event not found", 404


@bp.route("/events/<id>", methods=("DELETE",))
def delete_event(id):
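    """Delete an event row along with any snapshot/clip files it owns."""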
    try:
        event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": f"Event {id} not found"}), 404
        )

    media_name = f"{event.camera}-{event.id}"
    if event.has_snapshot:
        media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
        media.unlink(missing_ok=True)
    if event.has_clip:
        media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4")
        media.unlink(missing_ok=True)

    event.delete_instance()

    return make_response(
        jsonify({"success": True, "message": f"Event {id} deleted"}), 200
    )


@bp.route("/events/<id>/thumbnail.jpg")
def event_thumbnail(id):
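    """Return an event thumbnail, falling back to objects still being tracked."""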
    format = request.args.get("format", "ios")
    thumbnail_bytes = None
    try:
        event = Event.get(Event.id == id)
        thumbnail_bytes = base64.b64decode(event.thumbnail)
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            camera_states = current_app.detected_frames_processor.camera_states.values()
            for camera_state in camera_states:
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    if tracked_obj is not None:
                        thumbnail_bytes = tracked_obj.get_thumbnail()
        except Exception:
            return "Event not found", 404

    if thumbnail_bytes is None:
        return "Event not found", 404

    # android notifications prefer a 2:1 ratio
    if format == "android":
        jpg_as_np = np.frombuffer(thumbnail_bytes, dtype=np.uint8)
        img = cv2.imdecode(jpg_as_np, flags=1)
        # pad the sides with black borders to reach 2:1
        thumbnail = cv2.copyMakeBorder(
            img,
            0,
            0,
            int(img.shape[1] * 0.5),
            int(img.shape[1] * 0.5),
            cv2.BORDER_CONSTANT,
            value=(0, 0, 0),
        )
        ret, jpg = cv2.imencode(".jpg", thumbnail, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
        thumbnail_bytes = jpg.tobytes()

    response = make_response(thumbnail_bytes)
    response.headers["Content-Type"] = "image/jpeg"
    return response


@bp.route("/events/<id>/snapshot.jpg")
def event_snapshot(id):
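    """Return an event snapshot from disk, or render one from a live tracked object."""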
    jpg_bytes = None
    try:
        event = Event.get(Event.id == id)
        if not event.has_snapshot:
            return "Snapshot not available", 404
        # read snapshot from disk
        with open(
            os.path.join(CLIPS_DIR, f"{event.camera}-{id}.jpg"), "rb"
        ) as image_file:
            jpg_bytes = image_file.read()
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            camera_states = current_app.detected_frames_processor.camera_states.values()
            for camera_state in camera_states:
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    if tracked_obj is not None:
                        jpg_bytes = tracked_obj.get_jpg_bytes(
                            timestamp=request.args.get("timestamp", type=int),
                            bounding_box=request.args.get("bbox", type=int),
                            crop=request.args.get("crop", type=int),
                            height=request.args.get("h", type=int),
                        )
        except Exception:
            return "Event not found", 404
    except Exception:
        return "Event not found", 404

    # the object may no longer be tracked and have no stored snapshot
    if jpg_bytes is None:
        return "Event not found", 404

    response = make_response(jpg_bytes)
    response.headers["Content-Type"] = "image/jpeg"
    return response


@bp.route("/events")
def events():
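    """List events, newest first. Filterable by camera, label, zone, time
    range (after/before), and clip/snapshot availability."""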
    limit = request.args.get("limit", 100, type=int)
    camera = request.args.get("camera")
    label = request.args.get("label")
    zone = request.args.get("zone")
    after = request.args.get("after", type=float)
    before = request.args.get("before", type=float)
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)
    include_thumbnails = request.args.get("include_thumbnails", default=1, type=int)

    clauses = []
    excluded_fields = []

    if camera:
        clauses.append((Event.camera == camera))

    if label:
        clauses.append((Event.label == label))

    if zone:
        clauses.append((Event.zones.cast("text") % f'*"{zone}"*'))

    if after:
        clauses.append((Event.start_time >= after))

    if before:
        clauses.append((Event.start_time <= before))

    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    if not include_thumbnails:
        excluded_fields.append(Event.thumbnail)

    if len(clauses) == 0:
        # no filters were supplied, so match everything
        clauses.append((1 == 1))

    events = (
        Event.select()
        .where(reduce(operator.and_, clauses))
        .order_by(Event.start_time.desc())
        .limit(limit)
    )

    return jsonify([model_to_dict(e, exclude=excluded_fields) for e in events])


@bp.route("/config")
def config():
    return jsonify(current_app.frigate_config.to_dict())


@bp.route("/version")
def version():
    return VERSION


@bp.route("/stats")
def stats():
    stats = stats_snapshot(current_app.stats_tracking)
    return jsonify(stats)


@bp.route("/<camera_name>/<label>/best.jpg")
def best(camera_name, label):
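    """Return a jpg of the current best example of a label on a camera."""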
    if camera_name in current_app.frigate_config.cameras:
        best_object = current_app.detected_frames_processor.get_best(camera_name, label)
        best_frame = best_object.get("frame")
        if best_frame is None:
            best_frame = np.zeros((720, 1280, 3), np.uint8)
        else:
            best_frame = cv2.cvtColor(best_frame, cv2.COLOR_YUV2BGR_I420)

        crop = bool(request.args.get("crop", 0, type=int))
        if crop:
            box = best_object.get("box", (0, 0, 300, 300))
            region = calculate_region(
                best_frame.shape, box[0], box[1], box[2], box[3], 1.1
            )
            best_frame = best_frame[region[1] : region[3], region[0] : region[2]]

        height = int(request.args.get("h", str(best_frame.shape[0])))
        width = int(height * best_frame.shape[1] / best_frame.shape[0])

        best_frame = cv2.resize(
            best_frame, dsize=(width, height), interpolation=cv2.INTER_AREA
        )
        ret, jpg = cv2.imencode(".jpg", best_frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        return response
    else:
        return f"Camera named {camera_name} not found", 404


@bp.route("/<camera_name>")
def mjpeg_feed(camera_name):
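    """Stream an mjpeg feed for a camera, honoring fps/h and draw options."""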
    fps = int(request.args.get("fps", "3"))
    height = int(request.args.get("h", "360"))
    draw_options = {
        "bounding_boxes": request.args.get("bbox", type=int),
        "timestamp": request.args.get("timestamp", type=int),
        "zones": request.args.get("zones", type=int),
        "mask": request.args.get("mask", type=int),
        "motion_boxes": request.args.get("motion", type=int),
        "regions": request.args.get("regions", type=int),
    }
    if camera_name in current_app.frigate_config.cameras:
        # return a multipart response
        return Response(
            imagestream(
                current_app.detected_frames_processor,
                camera_name,
                fps,
                height,
                draw_options,
            ),
            mimetype="multipart/x-mixed-replace; boundary=frame",
        )
    else:
        return f"Camera named {camera_name} not found", 404


@bp.route("/<camera_name>/latest.jpg")
def latest_frame(camera_name):
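    """Return the camera's most recent frame as a jpg, with optional overlays."""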
    draw_options = {
        "bounding_boxes": request.args.get("bbox", type=int),
        "timestamp": request.args.get("timestamp", type=int),
        "zones": request.args.get("zones", type=int),
        "mask": request.args.get("mask", type=int),
        "motion_boxes": request.args.get("motion", type=int),
        "regions": request.args.get("regions", type=int),
    }
    if camera_name in current_app.frigate_config.cameras:
        frame = current_app.detected_frames_processor.get_current_frame(
            camera_name, draw_options
        )
        if frame is None:
            frame = np.zeros((720, 1280, 3), np.uint8)

        height = int(request.args.get("h", str(frame.shape[0])))
        width = int(height * frame.shape[1] / frame.shape[0])

        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)

        ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
        response = make_response(jpg.tobytes())
        response.headers["Content-Type"] = "image/jpeg"
        return response
    else:
        return f"Camera named {camera_name} not found", 404


@bp.route("/<camera_name>/recordings")
def recordings(camera_name):
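    """Summarize available recordings by day and hour, with per-hour event counts."""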
    files = glob.glob(f"{RECORD_DIR}/*/*/*/{camera_name}")

    if len(files) == 0:
        return "No recordings found.", 404

    files.sort()

    dates = OrderedDict()
    for path in files:
        search = re.search(r".+/(\d{4}[-]\d{2})/(\d{2})/(\d{2}).+", path)
        if not search:
            continue
        date = f"{search.group(1)}-{search.group(2)}"
        if date not in dates:
            dates[date] = OrderedDict()
        dates[date][search.group(3)] = 0

    events = (
        Event.select(
            fn.DATE(Event.start_time, "unixepoch", "localtime"),
            fn.STRFTIME("%H", Event.start_time, "unixepoch", "localtime"),
            fn.COUNT(Event.id),
        )
        .where(Event.camera == camera_name)
        .group_by(
            fn.DATE(Event.start_time, "unixepoch", "localtime"),
            fn.STRFTIME("%H", Event.start_time, "unixepoch", "localtime"),
        )
        .tuples()
    )

    for date, hour, count in events:
        key = date.strftime("%Y-%m-%d")
        if key in dates and hour in dates[key]:
            dates[key][hour] = count

    return jsonify(
        [
            {
                "date": date,
                "recordings": [
                    {"hour": hour, "events": events} for hour, events in hours.items()
                ],
            }
            for date, hours in dates.items()
        ]
    )


@bp.route("/vod/<path:path>")
def vod(path):
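    """Return clip paths and durations for the recordings under a path."""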
    if not os.path.isdir(f"{RECORD_DIR}/{path}"):
        return "Recordings not found.", 404

    files = glob.glob(f"{RECORD_DIR}/{path}/*.mp4")
    files.sort()

    clips = []
    durations = []
    for filename in files:
        clips.append({"type": "source", "path": filename})
        video = cv2.VideoCapture(filename)
        duration = int(
            video.get(cv2.CAP_PROP_FRAME_COUNT) / video.get(cv2.CAP_PROP_FPS) * 1000
        )
        # release the capture handle to avoid leaking file descriptors
        video.release()
        durations.append(duration)

    # mark the response cacheable once the recording hour is over 2 hours old
    parts = path.split("/", 4)
    date = datetime.strptime(f"{parts[0]}-{parts[1]} {parts[2]}", "%Y-%m-%d %H")

    return jsonify(
        {
            "cache": datetime.now() - timedelta(hours=2) > date,
            "discontinuity": False,
            "durations": durations,
            "sequences": [{"clips": clips}],
        }
    )


def imagestream(detected_frames_processor, camera_name, fps, height, draw_options):
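    """Yield multipart jpeg frames for a camera at roughly the requested fps."""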
    while True:
        # max out at specified FPS
        gevent.sleep(1 / fps)
        frame = detected_frames_processor.get_current_frame(camera_name, draw_options)
        if frame is None:
            frame = np.zeros((height, int(height * 16 / 9), 3), np.uint8)

        width = int(height * frame.shape[1] / frame.shape[0])
        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_LINEAR)

        ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
        yield (
            b"--frame\r\n"
            b"Content-Type: image/jpeg\r\n\r\n" + jpg.tobytes() + b"\r\n\r\n"
        )


@ws.route("/ws")
def echo_socket(socket):
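    """Register the socket for mqtt updates and relay inbound messages to mqtt."""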
    current_app.mqtt_backend.register(socket)

    while not socket.closed:
        # Sleep to prevent *constant* context-switches.
        gevent.sleep(0.1)

        message = socket.receive()
        if message:
            current_app.mqtt_backend.publish(message)