# blakeblackshear.frigate/frigate/http.py

import base64
import datetime
import logging
import os
import time
from functools import reduce

import cv2
import numpy as np
from flask import (Blueprint, Flask, Response, current_app, jsonify,
                   make_response, request)
from peewee import SqliteDatabase, operator, fn, DoesNotExist
from playhouse.shortcuts import model_to_dict

from frigate.models import Event

logger = logging.getLogger(__name__)

bp = Blueprint('frigate', __name__)

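# Builds the Flask app: opens/closes the SQLite connection around each request
# and attaches shared state (config, camera metrics, detectors, frame processor)
# that the route handlers below read via current_app.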
def create_app(frigate_config, database: SqliteDatabase, camera_metrics, detectors, detected_frames_processor):
    app = Flask(__name__)

    @app.before_request
    def _db_connect():
        database.connect()

    @app.teardown_request
    def _db_close(exc):
        if not database.is_closed():
            database.close()

    app.frigate_config = frigate_config
    app.camera_metrics = camera_metrics
    app.detectors = detectors
    app.detected_frames_processor = detected_frames_processor

    app.register_blueprint(bp)

    return app

@bp.route('/')
def is_healthy():
    return "Frigate is running. Alive and healthy!"

@bp.route('/events/summary')
def events_summary():
    groups = (
        Event
            .select(
                Event.camera,
                Event.label,
                fn.strftime('%Y-%m-%d', fn.datetime(Event.start_time, 'unixepoch', 'localtime')).alias('day'),
                Event.zones,
                fn.COUNT(Event.id).alias('count')
            )
            .group_by(
                Event.camera,
                Event.label,
                fn.strftime('%Y-%m-%d', fn.datetime(Event.start_time, 'unixepoch', 'localtime')),
                Event.zones
            )
        )

    return jsonify([e for e in groups.dicts()])

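# GET /events/<id>
# Returns a single event row as JSON, or 404 if the id is unknown.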
@bp.route('/events/<id>')
def event(id):
    try:
        return model_to_dict(Event.get(Event.id == id))
    except DoesNotExist:
        return "Event not found", 404

@bp.route('/events/<id>/snapshot.jpg')
def event_snapshot(id):
    format = request.args.get('format', 'ios')
    thumbnail_bytes = None
    try:
        event = Event.get(Event.id == id)
        thumbnail_bytes = base64.b64decode(event.thumbnail)
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            for camera_state in current_app.detected_frames_processor.camera_states.values():
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    if tracked_obj is not None:
                        thumbnail_bytes = tracked_obj.get_jpg_bytes()
        except:
            return "Event not found", 404

    if thumbnail_bytes is None:
        return "Event not found", 404

    # android notifications prefer a 2:1 ratio
    if format == 'android':
        jpg_as_np = np.frombuffer(thumbnail_bytes, dtype=np.uint8)
        img = cv2.imdecode(jpg_as_np, flags=1)
        thumbnail = cv2.copyMakeBorder(img, 0, 0, int(img.shape[1]*0.5), int(img.shape[1]*0.5), cv2.BORDER_CONSTANT, (0,0,0))
        ret, jpg = cv2.imencode('.jpg', thumbnail)
        thumbnail_bytes = jpg.tobytes()

    response = make_response(thumbnail_bytes)
    response.headers['Content-Type'] = 'image/jpg'
    return response

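# GET /events
# Optional query parameters: limit (default 100), camera, label, zone, after, before
# (after/before are unix timestamps compared against Event.start_time).
# Example (hypothetical values): /events?camera=back_yard&label=person&limit=10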
@bp.route('/events')
def events():
    limit = request.args.get('limit', 100)
    camera = request.args.get('camera')
    label = request.args.get('label')
    zone = request.args.get('zone')
    after = request.args.get('after', type=int)
    before = request.args.get('before', type=int)

    clauses = []

    if camera:
        clauses.append((Event.camera == camera))
    if label:
        clauses.append((Event.label == label))
    if zone:
        clauses.append((Event.zones.cast('text') % f"*\"{zone}\"*"))
    if after:
        clauses.append((Event.start_time >= after))
    if before:
        clauses.append((Event.start_time <= before))

    if len(clauses) == 0:
        clauses.append((1 == 1))

    events = (Event.select()
              .where(reduce(operator.and_, clauses))
              .order_by(Event.start_time.desc())
              .limit(limit))

    return jsonify([model_to_dict(e) for e in events])

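# GET /config
# Returns the resolved Frigate configuration as JSON.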
@bp.route('/config')
def config():
    return jsonify(current_app.frigate_config.to_dict())

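# GET /stats
# Per-camera FPS/PID metrics plus per-detector inference stats, e.g. (hypothetical values):
#   {"back_yard": {"camera_fps": 5.0, "process_fps": 5.0, "skipped_fps": 0.0,
#                  "detection_fps": 1.2, "pid": 123, "capture_pid": 124},
#    "detectors": {"coral": {"inference_speed": 10.5, "detection_start": 0.0, "pid": 125}},
#    "detection_fps": 1.2}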
@bp.route('/stats')
def stats():
    camera_metrics = current_app.camera_metrics
    stats = {}

    total_detection_fps = 0

    for name, camera_stats in camera_metrics.items():
        total_detection_fps += camera_stats['detection_fps'].value
        stats[name] = {
            'camera_fps': round(camera_stats['camera_fps'].value, 2),
            'process_fps': round(camera_stats['process_fps'].value, 2),
            'skipped_fps': round(camera_stats['skipped_fps'].value, 2),
            'detection_fps': round(camera_stats['detection_fps'].value, 2),
            'pid': camera_stats['process'].pid,
            'capture_pid': camera_stats['capture_process'].pid
        }

    stats['detectors'] = {}
    for name, detector in current_app.detectors.items():
        stats['detectors'][name] = {
            'inference_speed': round(detector.avg_inference_speed.value*1000, 2),
            'detection_start': detector.detection_start.value,
            'pid': detector.detect_process.pid
        }

    stats['detection_fps'] = round(total_detection_fps, 2)

    return jsonify(stats)

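# GET /<camera_name>/<label>/best.jpg?crop=1&h=300
# Returns the highest-scoring recent frame for the given label on the given camera.
# crop=1 crops to the detected object's region; h resizes to the requested height.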
@bp.route('/<camera_name>/<label>/best.jpg')
def best(camera_name, label):
    if camera_name in current_app.frigate_config.cameras:
        best_object = current_app.detected_frames_processor.get_best(camera_name, label)
        best_frame = best_object.get('frame')
        if best_frame is None:
            best_frame = np.zeros((720,1280,3), np.uint8)
        else:
            best_frame = cv2.cvtColor(best_frame, cv2.COLOR_YUV2BGR_I420)

        crop = bool(request.args.get('crop', 0, type=int))
        if crop:
            region = best_object.get('region', [0,0,300,300])
            best_frame = best_frame[region[1]:region[3], region[0]:region[2]]

        height = int(request.args.get('h', str(best_frame.shape[0])))
        width = int(height*best_frame.shape[1]/best_frame.shape[0])

        best_frame = cv2.resize(best_frame, dsize=(width, height), interpolation=cv2.INTER_AREA)
        ret, jpg = cv2.imencode('.jpg', best_frame)
        response = make_response(jpg.tobytes())
        response.headers['Content-Type'] = 'image/jpg'
        return response
    else:
        return "Camera named {} not found".format(camera_name), 404

@bp.route('/<camera_name>')
def mjpeg_feed(camera_name):
    fps = int(request.args.get('fps', '3'))
    height = int(request.args.get('h', '360'))
    if camera_name in current_app.frigate_config.cameras:
        # return a multipart response
        return Response(imagestream(current_app.detected_frames_processor, camera_name, fps, height),
                        mimetype='multipart/x-mixed-replace; boundary=frame')
    else:
        return "Camera named {} not found".format(camera_name), 404

@bp.route('/<camera_name>/latest.jpg')
def latest_frame(camera_name):
    if camera_name in current_app.frigate_config.cameras:
        frame = current_app.detected_frames_processor.get_current_frame(camera_name)
        if frame is None:
            frame = np.zeros((720,1280,3), np.uint8)

        height = int(request.args.get('h', str(frame.shape[0])))
        width = int(height*frame.shape[1]/frame.shape[0])

        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)
        ret, jpg = cv2.imencode('.jpg', frame)
        response = make_response(jpg.tobytes())
        response.headers['Content-Type'] = 'image/jpg'
        return response
    else:
        return "Camera named {} not found".format(camera_name), 404

def imagestream(detected_frames_processor, camera_name, fps, height):
    while True:
        # max out at specified FPS
        time.sleep(1/fps)
        frame = detected_frames_processor.get_current_frame(camera_name, draw=True)
        if frame is None:
            frame = np.zeros((height,int(height*16/9),3), np.uint8)

        width = int(height*frame.shape[1]/frame.shape[0])

        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_LINEAR)
        ret, jpg = cv2.imencode('.jpg', frame)
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + jpg.tobytes() + b'\r\n\r\n')