"""Review APIs."""

import logging
from datetime import datetime, timedelta
from functools import reduce
from pathlib import Path
from typing import Any

import pandas as pd
from flask import Blueprint, jsonify, make_response, request
from peewee import Case, DoesNotExist, fn, operator
from playhouse.shortcuts import model_to_dict

from frigate.models import Recordings, ReviewSegment
from frigate.util.builtin import get_tz_modifiers

logger = logging.getLogger(__name__)

ReviewBp = Blueprint("reviews", __name__)
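

# Lists review segments filtered by camera, label, zone, time window,
# severity, and reviewed state. By default this returns unreviewed segments
# from the last 24 hours. Illustrative request (camera and label names are
# hypothetical):
#   GET /review?cameras=front_door&labels=person&severity=alert&limit=50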
@ReviewBp.route("/review")
def review():
    cameras = request.args.get("cameras", "all")
    labels = request.args.get("labels", "all")
    zones = request.args.get("zones", "all")
    reviewed = request.args.get("reviewed", type=int, default=0)
    limit = request.args.get("limit", type=int, default=None)
    severity = request.args.get("severity", None)

    before = request.args.get("before", type=float, default=datetime.now().timestamp())
    after = request.args.get(
        "after", type=float, default=(datetime.now() - timedelta(hours=24)).timestamp()
    )

    clauses = [
        (
            (ReviewSegment.start_time > after)
            & (
                (ReviewSegment.end_time.is_null(True))
                | (ReviewSegment.end_time < before)
            )
        )
    ]

    if cameras != "all":
        camera_list = cameras.split(",")
        clauses.append((ReviewSegment.camera << camera_list))

    if labels != "all":
        # use matching so segments with multiple labels
        # still match on a search where any label matches
        label_clauses = []
        filtered_labels = labels.split(",")

        for label in filtered_labels:
            label_clauses.append(
                (ReviewSegment.data["objects"].cast("text") % f'*"{label}"*')
                | (ReviewSegment.data["audio"].cast("text") % f'*"{label}"*')
            )

        label_clause = reduce(operator.or_, label_clauses)
        clauses.append(label_clause)

    if zones != "all":
        # use matching so segments with multiple zones
        # still match on a search where any zone matches
        zone_clauses = []
        filtered_zones = zones.split(",")

        for zone in filtered_zones:
            zone_clauses.append(
                (ReviewSegment.data["zones"].cast("text") % f'*"{zone}"*')
            )

        zone_clause = reduce(operator.or_, zone_clauses)
        clauses.append(zone_clause)

    if reviewed == 0:
        clauses.append((ReviewSegment.has_been_reviewed == False))

    if severity:
        clauses.append((ReviewSegment.severity == severity))

    review = (
        ReviewSegment.select()
        .where(reduce(operator.and_, clauses))
        .order_by(ReviewSegment.severity.asc(), ReviewSegment.start_time.desc())
        .limit(limit)
        .dicts()
        .iterator()
    )

    return jsonify([r for r in review])
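

# Fetches a single review segment by id, returning 404 when it does not exist.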
@ReviewBp.route("/review/<id>")
|
|
|
|
def get_review(id: str):
|
|
|
|
try:
|
|
|
|
return model_to_dict(ReviewSegment.get(ReviewSegment.id == id))
|
|
|
|
except DoesNotExist:
|
|
|
|
return "Review item not found", 404
|
|
|
|
|
|
|
|
|
2024-03-04 01:19:02 +01:00
|
|
|
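

# Aggregates reviewed/total counts per severity: one overall row for the last
# 24 hours plus one row per calendar day (in the requested timezone) over the
# last 30 days. Illustrative request (timezone and camera values are
# hypothetical):
#   GET /review/summary?timezone=America/Chicago&cameras=front_door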
@ReviewBp.route("/review/summary")
|
|
|
|
def review_summary():
|
|
|
|
tz_name = request.args.get("timezone", default="utc", type=str)
|
|
|
|
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(tz_name)
|
2024-03-06 01:39:37 +01:00
|
|
|
day_ago = (datetime.now() - timedelta(hours=24)).timestamp()
|
2024-03-04 01:19:02 +01:00
|
|
|
month_ago = (datetime.now() - timedelta(days=30)).timestamp()
|
|
|
|
|
2024-03-05 13:02:34 +01:00
|
|
|
cameras = request.args.get("cameras", "all")
|
|
|
|
labels = request.args.get("labels", "all")
|
2024-06-11 16:19:17 +02:00
|
|
|
zones = request.args.get("zones", "all")
|
2024-03-05 13:02:34 +01:00
|
|
|
|
2024-03-06 01:39:37 +01:00
|
|
|
clauses = [(ReviewSegment.start_time > day_ago)]
|
|
|
|
|
|
|
|
if cameras != "all":
|
|
|
|
camera_list = cameras.split(",")
|
|
|
|
clauses.append((ReviewSegment.camera << camera_list))
|
|
|
|
|
|
|
|
if labels != "all":
|
|
|
|
# use matching so segments with multiple labels
|
|
|
|
# still match on a search where any label matches
|
|
|
|
label_clauses = []
|
|
|
|
filtered_labels = labels.split(",")
|
|
|
|
|
|
|
|
for label in filtered_labels:
|
|
|
|
label_clauses.append(
|
|
|
|
(ReviewSegment.data["objects"].cast("text") % f'*"{label}"*')
|
2024-04-11 14:42:16 +02:00
|
|
|
| (ReviewSegment.data["audio"].cast("text") % f'*"{label}"*')
|
2024-03-06 01:39:37 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
label_clause = reduce(operator.or_, label_clauses)
|
|
|
|
clauses.append((label_clause))
|
|
|
|
|
2024-06-11 16:19:17 +02:00
|
|
|
if zones != "all":
|
|
|
|
# use matching so segments with multiple zones
|
|
|
|
# still match on a search where any zone matches
|
|
|
|
zone_clauses = []
|
|
|
|
filtered_zones = zones.split(",")
|
|
|
|
|
|
|
|
for zone in filtered_zones:
|
|
|
|
zone_clauses.append(
|
|
|
|
(ReviewSegment.data["zones"].cast("text") % f'*"{zone}"*')
|
|
|
|
)
|
|
|
|
|
|
|
|
zone_clause = reduce(operator.or_, zone_clauses)
|
|
|
|
clauses.append((zone_clause))
|
|
|
|
|
2024-03-06 01:39:37 +01:00
|
|
|
last_24 = (
|
|
|
|
ReviewSegment.select(
|
|
|
|
fn.SUM(
|
|
|
|
Case(
|
|
|
|
None,
|
|
|
|
[
|
|
|
|
(
|
|
|
|
(ReviewSegment.severity == "alert"),
|
|
|
|
ReviewSegment.has_been_reviewed,
|
|
|
|
)
|
|
|
|
],
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
).alias("reviewed_alert"),
|
|
|
|
fn.SUM(
|
|
|
|
Case(
|
|
|
|
None,
|
|
|
|
[
|
|
|
|
(
|
|
|
|
(ReviewSegment.severity == "detection"),
|
|
|
|
ReviewSegment.has_been_reviewed,
|
|
|
|
)
|
|
|
|
],
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
).alias("reviewed_detection"),
|
|
|
|
fn.SUM(
|
|
|
|
Case(
|
|
|
|
None,
|
|
|
|
[
|
|
|
|
(
|
|
|
|
(ReviewSegment.severity == "significant_motion"),
|
|
|
|
ReviewSegment.has_been_reviewed,
|
|
|
|
)
|
|
|
|
],
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
).alias("reviewed_motion"),
|
|
|
|
fn.SUM(
|
|
|
|
Case(
|
|
|
|
None,
|
|
|
|
[
|
|
|
|
(
|
|
|
|
(ReviewSegment.severity == "alert"),
|
|
|
|
1,
|
|
|
|
)
|
|
|
|
],
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
).alias("total_alert"),
|
|
|
|
fn.SUM(
|
|
|
|
Case(
|
|
|
|
None,
|
|
|
|
[
|
|
|
|
(
|
|
|
|
(ReviewSegment.severity == "detection"),
|
|
|
|
1,
|
|
|
|
)
|
|
|
|
],
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
).alias("total_detection"),
|
|
|
|
fn.SUM(
|
|
|
|
Case(
|
|
|
|
None,
|
|
|
|
[
|
|
|
|
(
|
|
|
|
(ReviewSegment.severity == "significant_motion"),
|
|
|
|
1,
|
|
|
|
)
|
|
|
|
],
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
).alias("total_motion"),
|
|
|
|
)
|
|
|
|
.where(reduce(operator.and_, clauses))
|
|
|
|
.dicts()
|
|
|
|
.get()
|
|
|
|
)
|
|
|
|
|
2024-03-05 13:02:34 +01:00
|
|
|
clauses = [(ReviewSegment.start_time > month_ago)]
|
|
|
|
|
|
|
|
if cameras != "all":
|
|
|
|
camera_list = cameras.split(",")
|
|
|
|
clauses.append((ReviewSegment.camera << camera_list))
|
|
|
|
|
|
|
|
if labels != "all":
|
|
|
|
# use matching so segments with multiple labels
|
|
|
|
# still match on a search where any label matches
|
|
|
|
label_clauses = []
|
|
|
|
filtered_labels = labels.split(",")
|
|
|
|
|
|
|
|
for label in filtered_labels:
|
|
|
|
label_clauses.append(
|
|
|
|
(ReviewSegment.data["objects"].cast("text") % f'*"{label}"*')
|
|
|
|
)
|
|
|
|
|
|
|
|
label_clause = reduce(operator.or_, label_clauses)
|
|
|
|
clauses.append((label_clause))
|
|
|
|
|
2024-03-06 01:39:37 +01:00
|
|
|
last_month = (
|
2024-03-04 01:19:02 +01:00
|
|
|
ReviewSegment.select(
|
|
|
|
fn.strftime(
|
|
|
|
"%Y-%m-%d",
|
|
|
|
fn.datetime(
|
|
|
|
ReviewSegment.start_time,
|
|
|
|
"unixepoch",
|
|
|
|
hour_modifier,
|
|
|
|
minute_modifier,
|
|
|
|
),
|
|
|
|
).alias("day"),
|
|
|
|
fn.SUM(
|
|
|
|
Case(
|
|
|
|
None,
|
|
|
|
[
|
|
|
|
(
|
|
|
|
(ReviewSegment.severity == "alert"),
|
|
|
|
ReviewSegment.has_been_reviewed,
|
|
|
|
)
|
|
|
|
],
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
).alias("reviewed_alert"),
|
|
|
|
fn.SUM(
|
|
|
|
Case(
|
|
|
|
None,
|
|
|
|
[
|
|
|
|
(
|
|
|
|
(ReviewSegment.severity == "detection"),
|
|
|
|
ReviewSegment.has_been_reviewed,
|
|
|
|
)
|
|
|
|
],
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
).alias("reviewed_detection"),
|
|
|
|
fn.SUM(
|
|
|
|
Case(
|
|
|
|
None,
|
|
|
|
[
|
|
|
|
(
|
|
|
|
(ReviewSegment.severity == "significant_motion"),
|
|
|
|
ReviewSegment.has_been_reviewed,
|
|
|
|
)
|
|
|
|
],
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
).alias("reviewed_motion"),
|
|
|
|
fn.SUM(
|
|
|
|
Case(
|
|
|
|
None,
|
|
|
|
[
|
|
|
|
(
|
|
|
|
(ReviewSegment.severity == "alert"),
|
|
|
|
1,
|
|
|
|
)
|
|
|
|
],
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
).alias("total_alert"),
|
|
|
|
fn.SUM(
|
|
|
|
Case(
|
|
|
|
None,
|
|
|
|
[
|
|
|
|
(
|
|
|
|
(ReviewSegment.severity == "detection"),
|
|
|
|
1,
|
|
|
|
)
|
|
|
|
],
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
).alias("total_detection"),
|
|
|
|
fn.SUM(
|
|
|
|
Case(
|
|
|
|
None,
|
|
|
|
[
|
|
|
|
(
|
|
|
|
(ReviewSegment.severity == "significant_motion"),
|
|
|
|
1,
|
|
|
|
)
|
|
|
|
],
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
).alias("total_motion"),
|
|
|
|
)
|
2024-03-05 13:02:34 +01:00
|
|
|
.where(reduce(operator.and_, clauses))
|
2024-03-04 01:19:02 +01:00
|
|
|
.group_by(
|
|
|
|
(ReviewSegment.start_time + seconds_offset).cast("int") / (3600 * 24),
|
|
|
|
)
|
|
|
|
.order_by(ReviewSegment.start_time.desc())
|
|
|
|
)
|
|
|
|
|
2024-03-06 01:39:37 +01:00
|
|
|
data = {
|
|
|
|
"last24Hours": last_24,
|
|
|
|
}
|
2024-03-04 01:19:02 +01:00
|
|
|
|
2024-03-06 01:39:37 +01:00
|
|
|
for e in last_month.dicts().iterator():
|
|
|
|
data[e["day"]] = e
|
2024-03-04 01:19:02 +01:00
|
|
|
|
2024-03-06 01:39:37 +01:00
|
|
|
return jsonify(data)
|
2024-03-02 23:10:37 +01:00
|
|
|
|
|
|
|
|
2024-03-06 01:39:37 +01:00
|
|
|
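

# Marks a batch of review segments as reviewed. Illustrative request body
# (ids are hypothetical):
#   POST /reviews/viewed
#   {"ids": ["1718902800.123456-abcdef", "1718902900.654321-fedcba"]}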
@ReviewBp.route("/reviews/viewed", methods=("POST",))
|
|
|
|
def set_multiple_reviewed():
|
|
|
|
json: dict[str, any] = request.get_json(silent=True) or {}
|
|
|
|
list_of_ids = json.get("ids", "")
|
2024-03-02 23:10:37 +01:00
|
|
|
|
|
|
|
if not list_of_ids or len(list_of_ids) == 0:
|
|
|
|
return make_response(
|
|
|
|
jsonify({"success": False, "message": "Not a valid list of ids"}), 404
|
|
|
|
)
|
|
|
|
|
|
|
|
ReviewSegment.update(has_been_reviewed=True).where(
|
|
|
|
ReviewSegment.id << list_of_ids
|
|
|
|
).execute()
|
|
|
|
|
|
|
|
return make_response(
|
|
|
|
jsonify({"success": True, "message": "Reviewed multiple items"}), 200
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
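

# Clears the reviewed flag on a single review segment.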
@ReviewBp.route("/review/<id>/viewed", methods=("DELETE",))
|
|
|
|
def set_not_reviewed(id):
|
|
|
|
try:
|
|
|
|
review: ReviewSegment = ReviewSegment.get(ReviewSegment.id == id)
|
|
|
|
except DoesNotExist:
|
|
|
|
return make_response(
|
|
|
|
jsonify({"success": False, "message": "Review " + id + " not found"}), 404
|
|
|
|
)
|
|
|
|
|
|
|
|
review.has_been_reviewed = False
|
|
|
|
review.save()
|
|
|
|
|
|
|
|
return make_response(
|
|
|
|
jsonify({"success": True, "message": "Reviewed " + id + " not viewed"}), 200
|
|
|
|
)
|
|
|
|
|
|
|
|
|
2024-03-27 14:22:28 +01:00
|
|
|
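

# Deletes a batch of review segments and unlinks the recording files that
# overlap them on disk, so this endpoint is destructive. Illustrative request
# body (ids are hypothetical):
#   POST /reviews/delete
#   {"ids": ["1718902800.123456-abcdef"]}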
@ReviewBp.route("/reviews/delete", methods=("POST",))
|
|
|
|
def delete_reviews():
|
|
|
|
json: dict[str, any] = request.get_json(silent=True) or {}
|
|
|
|
list_of_ids = json.get("ids", "")
|
2024-03-02 23:10:37 +01:00
|
|
|
|
|
|
|
if not list_of_ids or len(list_of_ids) == 0:
|
|
|
|
return make_response(
|
|
|
|
jsonify({"success": False, "message": "Not a valid list of ids"}), 404
|
|
|
|
)
|
|
|
|
|
2024-03-27 14:22:28 +01:00
|
|
|
reviews = (
|
|
|
|
ReviewSegment.select(
|
|
|
|
ReviewSegment.camera,
|
|
|
|
ReviewSegment.start_time,
|
|
|
|
ReviewSegment.end_time,
|
|
|
|
)
|
|
|
|
.where(ReviewSegment.id << list_of_ids)
|
|
|
|
.dicts()
|
|
|
|
.iterator()
|
|
|
|
)
|
|
|
|
recording_ids = []
|
|
|
|
|
|
|
|
for review in reviews:
|
|
|
|
start_time = review["start_time"]
|
|
|
|
end_time = review["end_time"]
|
|
|
|
camera_name = review["camera"]
|
|
|
|
recordings = (
|
|
|
|
Recordings.select(Recordings.id, Recordings.path)
|
|
|
|
.where(
|
|
|
|
Recordings.start_time.between(start_time, end_time)
|
|
|
|
| Recordings.end_time.between(start_time, end_time)
|
|
|
|
| (
|
|
|
|
(start_time > Recordings.start_time)
|
|
|
|
& (end_time < Recordings.end_time)
|
|
|
|
)
|
|
|
|
)
|
|
|
|
.where(Recordings.camera == camera_name)
|
|
|
|
.dicts()
|
|
|
|
.iterator()
|
|
|
|
)
|
|
|
|
|
|
|
|
for recording in recordings:
|
|
|
|
Path(recording["path"]).unlink(missing_ok=True)
|
|
|
|
recording_ids.append(recording["id"])
|
|
|
|
|
|
|
|
# delete recordings and review segments
|
|
|
|
Recordings.delete().where(Recordings.id << recording_ids).execute()
|
2024-03-02 23:10:37 +01:00
|
|
|
ReviewSegment.delete().where(ReviewSegment.id << list_of_ids).execute()
|
|
|
|
|
|
|
|
return make_response(jsonify({"success": True, "message": "Delete reviews"}), 200)
|
2024-03-05 20:55:44 +01:00
|
|
|
|
|
|
|
|
2024-03-09 15:08:06 +01:00
|
|
|
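

# Returns motion activity resampled into `scale`-second buckets; each
# hour-long window is normalized to a 0-100 range. Illustrative request
# (camera name is hypothetical):
#   GET /review/activity/motion?cameras=front_door&scale=30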
@ReviewBp.route("/review/activity/motion")
|
|
|
|
def motion_activity():
|
2024-03-05 20:55:44 +01:00
|
|
|
"""Get motion and audio activity."""
|
2024-03-06 01:39:37 +01:00
|
|
|
cameras = request.args.get("cameras", "all")
|
2024-03-05 20:55:44 +01:00
|
|
|
before = request.args.get("before", type=float, default=datetime.now().timestamp())
|
|
|
|
after = request.args.get(
|
|
|
|
"after", type=float, default=(datetime.now() - timedelta(hours=1)).timestamp()
|
|
|
|
)
|
|
|
|
|
2024-03-06 01:39:37 +01:00
|
|
|
clauses = [(Recordings.start_time > after) & (Recordings.end_time < before)]
|
2024-03-25 17:00:28 +01:00
|
|
|
clauses.append((Recordings.motion > 0))
|
2024-03-06 01:39:37 +01:00
|
|
|
|
|
|
|
if cameras != "all":
|
|
|
|
camera_list = cameras.split(",")
|
|
|
|
clauses.append((Recordings.camera << camera_list))
|
2024-03-05 20:55:44 +01:00
|
|
|
|
2024-03-14 20:57:14 +01:00
|
|
|
data: list[Recordings] = (
|
2024-03-05 20:55:44 +01:00
|
|
|
Recordings.select(
|
2024-03-25 17:00:28 +01:00
|
|
|
Recordings.camera,
|
2024-03-05 20:55:44 +01:00
|
|
|
Recordings.start_time,
|
2024-03-15 20:13:40 +01:00
|
|
|
Recordings.motion,
|
2024-03-05 20:55:44 +01:00
|
|
|
)
|
2024-03-06 01:39:37 +01:00
|
|
|
.where(reduce(operator.and_, clauses))
|
2024-03-05 20:55:44 +01:00
|
|
|
.order_by(Recordings.start_time.asc())
|
2024-03-14 20:57:14 +01:00
|
|
|
.dicts()
|
2024-03-05 20:55:44 +01:00
|
|
|
.iterator()
|
|
|
|
)
|
|
|
|
|
2024-03-06 01:39:37 +01:00
|
|
|
# get scale in seconds
|
|
|
|
scale = request.args.get("scale", type=int, default=30)
|
|
|
|
|
2024-03-05 20:55:44 +01:00
|
|
|
# resample data using pandas to get activity on scaled basis
|
2024-03-25 17:00:28 +01:00
|
|
|
df = pd.DataFrame(data, columns=["start_time", "motion", "camera"])
|
2024-05-28 19:33:28 +02:00
|
|
|
|
|
|
|
if df.empty:
|
|
|
|
logger.warning("No motion data found for the requested time range")
|
|
|
|
return jsonify([])
|
|
|
|
|
2024-04-29 13:23:08 +02:00
|
|
|
df = df.astype(dtype={"motion": "float16"})
|
2024-03-05 20:55:44 +01:00
|
|
|
|
|
|
|
# set date as datetime index
|
|
|
|
df["start_time"] = pd.to_datetime(df["start_time"], unit="s")
|
|
|
|
df.set_index(["start_time"], inplace=True)
|
|
|
|
|
|
|
|
# normalize data
|
2024-03-25 17:00:28 +01:00
|
|
|
motion = (
|
|
|
|
df["motion"]
|
2024-04-11 22:54:09 +02:00
|
|
|
.resample(f"{scale}s")
|
2024-03-14 20:57:14 +01:00
|
|
|
.apply(lambda x: max(x, key=abs, default=0.0))
|
|
|
|
.fillna(0.0)
|
2024-03-25 17:00:28 +01:00
|
|
|
.to_frame()
|
2024-03-05 20:55:44 +01:00
|
|
|
)
|
2024-04-10 00:51:38 +02:00
|
|
|
cameras = df["camera"].resample(f"{scale}s").agg(lambda x: ",".join(set(x)))
|
2024-03-25 17:00:28 +01:00
|
|
|
df = motion.join(cameras)
|
2024-03-17 13:30:39 +01:00
|
|
|
|
|
|
|
length = df.shape[0]
|
|
|
|
chunk = int(60 * (60 / scale))
|
|
|
|
|
|
|
|
for i in range(0, length, chunk):
|
|
|
|
part = df.iloc[i : i + chunk]
|
|
|
|
df.iloc[i : i + chunk, 0] = (
|
|
|
|
(part["motion"] - part["motion"].min())
|
|
|
|
/ (part["motion"].max() - part["motion"].min())
|
|
|
|
* 100
|
2024-03-19 04:21:09 +01:00
|
|
|
).fillna(0.0)
|
2024-03-09 15:08:06 +01:00
|
|
|
|
|
|
|
# change types for output
|
|
|
|
df.index = df.index.astype(int) // (10**9)
|
|
|
|
normalized = df.reset_index().to_dict("records")
|
|
|
|
return jsonify(normalized)
|
|
|
|
|
|
|
|
|
|
|
|
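

# Returns audio level (dBFS) resampled into `scale`-second buckets and
# normalized into a 0 to -100 range relative to the observed min/max.
# Illustrative request (camera name is hypothetical):
#   GET /review/activity/audio?cameras=front_door&scale=30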
@ReviewBp.route("/review/activity/audio")
|
|
|
|
def audio_activity():
|
|
|
|
"""Get motion and audio activity."""
|
|
|
|
cameras = request.args.get("cameras", "all")
|
|
|
|
before = request.args.get("before", type=float, default=datetime.now().timestamp())
|
|
|
|
after = request.args.get(
|
|
|
|
"after", type=float, default=(datetime.now() - timedelta(hours=1)).timestamp()
|
|
|
|
)
|
|
|
|
|
|
|
|
clauses = [(Recordings.start_time > after) & (Recordings.end_time < before)]
|
|
|
|
|
|
|
|
if cameras != "all":
|
|
|
|
camera_list = cameras.split(",")
|
|
|
|
clauses.append((Recordings.camera << camera_list))
|
|
|
|
|
|
|
|
all_recordings: list[Recordings] = (
|
|
|
|
Recordings.select(
|
|
|
|
Recordings.start_time,
|
|
|
|
Recordings.duration,
|
|
|
|
Recordings.objects,
|
|
|
|
Recordings.dBFS,
|
|
|
|
)
|
|
|
|
.where(reduce(operator.and_, clauses))
|
|
|
|
.order_by(Recordings.start_time.asc())
|
|
|
|
.iterator()
|
|
|
|
)
|
|
|
|
|
|
|
|
# format is: { timestamp: segment_start_ts, motion: [0-100], audio: [0 - -100] }
|
|
|
|
# periods where active objects / audio was detected will cause audio to be scaled down
|
|
|
|
data: list[dict[str, float]] = []
|
|
|
|
|
|
|
|
for rec in all_recordings:
|
|
|
|
data.append(
|
|
|
|
{
|
|
|
|
"start_time": rec.start_time,
|
|
|
|
"audio": rec.dBFS if rec.objects == 0 else 0,
|
|
|
|
}
|
|
|
|
)
|
|
|
|
|
|
|
|
# get scale in seconds
|
|
|
|
scale = request.args.get("scale", type=int, default=30)
|
|
|
|
|
|
|
|
# resample data using pandas to get activity on scaled basis
|
|
|
|
df = pd.DataFrame(data, columns=["start_time", "audio"])
|
2024-04-29 13:23:08 +02:00
|
|
|
df = df.astype(dtype={"audio": "float16"})
|
2024-03-09 15:08:06 +01:00
|
|
|
|
|
|
|
# set date as datetime index
|
|
|
|
df["start_time"] = pd.to_datetime(df["start_time"], unit="s")
|
|
|
|
df.set_index(["start_time"], inplace=True)
|
|
|
|
|
|
|
|
# normalize data
|
|
|
|
df = df.resample(f"{scale}S").mean().fillna(0.0)
|
2024-03-05 20:55:44 +01:00
|
|
|
df["audio"] = (
|
|
|
|
(df["audio"] - df["audio"].max())
|
|
|
|
/ (df["audio"].min() - df["audio"].max())
|
|
|
|
* -100
|
|
|
|
)
|
|
|
|
|
|
|
|
# change types for output
|
|
|
|
df.index = df.index.astype(int) // (10**9)
|
|
|
|
normalized = df.reset_index().to_dict("records")
|
|
|
|
return jsonify(normalized)
|