Improve graph using pandas (#9234)

* Ensure viewport is always full screen

* Protect against hour with no cards and ensure data is consistent

* Reduce grouped up image refreshes

* Include current hour and fix scrubbing bugginess

* Scroll initially selected timeline into view

* Expand timeline class type

* Use poster image for preview on video player instead of using separate image view

* Fix available streaming modes

* Increase timing for grouping timeline items

* Fix audio activity listener

* Fix player not switching views correctly

* Use player time to convert to timeline time

* Update sub labels for previous timeline items

* Show mini timeline bar for non selected items

* Rewrite desktop timeline to use separate dynamic video player component

* Extend improvements to mobile as well

* Improve time formatting

* Fix scroll

* Fix no preview case

* Mobile fixes

* Audio toggle fixes

* More fixes for mobile

* Improve scaling of graph motion activity

* Add keyboard shortcut hook and support shortcuts for playback page

* Fix sizing of dialog

* Improve height scaling of dialog

* simplify and fix layout system for timeline

* Fix timeline items not working

* Implement basic Frigate+ submitting from timeline
Nicolas Mowen 2024-01-31 05:29:18 -07:00 committed by Blake Blackshear
parent 9c4b69191b
commit af3f6dadcb
28 changed files with 1379 additions and 852 deletions

View File

@@ -7,6 +7,7 @@ numpy == 1.23.*
 onvif_zeep == 0.2.12
 opencv-python-headless == 4.7.0.*
 paho-mqtt == 1.6.*
+pandas == 2.1.4
 peewee == 3.17.*
 peewee_migrate == 1.12.*
 psutil == 5.9.*

View File

@@ -16,6 +16,7 @@ from urllib.parse import unquote

 import cv2
 import numpy as np
+import pandas as pd
 import pytz
 import requests
 from flask import (
@@ -390,6 +391,17 @@ def set_sub_label(id):
     new_sub_label = json.get("subLabel")
     new_score = json.get("subLabelScore")

+    if new_sub_label is None:
+        return make_response(
+            jsonify(
+                {
+                    "success": False,
+                    "message": "A sub label must be supplied",
+                }
+            ),
+            400,
+        )
+
     if new_sub_label and len(new_sub_label) > 100:
         return make_response(
             jsonify(
@@ -415,6 +427,7 @@ def set_sub_label(id):
         )

     if not event.end_time:
+        # update tracked object
         tracked_obj: TrackedObject = (
             current_app.detected_frames_processor.camera_states[
                 event.camera
@@ -424,6 +437,11 @@ def set_sub_label(id):
         if tracked_obj:
             tracked_obj.obj_data["sub_label"] = (new_sub_label, new_score)

+    # update timeline items
+    Timeline.update(
+        data=Timeline.data.update({"sub_label": (new_sub_label, new_score)})
+    ).where(Timeline.source_id == id).execute()
+
     event.sub_label = new_sub_label
     if new_score:
@@ -739,41 +757,59 @@ def hourly_timeline_activity(camera_name: str):
     # set initial start so data is representative of full hour
     hours[int(key.timestamp())].append(
-        {
-            "date": key.timestamp(),
-            "count": 0,
-            "type": "motion",
-        }
+        [
+            key.timestamp(),
+            0,
+            False,
+        ]
     )

     for recording in all_recordings:
         if recording.start_time > check:
             hours[int(key.timestamp())].append(
-                {
-                    "date": (key + timedelta(hours=1)).timestamp(),
-                    "count": 0,
-                    "type": "motion",
-                }
+                [
+                    (key + timedelta(minutes=59, seconds=59)).timestamp(),
+                    0,
+                    False,
+                ]
             )
             key = key + timedelta(hours=1)
             check = (key + timedelta(hours=1)).timestamp()
             hours[int(key.timestamp())].append(
-                {
-                    "date": key.timestamp(),
-                    "count": 0,
-                    "type": "motion",
-                }
+                [
+                    key.timestamp(),
+                    0,
+                    False,
+                ]
             )

-        data_type = "motion" if recording.objects == 0 else "objects"
+        data_type = recording.objects > 0
+        count = recording.motion + recording.objects
         hours[int(key.timestamp())].append(
-            {
-                "date": recording.start_time + (recording.duration / 2),
-                "count": recording.motion,
-                "type": data_type,
-            }
+            [
+                recording.start_time + (recording.duration / 2),
+                0 if count == 0 else np.log2(count),
+                data_type,
+            ]
         )

+    # resample data using pandas to get activity on minute to minute basis
+    for key, data in hours.items():
+        df = pd.DataFrame(data, columns=["date", "count", "hasObjects"])
+
+        # set date as datetime index
+        df["date"] = pd.to_datetime(df["date"], unit="s")
+        df.set_index(["date"], inplace=True)
+
+        # normalize data
+        df = df.resample("T").mean().fillna(0)
+
+        # change types for output
+        df.index = df.index.astype(int) // (10**9)
+        df["count"] = df["count"].astype(int)
+        df["hasObjects"] = df["hasObjects"].astype(bool)
+        hours[key] = df.reset_index().to_dict("records")
+
     return jsonify(hours)
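For readers unfamiliar with the resampling step above, here is a minimal standalone sketch of the same idea: raw per-recording samples of `(timestamp, log2 activity count, has objects)` are bucketed into one row per minute with pandas, so the UI receives a uniform minute-by-minute series. The sample values and timestamps below are invented for illustration; only the DataFrame/`resample` calls mirror the endpoint.

```python
import numpy as np
import pandas as pd

# Invented samples for one hour: (unix seconds, log2 activity count, has objects)
samples = [
    (1706716800, 0, False),            # marker at the start of the hour
    (1706716845, np.log2(12), True),   # a recording segment with objects
    (1706717100, np.log2(4), False),   # a motion-only segment
    (1706720399, 0, False),            # marker at the end of the hour
]

df = pd.DataFrame(samples, columns=["date", "count", "hasObjects"])
df["date"] = pd.to_datetime(df["date"], unit="s")
df = df.set_index("date")

# one row per minute; minutes without samples become 0 / False
df = df.resample("T").mean().fillna(0)

df.index = df.index.astype(int) // (10**9)  # back to unix seconds
df["count"] = df["count"].astype(int)
df["hasObjects"] = df["hasObjects"].astype(bool)

print(df.reset_index().to_dict("records")[:2])
# [{'date': 1706716800, 'count': 1, 'hasObjects': True},
#  {'date': 1706716860, 'count': 0, 'hasObjects': False}]
```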
@@ -1840,6 +1876,7 @@ def recordings(camera_name):
             Recordings.segment_size,
             Recordings.motion,
             Recordings.objects,
+            Recordings.duration,
         )
         .where(
             Recordings.camera == camera_name,

View File

@@ -7,7 +7,6 @@ from multiprocessing import Queue
 from multiprocessing.synchronize import Event as MpEvent

 from frigate.config import FrigateConfig
-from frigate.const import ALL_ATTRIBUTE_LABELS
 from frigate.events.maintainer import EventTypeEnum
 from frigate.models import Timeline
 from frigate.util.builtin import to_relative_box
@@ -85,12 +84,13 @@ class TimelineProcessor(threading.Thread):
         """Handle object detection."""
         save = False
         camera_config = self.config.cameras[camera]
+        event_id = event_data["id"]

         timeline_entry = {
             Timeline.timestamp: event_data["frame_time"],
             Timeline.camera: camera,
             Timeline.source: "tracked_object",
-            Timeline.source_id: event_data["id"],
+            Timeline.source_id: event_id,
             Timeline.data: {
                 "box": to_relative_box(
                     camera_config.detect.width,
@@ -107,6 +107,16 @@
                 "attribute": "",
             },
         }
+
+        # update sub labels for existing entries that haven't been added yet
+        if (
+            prev_event_data != None
+            and prev_event_data["sub_label"] != event_data["sub_label"]
+            and event_id in self.pre_event_cache.keys()
+        ):
+            for e in self.pre_event_cache[event_id]:
+                e[Timeline.data]["sub_label"] = event_data["sub_label"]
+
         if event_type == "start":
             timeline_entry[Timeline.class_type] = "visible"
             save = True
@@ -129,13 +139,6 @@
                     event_data["attributes"].keys()
                 )[0]
                 save = True
-            elif not prev_event_data.get("sub_label") and event_data.get("sub_label"):
-                sub_label = event_data["sub_label"][0]
-
-                if sub_label not in ALL_ATTRIBUTE_LABELS:
-                    timeline_entry[Timeline.class_type] = "sub_label"
-                    timeline_entry[Timeline.data]["sub_label"] = sub_label
-                    save = True
         elif event_type == "end":
             timeline_entry[Timeline.class_type] = "gone"
             save = True

View File

@@ -378,7 +378,7 @@ def auto_detect_hwaccel() -> str:
     try:
         cuda = False
        vaapi = False
-        resp = requests.get("http://192.168.50.106:1984/api/ffmpeg/hardware", timeout=3)
+        resp = requests.get("http://127.0.0.1:1984/api/ffmpeg/hardware", timeout=3)

        if resp.status_code == 200:
            data: dict[str, list[dict[str, str]]] = resp.json()

View File

@@ -5,7 +5,7 @@ type TWrapperProps = {
 };

 const Wrapper = ({ children }: TWrapperProps) => {
-  return <main className="flex flex-col max-h-screen">{children}</main>;
+  return <main className="flex flex-col h-screen">{children}</main>;
 };

 export default Wrapper;

View File

@ -0,0 +1,191 @@
import { FrigateConfig } from "@/types/frigateConfig";
import { GraphDataPoint } from "@/types/graph";
import { formatUnixTimestampToDateTime } from "@/utils/dateUtil";
import useSWR from "swr";
import ActivityIndicator from "../ui/activity-indicator";
type TimelineBarProps = {
startTime: number;
graphData:
| {
objects: number[];
motion: GraphDataPoint[];
}
| undefined;
onClick?: () => void;
};
export default function TimelineBar({
startTime,
graphData,
onClick,
}: TimelineBarProps) {
const { data: config } = useSWR<FrigateConfig>("config");
if (!config) {
return <ActivityIndicator />;
}
return (
<div
className="my-1 p-1 w-full h-18 border rounded cursor-pointer hover:bg-secondary hover:bg-opacity-30"
onClick={onClick}
>
{graphData != undefined && (
<div className="relative w-full h-8 flex">
{getHourBlocks().map((idx) => {
return (
<div
key={idx}
className={`h-2 flex-auto ${
(graphData.motion.at(idx)?.y || 0) == 0
? ""
: graphData.objects.includes(idx)
? "bg-object"
: "bg-motion"
}`}
/>
);
})}
<div className="absolute left-0 top-0 bottom-0 align-bottom border-l border-gray-500">
<div className="absolute ml-1 bottom-0 text-sm text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config?.ui.time_format == "24hour" ? "%H:00" : "%I:00%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
<div className="absolute left-[8.3%] top-0 bottom-0 align-bottom border-l border-gray-500">
<div className="absolute ml-1 bottom-0 text-sm text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config?.ui.time_format == "24hour" ? "%H:05" : "%I:05%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
<div className="absolute left-[16.7%] top-0 bottom-0 align-bottom border-l border-gray-500">
<div className="absolute ml-1 bottom-0 text-sm text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config?.ui.time_format == "24hour" ? "%H:10" : "%I:10%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
<div className="absolute left-[25%] top-0 bottom-0 align-bottom border-l border-gray-500">
<div className="absolute ml-1 bottom-0 text-sm text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config?.ui.time_format == "24hour" ? "%H:15" : "%I:15%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
<div className="absolute left-[33.3%] top-0 bottom-0 align-bottom border-l border-gray-500">
<div className="absolute ml-1 bottom-0 text-sm text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config?.ui.time_format == "24hour" ? "%H:20" : "%I:20%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
<div className="absolute left-[41.7%] top-0 bottom-0 align-bottom border-l border-gray-500">
<div className="absolute ml-1 bottom-0 text-sm text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config?.ui.time_format == "24hour" ? "%H:25" : "%I:25%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
<div className="absolute left-[50%] top-0 bottom-0 align-bottom border-l border-gray-500">
<div className="absolute ml-1 bottom-0 text-sm text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config?.ui.time_format == "24hour" ? "%H:30" : "%I:30%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
<div className="absolute left-[58.3%] top-0 bottom-0 align-bottom border-l border-gray-500">
<div className="absolute ml-1 bottom-0 text-sm text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config?.ui.time_format == "24hour" ? "%H:35" : "%I:35%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
<div className="absolute left-[66.7%] top-0 bottom-0 align-bottom border-l border-gray-500">
<div className="absolute ml-1 bottom-0 text-sm text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config?.ui.time_format == "24hour" ? "%H:40" : "%I:40%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
<div className="absolute left-[75%] top-0 bottom-0 align-bottom border-l border-gray-500">
<div className="absolute ml-1 bottom-0 text-sm text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config?.ui.time_format == "24hour" ? "%H:45" : "%I:45%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
<div className="absolute left-[83.3%] top-0 bottom-0 align-bottom border-l border-gray-500">
<div className="absolute ml-1 bottom-0 text-sm text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config?.ui.time_format == "24hour" ? "%H:50" : "%I:50%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
<div className="absolute left-[91.7%] top-0 bottom-0 align-bottom border-l border-gray-500">
<div className="absolute ml-1 bottom-0 text-sm text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config?.ui.time_format == "24hour" ? "%H:55" : "%I:55%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
</div>
)}
<div className="text-gray-500">
{formatUnixTimestampToDateTime(startTime, {
strftime_fmt:
config.ui.time_format == "24hour" ? "%m/%d %H:%M" : "%m/%d %I:%M%P",
time_style: "medium",
date_style: "medium",
})}
</div>
</div>
);
}
function getHourBlocks() {
const arr = [];
for (let x = 0; x <= 59; x++) {
arr.push(x);
}
return arr;
}
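The hard-coded tick offsets in the markup above (`left-[8.3%]`, `left-[16.7%]`, ..., `left-[91.7%]`) are simply 5-minute marks spread across an hour-wide bar. A quick sketch of where those percentages come from (plain arithmetic, not code from this PR):

```python
# Twelve 5-minute tick marks across a 60-minute wide bar:
# left offset (as a percentage) = minute / 60 * 100
for minute in range(0, 60, 5):
    print(f"{minute:02d} min -> left: {minute / 60 * 100:.1f}%")
# 00 min -> left: 0.0%
# 05 min -> left: 8.3%
# 10 min -> left: 16.7%
# ...
# 55 min -> left: 91.7%
```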

View File

@@ -1,11 +1,15 @@
-import { useCallback, useEffect, useState } from "react";
+import { useCallback, useEffect, useMemo, useState } from "react";
 import { AspectRatio } from "../ui/aspect-ratio";
 import CameraImage from "./CameraImage";
 import { LuEar } from "react-icons/lu";
 import { CameraConfig } from "@/types/frigateConfig";
 import { TbUserScan } from "react-icons/tb";
 import { MdLeakAdd } from "react-icons/md";
-import { useFrigateEvents, useMotionActivity } from "@/api/ws";
+import {
+  useAudioActivity,
+  useFrigateEvents,
+  useMotionActivity,
+} from "@/api/ws";

 type DynamicCameraImageProps = {
   camera: CameraConfig;
@@ -21,10 +25,14 @@ export default function DynamicCameraImage({
 }: DynamicCameraImageProps) {
   const [key, setKey] = useState(Date.now());
   const [activeObjects, setActiveObjects] = useState<string[]>([]);
+  const hasActiveObjects = useMemo(
+    () => activeObjects.length > 0,
+    [activeObjects]
+  );

   const { payload: detectingMotion } = useMotionActivity(camera.name);
   const { payload: event } = useFrigateEvents();
-  const { payload: audioRms } = useMotionActivity(camera.name);
+  const { payload: audioRms } = useAudioActivity(camera.name);

   useEffect(() => {
     if (!event) {
@@ -50,7 +58,6 @@ export default function DynamicCameraImage({
         if (eventIndex == -1) {
           const newActiveObjects = [...activeObjects, event.after.id];
           setActiveObjects(newActiveObjects);
-          setKey(Date.now());
         }
       }
     }
@@ -58,8 +65,9 @@ export default function DynamicCameraImage({
   const handleLoad = useCallback(() => {
     const loadTime = Date.now() - key;
-    const loadInterval =
-      activeObjects.length > 0 ? INTERVAL_ACTIVE_MS : INTERVAL_INACTIVE_MS;
+    const loadInterval = hasActiveObjects
+      ? INTERVAL_ACTIVE_MS
+      : INTERVAL_INACTIVE_MS;

     setTimeout(
       () => {
@@ -67,7 +75,7 @@ export default function DynamicCameraImage({
       },
       loadTime > loadInterval ? 1 : loadInterval
     );
-  }, [activeObjects, key]);
+  }, [key]);

   return (
     <AspectRatio
@@ -91,7 +99,7 @@ export default function DynamicCameraImage({
           activeObjects.length > 0 ? "text-object" : "text-gray-600"
         }`}
       />
-      {camera.audio.enabled && (
+      {camera.audio.enabled_in_config && (
        <LuEar
          className={`${
            parseInt(audioRms) >= camera.audio.min_volume

View File

@@ -6,6 +6,20 @@ import useSWR from "swr";
 import { FrigateConfig } from "@/types/frigateConfig";
 import VideoPlayer from "../player/VideoPlayer";
 import { Card } from "../ui/card";
+import { useApiHost } from "@/api";
+import {
+  AlertDialog,
+  AlertDialogAction,
+  AlertDialogCancel,
+  AlertDialogContent,
+  AlertDialogDescription,
+  AlertDialogFooter,
+  AlertDialogHeader,
+  AlertDialogTitle,
+  AlertDialogTrigger,
+} from "../ui/alert-dialog";
+import { useCallback } from "react";
+import axios from "axios";

 type TimelineItemCardProps = {
   timeline: Timeline;
@@ -18,37 +32,55 @@ export default function TimelineItemCard({
   onSelect,
 }: TimelineItemCardProps) {
   const { data: config } = useSWR<FrigateConfig>("config");
+  const apiHost = useApiHost();
+
+  const onSubmitToPlus = useCallback(
+    async (falsePositive: boolean) => {
+      falsePositive
+        ? await axios.put(`events/${timeline.source_id}/false_positive`)
+        : await axios.post(`events/${timeline.source_id}/plus`, {
+            include_annotation: 1,
+          });
+    },
+    [timeline]
+  );

   return (
-    <Card className="relative m-2 flex w-full h-32 cursor-pointer" onClick={onSelect}>
-      <div className="w-1/2 p-2">
-        {relevantPreview && (
-          <VideoPlayer
-            options={{
-              preload: "auto",
-              height: "114",
-              width: "202",
-              autoplay: true,
-              controls: false,
-              fluid: false,
-              muted: true,
-              loadingSpinner: false,
-              sources: [
-                {
-                  src: `${relevantPreview.src}`,
-                  type: "video/mp4",
-                },
-              ],
-            }}
-            seekOptions={{}}
-            onReady={(player) => {
-              player.pause(); // autoplay + pause is required for iOS
-              player.currentTime(timeline.timestamp - relevantPreview.start);
-            }}
-          />
-        )}
+    <Card
+      className="relative m-2 flex w-full h-20 xl:h-24 3xl:h-28 4xl:h-36 cursor-pointer"
+      onClick={onSelect}
+    >
+      <div className="w-32 xl:w-40 3xl:w-44 4xl:w-60 p-2">
+        <VideoPlayer
+          options={{
+            preload: "auto",
+            autoplay: true,
+            controls: false,
+            aspectRatio: "16:9",
+            muted: true,
+            loadingSpinner: false,
+            poster: relevantPreview
+              ? ""
+              : `${apiHost}api/preview/${timeline.camera}/${timeline.timestamp}/thumbnail.jpg`,
+            sources: relevantPreview
+              ? [
+                  {
+                    src: `${relevantPreview.src}`,
+                    type: "video/mp4",
+                  },
+                ]
+              : [],
+          }}
+          seekOptions={{}}
+          onReady={(player) => {
+            if (relevantPreview) {
+              player.pause(); // autoplay + pause is required for iOS
+              player.currentTime(timeline.timestamp - relevantPreview.start);
+            }
+          }}
+        />
       </div>
-      <div className="px-2 py-1 w-1/2">
+      <div className="py-1">
         <div className="capitalize font-semibold text-sm">
           {getTimelineItemDescription(timeline)}
         </div>
@@ -60,16 +92,52 @@
             date_style: "medium",
           })}
         </div>
-        <Button
-          className="absolute bottom-1 right-1"
-          size="sm"
-          variant="secondary"
-        >
-          <div className="w-8 h-8">
-            <Logo />
-          </div>
-          +
-        </Button>
+        {timeline.source == "tracked_object" && (
+          <AlertDialog>
+            <AlertDialogTrigger asChild>
+              <Button
+                className="absolute bottom-1 right-1 hidden xl:flex"
+                size="sm"
+                variant="secondary"
+              >
+                <div className="w-8 h-8">
+                  <Logo />
+                </div>
+                +
+              </Button>
+            </AlertDialogTrigger>
+            <AlertDialogContent>
+              <AlertDialogHeader>
+                <AlertDialogTitle>Submit To Frigate+</AlertDialogTitle>
+                <AlertDialogDescription>
+                  Objects in locations you want to avoid are not false
+                  positives. Submitting them as false positives will confuse the
+                  model.
+                </AlertDialogDescription>
+              </AlertDialogHeader>
+              <img
+                className="flex-grow-0"
+                src={`${apiHost}api/events/${timeline.source_id}/snapshot.jpg`}
+                alt={`${timeline.data.label}`}
+              />
+              <AlertDialogFooter>
+                <AlertDialogCancel>Cancel</AlertDialogCancel>
+                <AlertDialogAction
+                  className="bg-success"
+                  onClick={() => onSubmitToPlus(false)}
+                >
+                  This is a {timeline.data.label}
+                </AlertDialogAction>
+                <AlertDialogAction
+                  className="bg-danger"
+                  onClick={() => onSubmitToPlus(true)}
+                >
+                  This is not a {timeline.data.label}
+                </AlertDialogAction>
+              </AlertDialogFooter>
+            </AlertDialogContent>
+          </AlertDialog>
+        )}
       </div>
     </Card>
   );

View File

@@ -4,17 +4,34 @@ import Chart from "react-apexcharts";
 type TimelineGraphProps = {
   id: string;
   data: GraphData[];
+  start: number;
+  end: number;
+  objects: number[];
 };

 /**
  * A graph meant to be overlaid on top of a timeline
  */
-export default function TimelineGraph({ id, data }: TimelineGraphProps) {
+export default function TimelineGraph({
+  id,
+  data,
+  start,
+  end,
+  objects,
+}: TimelineGraphProps) {
   return (
     <Chart
       type="bar"
       options={{
-        colors: ["#991b1b", "#06b6d4", "#ea580c"],
+        colors: [
+          ({ dataPointIndex }: { dataPointIndex: number }) => {
+            if (objects.includes(dataPointIndex)) {
+              return "#06b6d4";
+            } else {
+              return "#991b1b";
+            }
+          },
+        ],
         chart: {
           id: id,
           selection: {
@@ -30,11 +47,27 @@
         dataLabels: { enabled: false },
         grid: {
           show: false,
+          padding: {
+            bottom: 2,
+            top: -12,
+            left: -20,
+            right: 0,
+          },
         },
         legend: {
           show: false,
           position: "top",
         },
+        plotOptions: {
+          bar: {
+            columnWidth: "100%",
+            barHeight: "100%",
+            hideZeroBarsWhenGrouped: true,
+          },
+        },
+        stroke: {
+          width: 0,
+        },
         tooltip: {
           enabled: false,
         },
@@ -49,13 +82,16 @@
           labels: {
             show: false,
           },
+          min: start,
+          max: end,
         },
         yaxis: {
+          axisBorder: {
+            show: false,
+          },
           labels: {
             show: false,
           },
-          logarithmic: true,
-          logBase: 10,
         },
       }}
       series={data}

View File

@@ -14,6 +14,10 @@ export default function TimelineEventOverlay({
   timeline,
   cameraConfig,
 }: TimelineEventOverlayProps) {
+  if (!timeline.data.box) {
+    return null;
+  }
+
   const boxLeftEdge = Math.round(timeline.data.box[0] * 100);
   const boxTopEdge = Math.round(timeline.data.box[1] * 100);
   const boxRightEdge = Math.round(
@@ -25,6 +29,10 @@
   const [isHovering, setIsHovering] = useState<boolean>(false);

   const getHoverStyle = () => {
+    if (!timeline.data.box) {
+      return {};
+    }
+
     if (boxLeftEdge < 15) {
       // show object stats on right side
       return {
@@ -40,12 +48,20 @@
   };

   const getObjectArea = () => {
+    if (!timeline.data.box) {
+      return 0;
+    }
+
     const width = timeline.data.box[2] * cameraConfig.detect.width;
     const height = timeline.data.box[3] * cameraConfig.detect.height;
     return Math.round(width * height);
   };

   const getObjectRatio = () => {
+    if (!timeline.data.box) {
+      return 0.0;
+    }
+
     const width = timeline.data.box[2] * cameraConfig.detect.width;
     const height = timeline.data.box[3] * cameraConfig.detect.height;
     return Math.round(100 * (width / height)) / 100;

View File

@ -0,0 +1,411 @@
import {
MutableRefObject,
useCallback,
useEffect,
useMemo,
useRef,
useState,
} from "react";
import VideoPlayer from "./VideoPlayer";
import Player from "video.js/dist/types/player";
import TimelineEventOverlay from "../overlay/TimelineDataOverlay";
import { useApiHost } from "@/api";
import useSWR from "swr";
import { FrigateConfig } from "@/types/frigateConfig";
import ActivityIndicator from "../ui/activity-indicator";
import useKeyboardListener from "@/hooks/use-keyboard-listener";
/**
* Dynamically switches between video playback and scrubbing preview player.
*/
type DynamicVideoPlayerProps = {
className?: string;
camera: string;
timeRange: { start: number; end: number };
cameraPreviews: Preview[];
onControllerReady?: (controller: DynamicVideoController) => void;
};
export default function DynamicVideoPlayer({
className,
camera,
timeRange,
cameraPreviews,
onControllerReady,
}: DynamicVideoPlayerProps) {
const apiHost = useApiHost();
const { data: config } = useSWR<FrigateConfig>("config");
const timezone = useMemo(
() =>
config?.ui?.timezone || Intl.DateTimeFormat().resolvedOptions().timeZone,
[config]
);
// controlling playback
const playerRef = useRef<Player | undefined>(undefined);
const previewRef = useRef<Player | undefined>(undefined);
const [isScrubbing, setIsScrubbing] = useState(false);
const [hasPreview, setHasPreview] = useState(false);
const [focusedItem, setFocusedItem] = useState<Timeline | undefined>(
undefined
);
const controller = useMemo(() => {
if (!config) {
return undefined;
}
return new DynamicVideoController(
playerRef,
previewRef,
(config.cameras[camera]?.detect?.annotation_offset || 0) / 1000,
setIsScrubbing,
setFocusedItem
);
}, [config]);
// keyboard control
const onKeyboardShortcut = useCallback(
(key: string, down: boolean, repeat: boolean) => {
switch (key) {
case "ArrowLeft":
if (down) {
const currentTime = playerRef.current?.currentTime();
if (currentTime) {
playerRef.current?.currentTime(Math.max(0, currentTime - 5));
}
}
break;
case "ArrowRight":
if (down) {
const currentTime = playerRef.current?.currentTime();
if (currentTime) {
playerRef.current?.currentTime(currentTime + 5);
}
}
break;
case "m":
if (down && !repeat && playerRef.current) {
playerRef.current.muted(!playerRef.current.muted());
}
break;
case " ":
if (down && playerRef.current) {
if (playerRef.current.paused()) {
playerRef.current.play();
} else {
playerRef.current.pause();
}
}
break;
}
},
[playerRef]
);
useKeyboardListener(
["ArrowLeft", "ArrowRight", "m", " "],
onKeyboardShortcut
);
// initial state
const initialPlaybackSource = useMemo(() => {
const date = new Date(timeRange.start * 1000);
return {
src: `${apiHost}vod/${date.getFullYear()}-${
date.getMonth() + 1
}/${date.getDate()}/${date.getHours()}/${camera}/${timezone.replaceAll(
"/",
","
)}/master.m3u8`,
type: "application/vnd.apple.mpegurl",
};
}, []);
const initialPreviewSource = useMemo(() => {
const preview = cameraPreviews.find(
(preview) =>
Math.round(preview.start) >= timeRange.start &&
Math.floor(preview.end) <= timeRange.end
);
if (preview) {
setHasPreview(true);
return {
src: preview.src,
type: preview.type,
};
} else {
setHasPreview(false);
return undefined;
}
}, []);
// state of playback player
const recordingParams = useMemo(() => {
return {
before: timeRange.end,
after: timeRange.start,
};
}, [timeRange]);
const { data: recordings } = useSWR<Recording[]>(
[`${camera}/recordings`, recordingParams],
{ revalidateOnFocus: false }
);
useEffect(() => {
if (!controller || !recordings || recordings.length == 0) {
return;
}
const date = new Date(timeRange.start * 1000);
const playbackUri = `${apiHost}vod/${date.getFullYear()}-${
date.getMonth() + 1
}/${date.getDate()}/${date.getHours()}/${camera}/${timezone.replaceAll(
"/",
","
)}/master.m3u8`;
const preview = cameraPreviews.find(
(preview) =>
Math.round(preview.start) >= timeRange.start &&
Math.floor(preview.end) <= timeRange.end
);
setHasPreview(preview != undefined);
controller.newPlayback({
recordings,
playbackUri,
preview,
});
}, [controller, recordings]);
if (!controller) {
return <ActivityIndicator />;
}
return (
<div className={className}>
<div
className={`w-full relative ${
hasPreview && isScrubbing ? "hidden" : "visible"
}`}
>
<VideoPlayer
options={{
preload: "auto",
autoplay: true,
sources: [initialPlaybackSource],
controlBar: {
remainingTimeDisplay: false,
progressControl: {
seekBar: false,
},
},
}}
seekOptions={{ forward: 10, backward: 5 }}
onReady={(player) => {
playerRef.current = player;
player.on("playing", () => setFocusedItem(undefined));
player.on("timeupdate", () => {
controller.updateProgress(player.currentTime() || 0);
});
if (onControllerReady) {
onControllerReady(controller);
}
}}
onDispose={() => {
playerRef.current = undefined;
}}
>
{config && focusedItem && (
<TimelineEventOverlay
timeline={focusedItem}
cameraConfig={config.cameras[camera]}
/>
)}
</VideoPlayer>
</div>
<div
className={`w-full ${hasPreview && isScrubbing ? "visible" : "hidden"}`}
>
<VideoPlayer
options={{
preload: "auto",
autoplay: true,
controls: false,
muted: true,
loadingSpinner: false,
sources: hasPreview ? initialPreviewSource : null,
}}
seekOptions={{}}
onReady={(player) => {
previewRef.current = player;
player.pause();
player.on("seeked", () => controller.finishedSeeking());
}}
onDispose={() => {
previewRef.current = undefined;
}}
/>
</div>
</div>
);
}
export class DynamicVideoController {
// main state
private playerRef: MutableRefObject<Player | undefined>;
private previewRef: MutableRefObject<Player | undefined>;
private setScrubbing: (isScrubbing: boolean) => void;
private setFocusedItem: (timeline: Timeline) => void;
private playerMode: "playback" | "scrubbing" = "playback";
// playback
private recordings: Recording[] = [];
private onPlaybackTimestamp: ((time: number) => void) | undefined = undefined;
private annotationOffset: number;
private timeToStart: number | undefined = undefined;
// preview
private preview: Preview | undefined = undefined;
private timeToSeek: number | undefined = undefined;
private seeking = false;
constructor(
playerRef: MutableRefObject<Player | undefined>,
previewRef: MutableRefObject<Player | undefined>,
annotationOffset: number,
setScrubbing: (isScrubbing: boolean) => void,
setFocusedItem: (timeline: Timeline) => void
) {
this.playerRef = playerRef;
this.previewRef = previewRef;
this.annotationOffset = annotationOffset;
this.setScrubbing = setScrubbing;
this.setFocusedItem = setFocusedItem;
}
newPlayback(newPlayback: DynamicPlayback) {
this.recordings = newPlayback.recordings;
this.playerRef.current?.src({
src: newPlayback.playbackUri,
type: "application/vnd.apple.mpegurl",
});
if (this.timeToStart) {
this.seekToTimestamp(this.timeToStart);
this.timeToStart = undefined;
}
this.preview = newPlayback.preview;
if (this.preview && this.previewRef.current) {
this.previewRef.current.src({
src: this.preview.src,
type: this.preview.type,
});
}
}
seekToTimestamp(time: number, play: boolean = false) {
if (this.playerMode != "playback") {
this.playerMode = "playback";
this.setScrubbing(false);
this.timeToSeek = undefined;
this.seeking = false;
}
if (this.recordings.length == 0) {
this.timeToStart = time;
}
let seekSeconds = 0;
(this.recordings || []).every((segment) => {
// if the next segment is past the desired time, stop calculating
if (segment.start_time > time) {
return false;
}
if (segment.end_time < time) {
seekSeconds += segment.end_time - segment.start_time;
return true;
}
seekSeconds +=
segment.end_time - segment.start_time - (segment.end_time - time);
return true;
});
this.playerRef.current?.currentTime(seekSeconds);
if (play) {
this.playerRef.current?.play();
}
}
seekToTimelineItem(timeline: Timeline) {
this.playerRef.current?.pause();
this.seekToTimestamp(timeline.timestamp + this.annotationOffset);
this.setFocusedItem(timeline);
}
updateProgress(playerTime: number) {
if (this.onPlaybackTimestamp) {
// take a player time in seconds and convert to timestamp in timeline
let timestamp = 0;
let totalTime = 0;
(this.recordings || []).every((segment) => {
if (totalTime + segment.duration > playerTime) {
// segment is here
timestamp = segment.start_time + (playerTime - totalTime);
return false;
} else {
totalTime += segment.duration;
return true;
}
});
this.onPlaybackTimestamp(timestamp);
}
}
onPlayerTimeUpdate(listener: (timestamp: number) => void) {
this.onPlaybackTimestamp = listener;
}
scrubToTimestamp(time: number) {
if (this.playerMode != "scrubbing") {
this.playerMode = "scrubbing";
this.playerRef.current?.pause();
this.setScrubbing(true);
}
if (this.preview) {
if (this.seeking) {
this.timeToSeek = time;
} else {
this.previewRef.current?.currentTime(time - this.preview.start);
this.seeking = true;
}
}
}
finishedSeeking() {
if (!this.preview || this.playerMode == "playback") {
return;
}
if (
this.timeToSeek &&
this.timeToSeek != this.previewRef.current?.currentTime()
) {
this.previewRef.current?.currentTime(
this.timeToSeek - this.preview.start
);
} else {
this.seeking = false;
}
}
}
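The `DynamicVideoController` above converts between wall-clock timestamps and the player's internal clock by walking the recording segments: `seekToTimestamp` accumulates segment lengths up to the requested time, and `updateProgress` performs the inverse mapping. Here is a small sketch of that bookkeeping, written in Python with made-up segments rather than the component's actual types:

```python
from dataclasses import dataclass

@dataclass
class Segment:
    start_time: float  # unix seconds
    end_time: float
    duration: float

# made-up segments with a gap between them (e.g. recording was paused)
segments = [
    Segment(1000.0, 1010.0, 10.0),
    Segment(1020.0, 1030.0, 10.0),
]

def timestamp_to_player_time(ts: float) -> float:
    """Seconds into the stitched playlist for a wall-clock timestamp."""
    offset = 0.0
    for seg in segments:
        if seg.start_time > ts:
            break
        if seg.end_time < ts:
            offset += seg.end_time - seg.start_time
        else:
            offset += ts - seg.start_time
            break
    return offset

def player_time_to_timestamp(player_time: float) -> float:
    """Inverse mapping: player seconds back to a wall-clock timestamp."""
    total = 0.0
    for seg in segments:
        if total + seg.duration > player_time:
            return seg.start_time + (player_time - total)
        total += seg.duration
    return segments[-1].end_time if segments else 0.0

assert timestamp_to_player_time(1025.0) == 15.0   # 10s of segment 1 + 5s into segment 2
assert player_time_to_timestamp(15.0) == 1025.0
```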

View File

@@ -1,6 +1,4 @@
-import { FrigateConfig } from "@/types/frigateConfig";
 import VideoPlayer from "./VideoPlayer";
-import useSWR from "swr";
 import React, {
   useCallback,
   useEffect,
@@ -12,6 +10,7 @@ import { useApiHost } from "@/api";
 import Player from "video.js/dist/types/player";
 import { AspectRatio } from "../ui/aspect-ratio";
 import { LuPlayCircle } from "react-icons/lu";
+import { isCurrentHour } from "@/utils/dateUtil";

 type PreviewPlayerProps = {
   camera: string;
@@ -38,7 +37,6 @@ export default function PreviewThumbnailPlayer({
   isMobile,
   onClick,
 }: PreviewPlayerProps) {
-  const { data: config } = useSWR("config");
   const playerRef = useRef<Player | null>(null);
   const isSafari = useMemo(() => {
     return /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
@@ -54,7 +52,10 @@
       }

       if (!playerRef.current) {
-        setIsInitiallyVisible(true);
+        if (isHovered) {
+          setIsInitiallyVisible(true);
+        }
+
         return;
       }
@@ -105,6 +106,7 @@
       {
         threshold: 1.0,
         root: document.getElementById("pageRoot"),
+        rootMargin: "-10% 0px -25% 0px",
       }
     );
     if (node) autoPlayObserver.current.observe(node);
@@ -131,7 +133,6 @@
         isInitiallyVisible={isInitiallyVisible}
         startTs={startTs}
         camera={camera}
-        config={config}
         eventId={eventId}
         isMobile={isMobile}
         isSafari={isSafari}
@@ -143,7 +144,6 @@
 type PreviewContentProps = {
   playerRef: React.MutableRefObject<Player | null>;
-  config: FrigateConfig;
   camera: string;
   relevantPreview: Preview | undefined;
   eventId: string;
@@ -156,7 +156,6 @@
 };
 function PreviewContent({
   playerRef,
-  config,
   camera,
   relevantPreview,
   eventId,
@@ -195,22 +194,13 @@
   if (relevantPreview && !isVisible) {
     return <div />;
-  } else if (!relevantPreview) {
-    if (isCurrentHour(startTs)) {
-      return (
-        <img
-          className={`${getPreviewWidth(camera, config)}`}
-          src={`${apiHost}api/preview/${camera}/${startTs}/thumbnail.jpg`}
-        />
-      );
-    } else {
-      return (
-        <img
-          className="w-[160px]"
-          src={`${apiHost}api/events/${eventId}/thumbnail.jpg`}
-        />
-      );
-    }
+  } else if (!relevantPreview && !isCurrentHour(startTs)) {
+    return (
+      <img
+        className="w-[160px]"
+        src={`${apiHost}api/events/${eventId}/thumbnail.jpg`}
+      />
+    );
   } else {
     return (
       <>
@@ -223,17 +213,26 @@
             controls: false,
             muted: true,
             loadingSpinner: false,
-            sources: [
-              {
-                src: `${relevantPreview.src}`,
-                type: "video/mp4",
-              },
-            ],
+            poster: relevantPreview
+              ? ""
+              : `${apiHost}api/preview/${camera}/${startTs}/thumbnail.jpg`,
+            sources: relevantPreview
+              ? [
+                  {
+                    src: `${relevantPreview.src}`,
+                    type: "video/mp4",
+                  },
+                ]
+              : [],
           }}
           seekOptions={{}}
           onReady={(player) => {
             playerRef.current = player;
+
+            if (!relevantPreview) {
+              return;
+            }
+
             if (!isInitiallyVisible) {
               player.pause(); // autoplay + pause is required for iOS
             }
@@ -249,28 +248,10 @@
           }}
         />
       </div>
-      <LuPlayCircle className="absolute z-10 left-1 bottom-1 w-4 h-4 text-white text-opacity-60" />
+      {relevantPreview && (
+        <LuPlayCircle className="absolute z-10 left-1 bottom-1 w-4 h-4 text-white text-opacity-60" />
+      )}
       </>
     );
   }
 }
-
-function isCurrentHour(timestamp: number) {
-  const now = new Date();
-  now.setMinutes(0, 0, 0);
-  return timestamp > now.getTime() / 1000;
-}
-
-function getPreviewWidth(camera: string, config: FrigateConfig) {
-  const detect = config.cameras[camera].detect;
-
-  if (detect.width / detect.height < 1) {
-    return "w-1/2";
-  }
-
-  if (detect.width / detect.height < 16 / 9) {
-    return "w-2/3";
-  }
-
-  return "w-full";
-}

View File

@@ -9,6 +9,8 @@ import {
 } from "vis-timeline";
 import type { DataGroup, DataItem, TimelineEvents } from "vis-timeline/types";
 import "./scrubber.css";
+import useSWR from "swr";
+import { FrigateConfig } from "@/types/frigateConfig";

 export type TimelineEventsWithMissing =
   | TimelineEvents
@@ -89,14 +91,13 @@ function ActivityScrubber({
   options,
   ...eventHandlers
 }: ActivityScrubberProps) {
+  const { data: config } = useSWR<FrigateConfig>("config");
   const containerRef = useRef<HTMLDivElement>(null);
   const timelineRef = useRef<{ timeline: VisTimeline | null }>({
     timeline: null,
   });
   const [currentTime, setCurrentTime] = useState(Date.now());
-  const [_, setCustomTimes] = useState<
-    { id: IdType; time: DateType }[]
-  >([]);
+  const [_, setCustomTimes] = useState<{ id: IdType; time: DateType }[]>([]);

   const defaultOptions: TimelineOptions = {
     width: "100%",
@@ -110,8 +111,11 @@
     max: currentTime,
     format: {
       minorLabels: {
-        minute: "h:mma",
-        hour: "ha",
+        minute: config?.ui.time_format == "24hour" ? "HH:mm" : "hh:mma",
+      },
+      majorLabels: {
+        minute:
+          config?.ui.time_format == "24hour" ? "MM/DD HH:mm" : "MM/DD hh:mma",
       },
     },
   };
@@ -139,8 +143,8 @@
     const timelineInstance = new VisTimeline(
       divElement,
-      items as DataItem[],
-      groups as DataGroup[],
+      (items || []) as DataItem[],
+      (groups || []) as DataGroup[],
       timelineOptions
     );

View File

@ -0,0 +1,43 @@
import { useCallback, useEffect } from "react";
export default function useKeyboardListener(
keys: string[],
listener: (key: string, down: boolean, repeat: boolean) => void
) {
const keyDownListener = useCallback(
(e: KeyboardEvent) => {
if (!e) {
return;
}
if (keys.includes(e.key)) {
e.preventDefault();
listener(e.key, true, e.repeat);
}
},
[listener]
);
const keyUpListener = useCallback(
(e: KeyboardEvent) => {
if (!e) {
return;
}
if (keys.includes(e.key)) {
e.preventDefault();
listener(e.key, false, false);
}
},
[listener]
);
useEffect(() => {
document.addEventListener("keydown", keyDownListener);
document.addEventListener("keyup", keyUpListener);
return () => {
document.removeEventListener("keydown", keyDownListener);
document.removeEventListener("keyup", keyUpListener);
};
}, [listener]);
}

View File

@@ -122,7 +122,7 @@ function ConfigEditor() {
   }

   return (
-    <div className="absolute top-24 bottom-16 right-0 left-0 md:left-24 lg:left-40">
+    <div className="absolute top-28 bottom-16 right-0 left-0 md:left-24 lg:left-60">
       <div className="lg:flex justify-between mr-1">
         <Heading as="h2">Config</Heading>
         <div>

View File

@@ -159,7 +159,7 @@ function Camera({ camera }: { camera: CameraConfig }) {
             onClick={(e) => {
               e.stopPropagation();
               e.preventDefault();
-              sendAudio(detectValue == "ON" ? "OFF" : "ON");
+              sendAudio(audioValue == "ON" ? "OFF" : "ON");
             }}
           >
             <LuEar />

View File

@@ -268,7 +268,7 @@ function TimelineViewer({
   return (
     <Dialog open={playback != undefined} onOpenChange={(_) => onClose()}>
-      <DialogContent className="md:max-w-2xl lg:max-w-4xl xl:max-w-6xl 2xl:max-w-7xl 3xl:max-w-[1720px]">
+      <DialogContent className="w-[70%] max-w-[1920px] h-[90%]">
        {timelineData && playback && (
          <DesktopTimelineView
            timelineData={timelineData}

View File

@@ -48,7 +48,7 @@ function Live() {
   const defaultLiveMode = useMemo(() => {
     if (cameraConfig) {
       if (restreamEnabled) {
-        return cameraConfig.ui.live_mode;
+        return cameraConfig.ui.live_mode || config?.ui.live_mode;
       }

       return "jsmpeg";
@ -65,55 +65,75 @@ function Live() {
<div className=" w-full"> <div className=" w-full">
<div className="flex justify-between"> <div className="flex justify-between">
<Heading as="h2">Live</Heading> <Heading as="h2">Live</Heading>
<div> <div className="flex">
<DropdownMenu> <div className="mx-1">
<DropdownMenuTrigger asChild> <DropdownMenu>
<Button className="capitalize" variant="outline"> <DropdownMenuTrigger asChild>
{camera?.replaceAll("_", " ") || "Select A Camera"} <Button className="capitalize" variant="outline">
</Button> {camera?.replaceAll("_", " ") || "Select A Camera"}
</DropdownMenuTrigger> </Button>
<DropdownMenuContent> </DropdownMenuTrigger>
<DropdownMenuLabel>Select A Camera</DropdownMenuLabel> <DropdownMenuContent>
<DropdownMenuSeparator /> <DropdownMenuLabel>Select A Camera</DropdownMenuLabel>
<DropdownMenuRadioGroup value={camera} onValueChange={setCamera}> <DropdownMenuSeparator />
{sortedCameras.map((item) => ( <DropdownMenuRadioGroup
<DropdownMenuRadioItem value={camera}
className="capitalize" onValueChange={setCamera}
key={item.name} >
value={item.name} {config?.birdseye.enabled && (
> <DropdownMenuRadioItem value="birdseye">
{item.name.replaceAll("_", " ")} Birdseye
</DropdownMenuRadioItem>
)}
{sortedCameras.map((item) => (
<DropdownMenuRadioItem
className="capitalize"
key={item.name}
value={item.name}
>
{item.name.replaceAll("_", " ")}
</DropdownMenuRadioItem>
))}
</DropdownMenuRadioGroup>
</DropdownMenuContent>
</DropdownMenu>
</div>
<div className="mx-1">
<DropdownMenu>
<DropdownMenuTrigger asChild>
<Button className="capitalize" variant="outline">
{viewSource || defaultLiveMode || "Select A Live Mode"}
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent>
<DropdownMenuLabel>Select A Live Mode</DropdownMenuLabel>
<DropdownMenuSeparator />
<DropdownMenuRadioGroup
value={`${viewSource}`}
onValueChange={setViewSource}
>
{restreamEnabled && (
<DropdownMenuRadioItem value="webrtc">
Webrtc
</DropdownMenuRadioItem>
)}
{restreamEnabled && (
<DropdownMenuRadioItem value="mse">
MSE
</DropdownMenuRadioItem>
)}
<DropdownMenuRadioItem value="jsmpeg">
Jsmpeg
</DropdownMenuRadioItem> </DropdownMenuRadioItem>
))} {camera != "birdseye" && (
</DropdownMenuRadioGroup> <DropdownMenuRadioItem value="debug">
</DropdownMenuContent> Debug
</DropdownMenu> </DropdownMenuRadioItem>
<DropdownMenu> )}
<DropdownMenuTrigger asChild> </DropdownMenuRadioGroup>
<Button className="capitalize" variant="outline"> </DropdownMenuContent>
{viewSource || defaultLiveMode || "Select A Live Mode"} </DropdownMenu>
</Button> </div>
</DropdownMenuTrigger>
<DropdownMenuContent>
<DropdownMenuLabel>Select A Live Mode</DropdownMenuLabel>
<DropdownMenuSeparator />
<DropdownMenuRadioGroup
value={`${viewSource}`}
onValueChange={setViewSource}
>
<DropdownMenuRadioItem value="webrtc">
Webrtc
</DropdownMenuRadioItem>
<DropdownMenuRadioItem value="mse">MSE</DropdownMenuRadioItem>
<DropdownMenuRadioItem value="jsmpeg">
Jsmpeg
</DropdownMenuRadioItem>
<DropdownMenuRadioItem value="debug">
Debug
</DropdownMenuRadioItem>
</DropdownMenuRadioGroup>
</DropdownMenuContent>
</DropdownMenu>
</div> </div>
</div> </div>
{config && camera == "birdseye" && sourceIsLoaded && ( {config && camera == "birdseye" && sourceIsLoaded && (

View File

@@ -21,33 +21,6 @@ type Preview = {
   end: number;
 };

-type Timeline = {
-  camera: string;
-  timestamp: number;
-  data: {
-    [key: string]: any;
-  };
-  class_type:
-    | "visible"
-    | "gone"
-    | "sub_label"
-    | "entered_zone"
-    | "attribute"
-    | "active"
-    | "stationary"
-    | "heard"
-    | "external";
-  source_id: string;
-  source: string;
-};
-
-type HourlyTimeline = {
-  start: number;
-  end: number;
-  count: number;
-  hours: { [key: string]: Timeline[] };
-};
-
 interface HistoryFilter extends FilterType {
   cameras: string[];
   labels: string[];

View File

@ -0,0 +1,5 @@
type DynamicPlayback = {
recordings: Recording[];
playbackUri: string;
preview: Preview | undefined;
};

View File

@@ -5,6 +5,7 @@ type Recording = {
   end_time: number;
   path: string;
   segment_size: number;
+  duration: number;
   motion: number;
   objects: number;
   dBFS: number;
@@ -17,6 +18,7 @@ type RecordingSegment = {
   motion: number;
   objects: number;
   segment_size: number;
+  duration: number;
 };

 type RecordingActivity = {
@@ -26,5 +28,5 @@ type RecordingActivity = {
 type RecordingSegmentActivity = {
   date: number;
   count: number;
-  type: "motion" | "objects";
+  hasObjects: boolean;
 };

web/src/types/timeline.ts (new file, 31 lines)
View File

@ -0,0 +1,31 @@
type Timeline = {
camera: string;
timestamp: number;
data: {
camera: string;
label: string;
sub_label: string;
box?: [number, number, number, number];
region: [number, number, number, number];
attribute: string;
zones: string[];
};
class_type:
| "visible"
| "gone"
| "entered_zone"
| "attribute"
| "active"
| "stationary"
| "heard"
| "external";
source_id: string;
source: string;
};
type HourlyTimeline = {
start: number;
end: number;
count: number;
hours: { [key: string]: Timeline[] };
};

View File

@@ -282,6 +282,14 @@ export function getRangeForTimestamp(timestamp: number) {
   date.setMinutes(0, 0, 0);
   const start = date.getTime() / 1000;
   date.setHours(date.getHours() + 1);
-  const end = date.getTime() / 1000;
+
+  // ensure not to go past current time
+  const end = Math.min(new Date().getTime() / 1000, date.getTime() / 1000);
   return { start, end };
 }
+
+export function isCurrentHour(timestamp: number) {
+  const now = new Date();
+  now.setMinutes(0, 0, 0);
+  return timestamp > now.getTime() / 1000;
+}

View File

@ -1,158 +1,178 @@
// group history cards by 60 seconds of activity // group history cards by 120 seconds of activity
const GROUP_SECONDS = 60; const GROUP_SECONDS = 120;
export function getHourlyTimelineData( export function getHourlyTimelineData(
timelinePages: HourlyTimeline[], timelinePages: HourlyTimeline[],
detailLevel: string detailLevel: string
): CardsData { ): CardsData {
const cards: CardsData = {}; const cards: CardsData = {};
const allHours: { [key: string]: Timeline[] } = {};
timelinePages.forEach((hourlyTimeline) => { timelinePages.forEach((hourlyTimeline) => {
Object.keys(hourlyTimeline["hours"]) Object.entries(hourlyTimeline.hours).forEach(([key, values]) => {
.reverse() if (key in allHours) {
.forEach((hour) => { // only occurs when multiple pages contain elements in the same hour
const day = new Date(parseInt(hour) * 1000); allHours[key] = allHours[key]
day.setHours(0, 0, 0, 0); .concat(values)
const dayKey = (day.getTime() / 1000).toString(); .sort((a, b) => a.timestamp - b.timestamp);
} else {
// build a map of course to the types that are included in this hour allHours[key] = values;
// which allows us to know what items to keep depending on detail level }
const source_to_types: { [key: string]: string[] } = {}; });
let cardTypeStart: { [camera: string]: number } = {};
Object.values(hourlyTimeline["hours"][hour]).forEach((i) => {
if (i.timestamp > (cardTypeStart[i.camera] ?? 0) + GROUP_SECONDS) {
cardTypeStart[i.camera] = i.timestamp;
}
const groupKey = `${i.source_id}-${cardTypeStart[i.camera]}`;
if (groupKey in source_to_types) {
source_to_types[groupKey].push(i.class_type);
} else {
source_to_types[groupKey] = [i.class_type];
}
});
if (!(dayKey in cards)) {
cards[dayKey] = {};
}
if (!(hour in cards[dayKey])) {
cards[dayKey][hour] = {};
}
let cardStart: { [camera: string]: number } = {};
Object.values(hourlyTimeline["hours"][hour]).forEach((i) => {
if (i.timestamp > (cardStart[i.camera] ?? 0) + GROUP_SECONDS) {
cardStart[i.camera] = i.timestamp;
}
const time = new Date(i.timestamp * 1000);
const groupKey = `${i.camera}-${cardStart[i.camera]}`;
const sourceKey = `${i.source_id}-${cardStart[i.camera]}`;
const uniqueKey = `${i.source_id}-${i.class_type}`;
// detail level for saving items
// detail level determines which timeline items for each moment is returned
// values can be normal, extra, or full
// normal: return all items except active / attribute / gone / stationary / visible unless that is the only item.
// extra: return all items except attribute / gone / visible unless that is the only item
// full: return all items
let add = true;
if (detailLevel == "normal") {
if (
source_to_types[sourceKey].length > 1 &&
["active", "attribute", "gone", "stationary", "visible"].includes(
i.class_type
)
) {
add = false;
}
} else if (detailLevel == "extra") {
if (
source_to_types[sourceKey].length > 1 &&
i.class_type in ["attribute", "gone", "visible"]
) {
add = false;
}
}
if (add) {
if (groupKey in cards[dayKey][hour]) {
if (
!cards[dayKey][hour][groupKey].uniqueKeys.includes(uniqueKey) ||
detailLevel == "full"
) {
cards[dayKey][hour][groupKey].entries.push(i);
cards[dayKey][hour][groupKey].uniqueKeys.push(uniqueKey);
}
} else {
cards[dayKey][hour][groupKey] = {
camera: i.camera,
time: time.getTime() / 1000,
entries: [i],
uniqueKeys: [uniqueKey],
};
}
}
});
});
}); });
Object.keys(allHours)
.sort((a, b) => a.localeCompare(b))
.reverse()
.forEach((hour) => {
const day = new Date(parseInt(hour) * 1000);
day.setHours(0, 0, 0, 0);
const dayKey = (day.getTime() / 1000).toString();
// build a map of course to the types that are included in this hour
// which allows us to know what items to keep depending on detail level
const sourceToTypes: { [key: string]: string[] } = {};
let cardTypeStart: { [camera: string]: number } = {};
Object.values(allHours[hour]).forEach((i) => {
if (i.timestamp > (cardTypeStart[i.camera] ?? 0) + GROUP_SECONDS) {
cardTypeStart[i.camera] = i.timestamp;
}
const groupKey = `${i.source_id}-${cardTypeStart[i.camera]}`;
if (groupKey in sourceToTypes) {
sourceToTypes[groupKey].push(i.class_type);
} else {
sourceToTypes[groupKey] = [i.class_type];
}
});
if (!(dayKey in cards)) {
cards[dayKey] = {};
}
if (!(hour in cards[dayKey])) {
cards[dayKey][hour] = {};
}
let cardStart: { [camera: string]: number } = {};
Object.values(allHours[hour]).forEach((i) => {
if (i.timestamp > (cardStart[i.camera] ?? 0) + GROUP_SECONDS) {
cardStart[i.camera] = i.timestamp;
}
const time = new Date(i.timestamp * 1000);
const groupKey = `${i.camera}-${cardStart[i.camera]}`;
const sourceKey = `${i.source_id}-${cardStart[i.camera]}`;
const uniqueKey = `${i.source_id}-${i.class_type}`;
// detail level for saving items
// detail level determines which timeline items for each moment is returned
// values can be normal, extra, or full
// normal: return all items except active / attribute / gone / stationary / visible unless that is the only item.
// extra: return all items except attribute / gone / visible unless that is the only item
// full: return all items
let add = true;
const sourceType = sourceToTypes[sourceKey];
let hiddenItems: string[] = [];
if (detailLevel == "normal") {
hiddenItems = [
"active",
"attribute",
"gone",
"stationary",
"visible",
];
} else if (detailLevel == "extra") {
hiddenItems = ["attribute", "gone", "visible"];
}
if (sourceType.length > 1) {
// we have multiple timeline items for this card
if (
sourceType.find((type) => hiddenItems.includes(type) == false) ==
undefined
) {
// all of the attribute items for this card make it hidden, but we need to show one
if (sourceType.indexOf(i.class_type) != 0) {
add = false;
}
} else if (hiddenItems.includes(i.class_type)) {
add = false;
}
}
if (add) {
if (groupKey in cards[dayKey][hour]) {
if (
!cards[dayKey][hour][groupKey].uniqueKeys.includes(uniqueKey) ||
detailLevel == "full"
) {
cards[dayKey][hour][groupKey].entries.push(i);
cards[dayKey][hour][groupKey].uniqueKeys.push(uniqueKey);
}
} else {
cards[dayKey][hour][groupKey] = {
camera: i.camera,
time: time.getTime() / 1000,
entries: [i],
uniqueKeys: [uniqueKey],
};
}
}
});
});
   return cards;
 }
 
 export function getTimelineHoursForDay(
   camera: string,
   cards: CardsData,
-  allPreviews: Preview[],
+  cameraPreviews: Preview[],
   timestamp: number
 ): HistoryTimeline {
-  const now = new Date();
+  const endOfThisHour = new Date();
+  endOfThisHour.setHours(endOfThisHour.getHours() + 1, 0, 0, 0);
   const data: TimelinePlayback[] = [];
   const startDay = new Date(timestamp * 1000);
   startDay.setHours(23, 59, 59, 999);
-  const dayEnd = startDay.getTime() / 1000;
   startDay.setHours(0, 0, 0, 0);
   const startTimestamp = startDay.getTime() / 1000;
   let start = startDay.getTime() / 1000;
   let end = 0;
-  const relevantPreviews = allPreviews.filter((preview) => {
-    return (
-      preview.camera == camera &&
-      preview.start >= start &&
-      Math.floor(preview.end - 1) <= dayEnd
-    );
-  });
   const dayIdx = Object.keys(cards).find((day) => {
-    if (parseInt(day) > start) {
+    if (parseInt(day) < start) {
       return false;
     }
     return true;
   });
-  if (dayIdx == undefined) {
-    return { start: 0, end: 0, playbackItems: [] };
-  }
-  const day = cards[dayIdx];
+  let day: {
+    [hour: string]: {
+      [groupKey: string]: Card;
+    };
+  } = {};
+  if (dayIdx != undefined) {
+    day = cards[dayIdx];
+  }
   for (let i = 0; i < 24; i++) {
     startDay.setHours(startDay.getHours() + 1);
-    if (startDay > now) {
+    if (startDay > endOfThisHour) {
       break;
     }
     end = startDay.getTime() / 1000;
     const hour = Object.values(day).find((cards) => {
-      if (
-        Object.values(cards)[0].time < start ||
-        Object.values(cards)[0].time > end
-      ) {
+      const card = Object.values(cards)[0];
+      if (card == undefined || card.time < start || card.time > end) {
         return false;
       }
@@ -167,7 +187,7 @@ export function getTimelineHoursForDay(
           return [];
         })
       : [];
-    const relevantPreview = relevantPreviews.find(
+    const relevantPreview = cameraPreviews.find(
       (preview) =>
         Math.round(preview.start) >= start && Math.floor(preview.end) <= end
     );
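Since getTimelineHoursForDay now expects previews that are already limited to a single camera, callers are expected to pre-filter them. A minimal sketch of that call, assuming the frontend's Preview and CardsData types (buildDayTimeline is a hypothetical helper; the real call sites are the timeline views further down):

import { getTimelineHoursForDay } from "@/utils/historyUtil";

function buildDayTimeline(
  camera: string,
  cards: CardsData,
  allPreviews: Preview[],
  timestamp: number
) {
  // keep only this camera's previews; the per-day preview filtering that
  // previously lived inside getTimelineHoursForDay was removed with dayEnd
  const cameraPreviews = allPreviews.filter(
    (preview) => preview.camera == camera
  );

  return getTimelineHoursForDay(camera, cards, cameraPreviews, timestamp);
}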
@@ -42,15 +42,6 @@ export function getTimelineIcon(timelineItem: Timeline) {
         default:
           return <LuTruck className="w-4 mr-1" />;
       }
-    case "sub_label":
-      switch (timelineItem.data.label) {
-        case "person":
-          return <MdFaceUnlock className="w-4 mr-1" />;
-        case "car":
-          return <MdOutlinePictureInPictureAlt className="w-4 mr-1" />;
-        default:
-          return <LuCircleDot className="w-4 mr-1" />;
-      }
     case "heard":
       return <LuEar className="w-4 mr-1" />;
     case "external":
@@ -119,8 +110,6 @@ export function getTimelineItemDescription(timelineItem: Timeline) {
       }
       return title;
     }
-    case "sub_label":
-      return `${timelineItem.data.label} recognized as ${timelineItem.data.sub_label}`;
     case "gone":
       return `${label} left`;
     case "heard":
@@ -1,17 +1,17 @@
-import { useApiHost } from "@/api";
-import TimelineEventOverlay from "@/components/overlay/TimelineDataOverlay";
-import VideoPlayer from "@/components/player/VideoPlayer";
 import ActivityScrubber from "@/components/scrubber/ActivityScrubber";
 import ActivityIndicator from "@/components/ui/activity-indicator";
 import { FrigateConfig } from "@/types/frigateConfig";
-import { useCallback, useEffect, useMemo, useRef, useState } from "react";
+import { useEffect, useMemo, useRef, useState } from "react";
 import useSWR from "swr";
-import Player from "video.js/dist/types/player";
 import TimelineItemCard from "@/components/card/TimelineItemCard";
 import { getTimelineHoursForDay } from "@/utils/historyUtil";
 import { GraphDataPoint } from "@/types/graph";
 import TimelineGraph from "@/components/graph/TimelineGraph";
+import TimelineBar from "@/components/bar/TimelineBar";
+import DynamicVideoPlayer, {
+  DynamicVideoController,
+} from "@/components/player/DynamicVideoPlayer";
 
 type DesktopTimelineViewProps = {
   timelineData: CardsData;
@@ -24,7 +24,6 @@ export default function DesktopTimelineView({
   allPreviews,
   initialPlayback,
 }: DesktopTimelineViewProps) {
-  const apiHost = useApiHost();
   const { data: config } = useSWR<FrigateConfig>("config");
   const timezone = useMemo(
     () =>
@@ -32,137 +31,31 @@ export default function DesktopTimelineView({
[config] [config]
); );
const controllerRef = useRef<DynamicVideoController | undefined>(undefined);
const initialScrollRef = useRef<HTMLDivElement | null>(null);
const [selectedPlayback, setSelectedPlayback] = useState(initialPlayback); const [selectedPlayback, setSelectedPlayback] = useState(initialPlayback);
const [timelineTime, setTimelineTime] = useState(0);
const playerRef = useRef<Player | undefined>(undefined); // handle scrolling to initial timeline item
const previewRef = useRef<Player | undefined>(undefined);
const [scrubbing, setScrubbing] = useState(false);
const [focusedItem, setFocusedItem] = useState<Timeline | undefined>(
undefined
);
const [seeking, setSeeking] = useState(false);
const [timeToSeek, setTimeToSeek] = useState<number | undefined>(undefined);
const [timelineTime, setTimelineTime] = useState(
initialPlayback.timelineItems.length > 0
? initialPlayback.timelineItems[0].timestamp - initialPlayback.range.start
: 0
);
const annotationOffset = useMemo(() => {
if (!config) {
return 0;
}
return (
(config.cameras[initialPlayback.camera]?.detect?.annotation_offset || 0) /
1000
);
}, [config]);
const recordingParams = useMemo(() => {
return {
before: selectedPlayback.range.end,
after: selectedPlayback.range.start,
};
}, [selectedPlayback]);
const { data: recordings } = useSWR<Recording[]>(
selectedPlayback
? [`${selectedPlayback.camera}/recordings`, recordingParams]
: null,
{ revalidateOnFocus: false }
);
const playbackUri = useMemo(() => {
if (!selectedPlayback) {
return "";
}
const date = new Date(selectedPlayback.range.start * 1000);
return `${apiHost}vod/${date.getFullYear()}-${
date.getMonth() + 1
}/${date.getDate()}/${date.getHours()}/${
selectedPlayback.camera
}/${timezone.replaceAll("/", ",")}/master.m3u8`;
}, [selectedPlayback]);
const onSelectItem = useCallback(
(timeline: Timeline | undefined) => {
if (timeline) {
setFocusedItem(timeline);
const selected = timeline.timestamp;
playerRef.current?.pause();
let seekSeconds = 0;
(recordings || []).every((segment) => {
// if the next segment is past the desired time, stop calculating
if (segment.start_time > selected) {
return false;
}
if (segment.end_time < selected) {
seekSeconds += segment.end_time - segment.start_time;
return true;
}
seekSeconds +=
segment.end_time -
segment.start_time -
(segment.end_time - selected);
return true;
});
playerRef.current?.currentTime(seekSeconds);
} else {
setFocusedItem(undefined);
}
},
[annotationOffset, recordings, playerRef]
);
// handle seeking to next frame when seek is finished
useEffect(() => { useEffect(() => {
if (seeking) { if (initialScrollRef.current != null) {
return; initialScrollRef.current.scrollIntoView();
} }
}, [initialScrollRef]);
if (timeToSeek && timeToSeek != previewRef.current?.currentTime()) { const cameraPreviews = useMemo(() => {
setSeeking(true); return allPreviews.filter((preview) => {
previewRef.current?.currentTime(timeToSeek); return preview.camera == initialPlayback.camera;
}
}, [timeToSeek, seeking]);
// handle loading main / preview playback when selected hour changes
useEffect(() => {
if (!playerRef.current || !previewRef.current) {
return;
}
setTimelineTime(
selectedPlayback.timelineItems.length > 0
? selectedPlayback.timelineItems[0].timestamp
: selectedPlayback.range.start
);
playerRef.current.src({
src: playbackUri,
type: "application/vnd.apple.mpegurl",
}); });
}, []);
if (selectedPlayback.relevantPreview) {
previewRef.current.src({
src: selectedPlayback.relevantPreview.src,
type: selectedPlayback.relevantPreview.type,
});
}
}, [playerRef, previewRef, selectedPlayback]);
const timelineStack = useMemo( const timelineStack = useMemo(
() => () =>
getTimelineHoursForDay( getTimelineHoursForDay(
selectedPlayback.camera, selectedPlayback.camera,
timelineData, timelineData,
allPreviews, cameraPreviews,
selectedPlayback.range.start + 60 selectedPlayback.range.start + 60
), ),
[] []
@@ -179,31 +72,29 @@ export default function DesktopTimelineView({
     ],
     { revalidateOnFocus: false }
   );
 
   const timelineGraphData = useMemo(() => {
     if (!activity) {
       return {};
     }
 
     const graphData: {
-      [hour: string]: { objects: GraphDataPoint[]; motion: GraphDataPoint[] };
+      [hour: string]: { objects: number[]; motion: GraphDataPoint[] };
     } = {};
 
     Object.entries(activity).forEach(([hour, data]) => {
-      const objects: GraphDataPoint[] = [];
+      const objects: number[] = [];
       const motion: GraphDataPoint[] = [];
-      data.forEach((seg) => {
-        if (seg.type == "objects") {
-          objects.push({
-            x: new Date(seg.date * 1000),
-            y: seg.count,
-          });
-        } else {
-          motion.push({
-            x: new Date(seg.date * 1000),
-            y: seg.count,
-          });
+      data.forEach((seg, idx) => {
+        if (seg.hasObjects) {
+          objects.push(idx);
         }
+
+        motion.push({
+          x: new Date(seg.date * 1000),
+          y: seg.count,
+        });
       });
 
       graphData[hour] = { objects, motion };
@@ -217,191 +108,133 @@ export default function DesktopTimelineView({
} }
return ( return (
<div className="w-full"> <div className="w-full flex flex-col">
<div className="flex"> <div className="flex max-h-[60%]">
<> <DynamicVideoPlayer
<div className="w-2/3 bg-black flex justify-center items-center"> className="w-2/3 bg-black flex justify-center items-center"
<div camera={initialPlayback.camera}
className={`w-full relative ${ timeRange={selectedPlayback.range}
selectedPlayback.relevantPreview != undefined && scrubbing cameraPreviews={cameraPreviews}
? "hidden" onControllerReady={(controller) => {
: "visible" controllerRef.current = controller;
}`} controllerRef.current.onPlayerTimeUpdate((timestamp: number) => {
> setTimelineTime(timestamp);
<VideoPlayer });
options={{
preload: "auto",
autoplay: true,
sources: [
{
src: playbackUri,
type: "application/vnd.apple.mpegurl",
},
],
controlBar: {
remainingTimeDisplay: false,
progressControl: {
seekBar: false,
},
},
}}
seekOptions={{ forward: 10, backward: 5 }}
onReady={(player) => {
playerRef.current = player;
if (selectedPlayback.timelineItems.length > 0) { if (initialPlayback.timelineItems.length > 0) {
player.currentTime( controllerRef.current?.seekToTimestamp(
selectedPlayback.timelineItems[0].timestamp - selectedPlayback.timelineItems[0].timestamp,
selectedPlayback.range.start true
); );
} else { }
player.currentTime(0); }}
} />
player.on("playing", () => onSelectItem(undefined)); <div className="px-2 h-full w-1/3 overflow-y-auto overflow-x-hidden">
player.on("timeupdate", () => {
setTimelineTime(Math.floor(player.currentTime() || 0));
});
}}
onDispose={() => {
playerRef.current = undefined;
}}
>
{focusedItem && (
<TimelineEventOverlay
timeline={focusedItem}
cameraConfig={config.cameras[selectedPlayback.camera]}
/>
)}
</VideoPlayer>
</div>
{selectedPlayback.relevantPreview && (
<div className={`w-full ${scrubbing ? "visible" : "hidden"}`}>
<VideoPlayer
options={{
preload: "auto",
autoplay: false,
controls: false,
muted: true,
loadingSpinner: false,
sources: [
{
src: `${selectedPlayback.relevantPreview?.src}`,
type: "video/mp4",
},
],
}}
seekOptions={{}}
onReady={(player) => {
previewRef.current = player;
player.on("seeked", () => setSeeking(false));
}}
onDispose={() => {
previewRef.current = undefined;
}}
/>
</div>
)}
</div>
</>
<div className="px-2 h-[608px] overflow-auto">
{selectedPlayback.timelineItems.map((timeline) => { {selectedPlayback.timelineItems.map((timeline) => {
return ( return (
<TimelineItemCard <TimelineItemCard
key={timeline.timestamp} key={timeline.timestamp}
timeline={timeline} timeline={timeline}
relevantPreview={selectedPlayback.relevantPreview} relevantPreview={selectedPlayback.relevantPreview}
onSelect={() => onSelectItem(timeline)} onSelect={() => {
controllerRef.current?.seekToTimelineItem(timeline);
}}
/> />
); );
})} })}
</div> </div>
</div> </div>
<div className="m-1 max-h-72 2xl:max-h-80 3xl:max-h-96 overflow-auto"> <div className="mt-4 w-full h-full relative">
{timelineStack.playbackItems.map((timeline) => { <div className="absolute left-0 top-0 right-0 bottom-0 overflow-auto">
const isSelected = {timelineStack.playbackItems.map((timeline) => {
timeline.range.start == selectedPlayback.range.start; const isInitiallySelected =
const graphData = timelineGraphData[timeline.range.start]; initialPlayback.range.start == timeline.range.start;
const isSelected =
timeline.range.start == selectedPlayback.range.start;
const graphData = timelineGraphData[timeline.range.start];
return ( return (
<div <div
key={timeline.range.start} ref={isInitiallySelected ? initialScrollRef : null}
className={`relative p-2 ${ key={timeline.range.start}
isSelected ? "bg-secondary bg-opacity-30 rounded-md" : "" >
}`} {isSelected ? (
> <div className="p-2 relative bg-secondary bg-opacity-30 rounded-md">
<ActivityScrubber <ActivityScrubber
items={[]} timeBars={
timeBars={ isSelected
isSelected && selectedPlayback.relevantPreview ? [
? [ {
{ time: new Date(
time: new Date( Math.max(timeline.range.start, timelineTime) *
(timeline.range.start + timelineTime) * 1000 1000
), ),
id: "playback", id: "playback",
}, },
] ]
: [] : []
} }
options={{ options={{
snap: null, snap: null,
min: new Date(timeline.range.start * 1000), min: new Date(timeline.range.start * 1000),
max: new Date(timeline.range.end * 1000), max: new Date(timeline.range.end * 1000),
zoomable: false, start: new Date(timeline.range.start * 1000),
}} end: new Date(timeline.range.end * 1000),
timechangeHandler={(data) => { zoomable: false,
if (!timeline.relevantPreview) { height: "120px",
return; }}
} timechangeHandler={(data) => {
controllerRef.current?.scrubToTimestamp(
data.time.getTime() / 1000
);
setTimelineTime(data.time.getTime() / 1000);
}}
timechangedHandler={(data) => {
controllerRef.current?.seekToTimestamp(
data.time.getTime() / 1000,
true
);
}}
/>
{isSelected && graphData && (
<div className="absolute left-2 right-2 top-0 h-[84px]">
<TimelineGraph
id={timeline.range.start.toString()}
data={[
{
name: "Motion",
data: graphData.motion,
},
]}
objects={graphData.objects}
start={graphData.motion[0].x.getTime()}
end={graphData.motion.at(-1)!!.x.getTime()}
/>
</div>
)}
</div>
) : (
<TimelineBar
startTime={timeline.range.start}
graphData={graphData}
onClick={() => {
setSelectedPlayback(timeline);
if (playerRef.current?.paused() == false) { let startTs;
setScrubbing(true); if (timeline.timelineItems.length > 0) {
playerRef.current?.pause(); startTs = selectedPlayback.timelineItems[0].timestamp;
} } else {
startTs = timeline.range.start;
}
const seekTimestamp = data.time.getTime() / 1000; controllerRef.current?.seekToTimestamp(startTs, true);
const seekTime = }}
seekTimestamp - timeline.relevantPreview.start;
setTimelineTime(seekTimestamp - timeline.range.start);
setTimeToSeek(Math.round(seekTime));
}}
timechangedHandler={(data) => {
const playbackTime = data.time.getTime() / 1000;
playerRef.current?.currentTime(
playbackTime - timeline.range.start
);
setScrubbing(false);
playerRef.current?.play();
}}
selectHandler={(data) => {
if (data.items.length > 0) {
const selected = data.items[0];
onSelectItem(
selectedPlayback.timelineItems.find(
(timeline) => timeline.timestamp == selected
)
);
}
}}
doubleClickHandler={() => setSelectedPlayback(timeline)}
/>
{isSelected && graphData && (
<div className="w-full absolute left-0 top-0 h-[84px]">
<TimelineGraph
id={timeline.range.start.toString()}
data={[
{
name: "Motion",
data: graphData.motion,
},
{ name: "Active Objects", data: graphData.objects },
]}
/> />
</div> )}
)} </div>
</div> );
); })}
})} </div>
</div> </div>
</div> </div>
); );
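The desktop view above now drives playback entirely through the controller handed back by DynamicVideoPlayer. The sketch below is the controller surface this diff relies on, inferred from the calls shown here; the actual DynamicVideoController in components/player/DynamicVideoPlayer may expose more than this.

interface DynamicVideoControllerLike {
  // register a listener that receives the current playback position as a unix timestamp (seconds)
  onPlayerTimeUpdate(listener: (timestamp: number) => void): void;
  // hard seek of the main player; when play is true, playback resumes after the seek
  seekToTimestamp(timestamp: number, play?: boolean): void;
  // lightweight seek against the preview video while the user is dragging the scrubber
  scrubToTimestamp(timestamp: number): void;
  // jump to the moment of a specific timeline item, e.g. a clicked TimelineItemCard
  seekToTimelineItem(timeline: Timeline): void;
}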
@@ -1,6 +1,3 @@
-import { useApiHost } from "@/api";
-import TimelineEventOverlay from "@/components/overlay/TimelineDataOverlay";
-import VideoPlayer from "@/components/player/VideoPlayer";
 import ActivityScrubber, {
   ScrubberItem,
 } from "@/components/scrubber/ActivityScrubber";
@@ -11,9 +8,11 @@ import {
   getTimelineIcon,
 } from "@/utils/timelineUtil";
 import { renderToStaticMarkup } from "react-dom/server";
-import { useCallback, useEffect, useMemo, useRef, useState } from "react";
+import { useMemo, useRef, useState } from "react";
 import useSWR from "swr";
-import Player from "video.js/dist/types/player";
+import DynamicVideoPlayer, {
+  DynamicVideoController,
+} from "@/components/player/DynamicVideoPlayer";
 
 type MobileTimelineViewProps = {
   playback: TimelinePlayback;
@@ -22,34 +21,9 @@ type MobileTimelineViewProps = {
 export default function MobileTimelineView({
   playback,
 }: MobileTimelineViewProps) {
-  const apiHost = useApiHost();
   const { data: config } = useSWR<FrigateConfig>("config");
-  const timezone = useMemo(
-    () =>
-      config?.ui?.timezone || Intl.DateTimeFormat().resolvedOptions().timeZone,
-    [config]
-  );
-  const playerRef = useRef<Player | undefined>(undefined);
-  const previewRef = useRef<Player | undefined>(undefined);
-  const [scrubbing, setScrubbing] = useState(false);
-  const [focusedItem, setFocusedItem] = useState<Timeline | undefined>(
-    undefined
-  );
-  const [seeking, setSeeking] = useState(false);
-  const [timeToSeek, setTimeToSeek] = useState<number | undefined>(undefined);
-  const annotationOffset = useMemo(() => {
-    if (!config) {
-      return 0;
-    }
-    return (
-      (config.cameras[playback.camera]?.detect?.annotation_offset || 0) / 1000
-    );
-  }, [config]);
+  const controllerRef = useRef<DynamicVideoController | undefined>(undefined);
 
   const [timelineTime, setTimelineTime] = useState(
     playback.timelineItems.length > 0
@@ -68,197 +42,69 @@ export default function MobileTimelineView({
{ revalidateOnFocus: false } { revalidateOnFocus: false }
); );
const playbackUri = useMemo(() => {
if (!playback) {
return "";
}
const date = new Date(playback.range.start * 1000);
return `${apiHost}vod/${date.getFullYear()}-${
date.getMonth() + 1
}/${date.getDate()}/${date.getHours()}/${
playback.camera
}/${timezone.replaceAll("/", ",")}/master.m3u8`;
}, [playback]);
const onSelectItem = useCallback(
(timeline: Timeline | undefined) => {
if (timeline) {
setFocusedItem(timeline);
const selected = timeline.timestamp;
playerRef.current?.pause();
let seekSeconds = 0;
(recordings || []).every((segment) => {
// if the next segment is past the desired time, stop calculating
if (segment.start_time > selected) {
return false;
}
if (segment.end_time < selected) {
seekSeconds += segment.end_time - segment.start_time;
return true;
}
seekSeconds +=
segment.end_time -
segment.start_time -
(segment.end_time - selected);
return true;
});
playerRef.current?.currentTime(seekSeconds);
} else {
setFocusedItem(undefined);
}
},
[annotationOffset, recordings, playerRef]
);
const onScrubTime = useCallback(
(data: { time: Date }) => {
if (!playback.relevantPreview) {
return;
}
if (playerRef.current?.paused() == false) {
setScrubbing(true);
playerRef.current?.pause();
}
const seekTimestamp = data.time.getTime() / 1000;
const seekTime = seekTimestamp - playback.relevantPreview.start;
setTimelineTime(seekTimestamp);
setTimeToSeek(Math.round(seekTime));
},
[scrubbing, playerRef, playback]
);
const onStopScrubbing = useCallback(
(data: { time: Date }) => {
const playbackTime = data.time.getTime() / 1000;
playerRef.current?.currentTime(playbackTime - playback.range.start);
setScrubbing(false);
playerRef.current?.play();
},
[playback, playerRef]
);
// handle seeking to next frame when seek is finished
useEffect(() => {
if (seeking) {
return;
}
if (timeToSeek && timeToSeek != previewRef.current?.currentTime()) {
setSeeking(true);
previewRef.current?.currentTime(timeToSeek);
}
}, [timeToSeek, seeking]);
if (!config || !recordings) { if (!config || !recordings) {
return <ActivityIndicator />; return <ActivityIndicator />;
} }
return ( return (
<div className="w-full"> <div className="w-full">
<> <DynamicVideoPlayer
<div camera={playback.camera}
className={`relative ${ timeRange={playback.range}
playback.relevantPreview && scrubbing ? "hidden" : "visible" cameraPreviews={
}`} playback.relevantPreview ? [playback.relevantPreview] : []
> }
<VideoPlayer onControllerReady={(controller) => {
options={{ controllerRef.current = controller;
preload: "auto", controllerRef.current.onPlayerTimeUpdate((timestamp: number) => {
autoplay: true, setTimelineTime(timestamp);
sources: [ });
{
src: playbackUri, if (playback.timelineItems.length > 0) {
type: "application/vnd.apple.mpegurl", controllerRef.current?.seekToTimestamp(
}, playback.timelineItems[0].timestamp,
], true
}} );
seekOptions={{ forward: 10, backward: 5 }} }
onReady={(player) => { }}
playerRef.current = player; />
player.currentTime(timelineTime - playback.range.start);
player.on("playing", () => {
onSelectItem(undefined);
});
}}
onDispose={() => {
playerRef.current = undefined;
}}
>
{config && focusedItem ? (
<TimelineEventOverlay
timeline={focusedItem}
cameraConfig={config.cameras[playback.camera]}
/>
) : undefined}
</VideoPlayer>
</div>
{playback.relevantPreview && (
<div className={`${scrubbing ? "visible" : "hidden"}`}>
<VideoPlayer
options={{
preload: "auto",
autoplay: true,
controls: false,
muted: true,
loadingSpinner: false,
sources: [
{
src: `${playback.relevantPreview?.src}`,
type: "video/mp4",
},
],
}}
seekOptions={{}}
onReady={(player) => {
previewRef.current = player;
player.pause();
player.on("seeked", () => setSeeking(false));
}}
onDispose={() => {
previewRef.current = undefined;
}}
/>
</div>
)}
</>
<div className="m-1"> <div className="m-1">
{playback != undefined && ( {playback != undefined && (
<ActivityScrubber <ActivityScrubber
items={timelineItemsToScrubber(playback.timelineItems)} items={timelineItemsToScrubber(playback.timelineItems)}
timeBars={ timeBars={[{ time: new Date(timelineTime * 1000), id: "playback" }]}
playback.relevantPreview
? [{ time: new Date(timelineTime * 1000), id: "playback" }]
: []
}
options={{ options={{
start: new Date( start: new Date(playback.range.start * 1000),
Math.max(playback.range.start, timelineTime - 300) * 1000 end: new Date(playback.range.end * 1000),
),
end: new Date(
Math.min(playback.range.end, timelineTime + 300) * 1000
),
snap: null, snap: null,
min: new Date(playback.range.start * 1000), min: new Date(playback.range.start * 1000),
max: new Date(playback.range.end * 1000), max: new Date(playback.range.end * 1000),
timeAxis: { scale: "minute", step: 5 }, timeAxis: { scale: "minute", step: 15 },
zoomable: false,
}}
timechangeHandler={(data) => {
controllerRef.current?.scrubToTimestamp(
data.time.getTime() / 1000
);
setTimelineTime(data.time.getTime() / 1000);
}}
timechangedHandler={(data) => {
controllerRef.current?.seekToTimestamp(
data.time.getTime() / 1000,
true
);
}} }}
timechangeHandler={onScrubTime}
timechangedHandler={onStopScrubbing}
selectHandler={(data) => { selectHandler={(data) => {
if (data.items.length > 0) { if (data.items.length > 0) {
const selected = parseFloat(data.items[0].split("-")[0]); const selected = parseFloat(data.items[0].split("-")[0]);
onSelectItem( const timeline = playback.timelineItems.find(
playback.timelineItems.find( (timeline) => timeline.timestamp == selected
(timeline) => timeline.timestamp == selected
)
); );
if (timeline) {
controllerRef.current?.seekToTimelineItem(timeline);
}
} }
}} }}
/> />
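Both views wire the scrubber the same way: the continuous timechange events only scrub, and the real seek happens once the drag is released. A minimal sketch of that handler pair, assuming the { time: Date } payload ActivityScrubber passes above:

const onTimechange = (data: { time: Date }) => {
  // fires repeatedly while dragging: update the UI and scrub the preview only
  controllerRef.current?.scrubToTimestamp(data.time.getTime() / 1000);
  setTimelineTime(data.time.getTime() / 1000);
};

const onTimechanged = (data: { time: Date }) => {
  // fires once when the drag ends: perform the real seek and resume playback
  controllerRef.current?.seekToTimestamp(data.time.getTime() / 1000, true);
};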
@@ -80,6 +80,7 @@ module.exports = {
       "xs": "480px",
       "2xl": "1440px",
       "3xl": "1920px",
+      "4xl": "2560px",
     },
   },
 },
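With the new 4xl screen registered, responsive variants become available for very wide (2560px+) displays; for example (illustrative class values, not part of this commit):

<div className="max-h-72 2xl:max-h-80 3xl:max-h-96 4xl:max-h-[40rem]">
  {/* the timeline stack gets more vertical room on 2560px+ displays */}
</div>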