2024-02-10 13:30:53 +01:00
|
|
|
import {
|
|
|
|
useAudioActivity,
|
|
|
|
useFrigateEvents,
|
|
|
|
useMotionActivity,
|
|
|
|
} from "@/api/ws";
|
|
|
|
import { CameraConfig } from "@/types/frigateConfig";
|
2024-03-25 17:19:55 +01:00
|
|
|
import { MotionData, ReviewSegment } from "@/types/review";
|
2024-02-10 13:30:53 +01:00
|
|
|
import { useEffect, useMemo, useState } from "react";
|
2024-03-25 17:19:55 +01:00
|
|
|
import { useTimelineUtils } from "./use-timeline-utils";
|
2024-02-10 13:30:53 +01:00
|
|
|
|
|
|
|
// Aggregated realtime activity flags reported for a single camera.
type useCameraActivityReturn = {
  // True while at least one non-stationary tracked object is active on the camera.
  activeTracking: boolean;
  // True while the motion detection topic reports "ON" for the camera.
  activeMotion: boolean;
  // True while the detected audio RMS level meets the configured minimum
  // volume; always false when audio is not enabled in the camera config.
  activeAudio: boolean;
};
|
|
|
|
|
2024-03-23 20:49:31 +01:00
|
|
|
export function useCameraActivity(
|
2024-02-28 23:23:56 +01:00
|
|
|
camera: CameraConfig,
|
2024-02-10 13:30:53 +01:00
|
|
|
): useCameraActivityReturn {
|
|
|
|
const [activeObjects, setActiveObjects] = useState<string[]>([]);
|
|
|
|
const hasActiveObjects = useMemo(
|
|
|
|
() => activeObjects.length > 0,
|
2024-02-28 23:23:56 +01:00
|
|
|
[activeObjects],
|
2024-02-10 13:30:53 +01:00
|
|
|
);
|
|
|
|
|
|
|
|
const { payload: detectingMotion } = useMotionActivity(camera.name);
|
|
|
|
const { payload: event } = useFrigateEvents();
|
|
|
|
const { payload: audioRms } = useAudioActivity(camera.name);
|
|
|
|
|
|
|
|
useEffect(() => {
|
|
|
|
if (!event) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (event.after.camera != camera.name) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
const eventIndex = activeObjects.indexOf(event.after.id);
|
|
|
|
|
|
|
|
if (event.type == "end") {
|
|
|
|
if (eventIndex != -1) {
|
|
|
|
const newActiveObjects = [...activeObjects];
|
|
|
|
newActiveObjects.splice(eventIndex, 1);
|
|
|
|
setActiveObjects(newActiveObjects);
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if (eventIndex == -1) {
|
|
|
|
// add unknown event to list if not stationary
|
|
|
|
if (!event.after.stationary) {
|
|
|
|
const newActiveObjects = [...activeObjects, event.after.id];
|
|
|
|
setActiveObjects(newActiveObjects);
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// remove known event from list if it has become stationary
|
|
|
|
if (event.after.stationary) {
|
|
|
|
activeObjects.splice(eventIndex, 1);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2024-02-28 23:23:56 +01:00
|
|
|
}, [camera, event, activeObjects]);
|
2024-02-10 13:30:53 +01:00
|
|
|
|
|
|
|
return {
|
|
|
|
activeTracking: hasActiveObjects,
|
|
|
|
activeMotion: detectingMotion == "ON",
|
|
|
|
activeAudio: camera.audio.enabled_in_config
|
|
|
|
? audioRms >= camera.audio.min_volume
|
|
|
|
: false,
|
|
|
|
};
|
|
|
|
}
|
2024-03-25 17:19:55 +01:00
|
|
|
|
|
|
|
/**
 * Computes the next playback timestamp for the motion timeline. In
 * motion-only mode it skips over ranges that contain no motion (or that
 * overlap an existing review item); otherwise playback simply advances
 * by half a second.
 *
 * @param timeRangeSegmentEnd end of the visible time range, used to align
 *                            motion data to the timeline segment grid
 * @param segmentDuration length of one timeline segment in seconds
 * @param motionOnly when true, compute skip-ranges; when false, advance normally
 * @param reviewItems review segments overlapping the time range
 * @param motionData raw motion samples — assumed to be spaced 15 s apart
 *                   (the `/ 15` stride below relies on it) — TODO confirm
 * @param currentTime current playback position (epoch seconds)
 * @returns the next timestamp to seek to, or undefined until data is loaded
 */
export function useCameraMotionNextTimestamp(
  timeRangeSegmentEnd: number,
  segmentDuration: number,
  motionOnly: boolean,
  reviewItems: ReviewSegment[],
  motionData: MotionData[],
  currentTime: number,
) {
  const { alignStartDateToTimeline } = useTimelineUtils({
    segmentDuration,
  });

  // List of [start, end] ranges that playback should skip (no motion, or
  // covered by a review item). Empty when not in motion-only mode;
  // undefined while data hasn't loaded yet.
  const noMotionRanges = useMemo(() => {
    if (!motionData?.length || !reviewItems) {
      return;
    }

    if (!motionOnly) {
      return [];
    }

    const ranges = [];
    let currentSegmentStart = null;
    let currentSegmentEnd = null;

    // align motion start to timeline start
    const offset =
      (motionData[0].start_time -
        alignStartDateToTimeline(timeRangeSegmentEnd)) %
      segmentDuration;

    // Index of the first sample that lands on a segment boundary,
    // assuming one motion sample every 15 seconds.
    const startIndex = Math.abs(Math.floor(offset / 15));

    // Walk the samples one timeline segment at a time.
    for (
      let i = startIndex;
      i < motionData.length;
      i = i + segmentDuration / 15
    ) {
      const motionStart = motionData[i].start_time;
      const motionEnd = motionStart + segmentDuration;

      // Does any sample within this segment report motion?
      const segmentMotion = motionData
        .slice(i, i + segmentDuration / 15)
        .some(({ motion }) => motion !== undefined && motion > 0);
      // Does any review item overlap this segment (starts in it, ends in
      // it, or fully spans it)?
      const overlappingReviewItems = reviewItems.some(
        (item) =>
          (item.start_time >= motionStart && item.start_time < motionEnd) ||
          (item.end_time > motionStart && item.end_time <= motionEnd) ||
          (item.start_time <= motionStart && item.end_time >= motionEnd),
      );

      // Segments without motion — or already covered by a review item —
      // are merged into a growing skip range; a motion segment closes it.
      if (!segmentMotion || overlappingReviewItems) {
        if (currentSegmentStart === null) {
          currentSegmentStart = motionStart;
        }
        currentSegmentEnd = motionEnd;
      } else {
        if (currentSegmentStart !== null) {
          ranges.push([currentSegmentStart, currentSegmentEnd]);
          currentSegmentStart = null;
          currentSegmentEnd = null;
        }
      }
    }

    // Flush a skip range still open at the end of the data.
    if (currentSegmentStart !== null) {
      ranges.push([currentSegmentStart, currentSegmentEnd]);
    }

    return ranges;
  }, [
    motionData,
    reviewItems,
    motionOnly,
    alignStartDateToTimeline,
    segmentDuration,
    timeRangeSegmentEnd,
  ]);

  const nextTimestamp = useMemo(() => {
    // Data not ready yet.
    if (!noMotionRanges) {
      return;
    }

    // Normal playback: just step forward half a second.
    if (!motionOnly) {
      return currentTime + 0.5;
    }

    let currentRange = 0;
    let nextTimestamp = currentTime + 0.5;

    // Advance past any skip range the current time falls into.
    while (currentRange < noMotionRanges.length) {
      const [start, end] = noMotionRanges[currentRange];

      if (start && end) {
        // If the current time is before the start of the current range
        if (currentTime < start) {
          // The next timestamp is either the start of the current range or currentTime + 0.5, whichever is smaller
          nextTimestamp = Math.min(start, nextTimestamp);
          break;
        }
        // If the current time is within the current range
        else if (currentTime >= start && currentTime < end) {
          // The next timestamp is the end of the current range
          nextTimestamp = end;
          currentRange++;
        }
        // If the current time is past the end of the current range
        else {
          currentRange++;
        }
      }
    }

    return nextTimestamp;
  }, [currentTime, noMotionRanges, motionOnly]);

  return nextTimestamp;
}
|