import {
  useFrigateEvents,
  useInitialCameraState,
  useMotionActivity,
} from "@/api/ws";
import { ATTRIBUTE_LABELS, CameraConfig } from "@/types/frigateConfig";
import { MotionData, ReviewSegment } from "@/types/review";
import { useCallback, useEffect, useMemo, useState } from "react";
import { useTimelineUtils } from "./use-timeline-utils";
import { ObjectType } from "@/types/ws";
import useDeepMemo from "./use-deep-memo";
import { isEqual } from "lodash";

type useCameraActivityReturn = {
  activeTracking: boolean;
  activeMotion: boolean;
  objects: ObjectType[];
};

export function useCameraActivity(
  camera: CameraConfig,
  refreshOnStart: boolean = true,
): useCameraActivityReturn {
  const [objects, setObjects] = useState<ObjectType[]>([]);

  // init camera activity
  const { payload: initialCameraState } = useInitialCameraState(
    camera.name,
    refreshOnStart,
  );

  const updatedCameraState = useDeepMemo(initialCameraState);

  useEffect(() => {
    if (updatedCameraState) {
      setObjects(updatedCameraState.objects);
    }
  }, [updatedCameraState, camera]);

  // handle camera activity
  const hasActiveObjects = useMemo(
    () => objects.filter((obj) => !obj.stationary).length > 0,
    [objects],
  );

  const { payload: detectingMotion } = useMotionActivity(camera.name);
  const { payload: event } = useFrigateEvents();
  const updatedEvent = useDeepMemo(event);

  // only push a new objects array into state when it actually differs,
  // so identical payloads do not trigger re-renders
  const handleSetObjects = useCallback(
    (newObjects: ObjectType[]) => {
      if (!isEqual(objects, newObjects)) {
        setObjects(newObjects);
      }
    },
    [objects],
  );

  useEffect(() => {
    if (!updatedEvent) {
      return;
    }

    if (updatedEvent.after.camera !== camera.name) {
      return;
    }

    const updatedEventIndex = objects.findIndex(
      (obj) => obj.id === updatedEvent.after.id,
    );

    let newObjects: ObjectType[] = [...objects];

    if (updatedEvent.type === "end") {
      if (updatedEventIndex !== -1) {
        newObjects.splice(updatedEventIndex, 1);
      }
    } else {
      if (updatedEventIndex === -1) {
        // add unknown updatedEvent to list if not stationary
        if (!updatedEvent.after.stationary) {
          const newActiveObject: ObjectType = {
            id: updatedEvent.after.id,
            label: updatedEvent.after.label,
            stationary: updatedEvent.after.stationary,
            area: updatedEvent.after.area,
            ratio: updatedEvent.after.ratio,
            score: updatedEvent.after.score,
            sub_label: updatedEvent.after.sub_label?.[0] ?? "",
          };
          newObjects = [...objects, newActiveObject];
        }
      } else {
        let label = updatedEvent.after.label;

        if (updatedEvent.after.sub_label) {
          const sub_label = updatedEvent.after.sub_label[0];

          if (ATTRIBUTE_LABELS.includes(sub_label)) {
            label = sub_label;
          } else {
            label = `${label}-verified`;
          }
        }

        // replace the tracked object immutably (instead of mutating it in
        // place) so the isEqual check in handleSetObjects can see the change
        newObjects[updatedEventIndex] = {
          ...newObjects[updatedEventIndex],
          label,
          stationary: updatedEvent.after.stationary,
        };
      }
    }

    handleSetObjects(newObjects);
  }, [camera, updatedEvent, objects, handleSetObjects]);

  return {
    activeTracking: hasActiveObjects,
    activeMotion: detectingMotion
      ? detectingMotion === "ON"
      : initialCameraState?.motion === true,
    objects,
  };
}
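
// Illustrative usage sketch (not part of this module): a component that
// already holds a CameraConfig could surface the hook's state roughly like
// this. `CameraActivityBadge` and its props are hypothetical names for the
// example only.
//
// function CameraActivityBadge({ camera }: { camera: CameraConfig }) {
//   const { activeTracking, activeMotion, objects } = useCameraActivity(camera);
//
//   if (!activeMotion && !activeTracking) {
//     return null;
//   }
//
//   return `${objects.length} tracked object(s), motion ${activeMotion ? "on" : "off"}`;
// }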

export function useCameraMotionNextTimestamp(
  timeRangeSegmentEnd: number,
  segmentDuration: number,
  motionOnly: boolean,
  reviewItems: ReviewSegment[],
  motionData: MotionData[],
  currentTime: number,
) {
  const { alignStartDateToTimeline } = useTimelineUtils({
    segmentDuration,
  });

  const noMotionRanges = useMemo(() => {
    if (!motionData?.length || !reviewItems) {
      return;
    }

    if (!motionOnly) {
      return [];
    }

    const ranges = [];
    let currentSegmentStart = null;
    let currentSegmentEnd = null;

    // align motion start to timeline start
    const offset =
      (motionData[0].start_time -
        alignStartDateToTimeline(timeRangeSegmentEnd)) %
      segmentDuration;

    // motionData entries are assumed to be spaced 15 seconds apart, so each
    // timeline segment spans segmentDuration / 15 entries
    const startIndex = Math.abs(Math.floor(offset / 15));

    for (
      let i = startIndex;
      i < motionData.length;
      i = i + segmentDuration / 15
    ) {
      const motionStart = motionData[i].start_time;
      const motionEnd = motionStart + segmentDuration;

      const segmentMotion = motionData
        .slice(i, i + segmentDuration / 15)
        .some(({ motion }) => motion !== undefined && motion > 0);
      const overlappingReviewItems = reviewItems.some(
        (item) =>
          (item.start_time >= motionStart && item.start_time < motionEnd) ||
          ((item.end_time ?? Date.now() / 1000) > motionStart &&
            (item.end_time ?? Date.now() / 1000) <= motionEnd) ||
          (item.start_time <= motionStart &&
            (item.end_time ?? Date.now() / 1000) >= motionEnd),
      );

      if (!segmentMotion || overlappingReviewItems) {
        if (currentSegmentStart === null) {
          currentSegmentStart = motionStart;
        }
        currentSegmentEnd = motionEnd;
      } else {
        if (currentSegmentStart !== null) {
          ranges.push([currentSegmentStart, currentSegmentEnd]);
          currentSegmentStart = null;
          currentSegmentEnd = null;
        }
      }
    }

    if (currentSegmentStart !== null) {
      ranges.push([currentSegmentStart, currentSegmentEnd]);
    }

    return ranges;
  }, [
    motionData,
    reviewItems,
    motionOnly,
    alignStartDateToTimeline,
    segmentDuration,
    timeRangeSegmentEnd,
  ]);

  const nextTimestamp = useMemo(() => {
    if (!noMotionRanges) {
      return;
    }

    if (!motionOnly) {
      return currentTime + 0.5;
    }

    let currentRange = 0;
    let nextTimestamp = currentTime + 0.5;

    while (currentRange < noMotionRanges.length) {
      const [start, end] = noMotionRanges[currentRange];

      if (start && end) {
        // If the current time is before the start of the current range
        if (currentTime < start) {
          // The next timestamp is either the start of the current range or
          // currentTime + 0.5, whichever is smaller
          nextTimestamp = Math.min(start, nextTimestamp);
          break;
        }
        // If the current time is within the current range
        else if (currentTime >= start && currentTime < end) {
          // The next timestamp is the end of the current range
          nextTimestamp = end;
          currentRange++;
        }
        // If the current time is past the end of the current range
        else {
          currentRange++;
        }
      }
    }

    return nextTimestamp;
  }, [currentTime, noMotionRanges, motionOnly]);

  return nextTimestamp;
}
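
// Illustrative usage sketch: inside a playback component the returned
// timestamp could drive a seek past gaps with no motion or with already
// reviewed activity. `timeRange` and `player.seekTo` are hypothetical names
// for the example only.
//
// const nextTimestamp = useCameraMotionNextTimestamp(
//   timeRange.before, // end of the visible timeline range
//   segmentDuration,
//   motionOnly,
//   reviewItems,
//   motionData,
//   currentTime,
// );
//
// useEffect(() => {
//   if (nextTimestamp && nextTimestamp > currentTime + 0.5) {
//     player.seekTo(nextTimestamp); // jump over a no-motion gap
//   }
// }, [nextTimestamp, currentTime]);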