Implement support for no recordings indicator on timeline (#18363)

* Indicate no recordings on the history timeline with gray hash marks

This commit includes a new backend API endpoint and the frontend changes needed to support this functionality.

* don't show slashes for now
This commit is contained in:
Josh Hawkins 2025-05-23 09:55:48 -05:00 committed by GitHub
parent 8a1da3a89f
commit 9392ffc300
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 121 additions and 3 deletions

View File

@ -1,7 +1,8 @@
from enum import Enum
from typing import Optional
from typing import Optional, Union
from pydantic import BaseModel
from pydantic.json_schema import SkipJsonSchema
class Extension(str, Enum):
@ -46,3 +47,10 @@ class MediaMjpegFeedQueryParams(BaseModel):
class MediaRecordingsSummaryQueryParams(BaseModel):
    """Query parameters for the recordings-summary endpoint."""

    # Timezone identifier used when bucketing recordings (defaults to "utc").
    timezone: str = "utc"
    # Comma-separated camera names, or "all" for every camera.
    cameras: Optional[str] = "all"
class MediaRecordingsAvailabilityQueryParams(BaseModel):
    """Query parameters for GET /recordings/unavailable."""

    # Comma-separated camera names, or "all" for every camera.
    cameras: str = "all"
    # End of the query window as a unix timestamp; when omitted the
    # endpoint falls back to "now".
    before: Union[float, SkipJsonSchema[None]] = None
    # Start of the query window as a unix timestamp; when omitted the
    # endpoint falls back to one hour before now.
    after: Union[float, SkipJsonSchema[None]] = None
    # Step size in seconds used when scanning the window for gaps.
    scale: int = 30

View File

@ -8,6 +8,7 @@ import os
import subprocess as sp
import time
from datetime import datetime, timedelta, timezone
from functools import reduce
from pathlib import Path as FilePath
from typing import Any
from urllib.parse import unquote
@ -19,7 +20,7 @@ from fastapi import APIRouter, Path, Query, Request, Response
from fastapi.params import Depends
from fastapi.responses import FileResponse, JSONResponse, StreamingResponse
from pathvalidate import sanitize_filename
from peewee import DoesNotExist, fn
from peewee import DoesNotExist, fn, operator
from tzlocal import get_localzone_name
from frigate.api.defs.query.media_query_parameters import (
@ -27,6 +28,7 @@ from frigate.api.defs.query.media_query_parameters import (
MediaEventsSnapshotQueryParams,
MediaLatestFrameQueryParams,
MediaMjpegFeedQueryParams,
MediaRecordingsAvailabilityQueryParams,
MediaRecordingsSummaryQueryParams,
)
from frigate.api.defs.tags import Tags
@ -542,6 +544,66 @@ def recordings(
return JSONResponse(content=list(recordings))
@router.get("/recordings/unavailable", response_model=list[dict])
def no_recordings(params: MediaRecordingsAvailabilityQueryParams = Depends()):
    """Get time ranges with no recordings.

    Walks the window [after, before) in ``scale``-second steps and merges
    consecutive uncovered steps into ``{"start_time", "end_time"}`` dicts
    (unix timestamps truncated to whole seconds).
    """
    cameras = params.cameras

    # NOTE: this module imports the `datetime`/`timedelta` classes directly
    # (`from datetime import datetime, timedelta, timezone`), so they must be
    # called without a `datetime.` module prefix — `datetime.datetime.now()`
    # would raise AttributeError here.
    before = params.before or datetime.now().timestamp()
    after = params.after or (datetime.now() - timedelta(hours=1)).timestamp()
    scale = params.scale

    clauses = [(Recordings.start_time > after) & (Recordings.end_time < before)]

    if cameras != "all":
        camera_list = cameras.split(",")
        clauses.append((Recordings.camera << camera_list))

    # Fetch only the time bounds of matching recordings, ordered so the
    # window can be walked front-to-back against them.
    data: list[Recordings] = (
        Recordings.select(Recordings.start_time, Recordings.end_time)
        .where(reduce(operator.and_, clauses))
        .order_by(Recordings.start_time.asc())
        .dicts()
        .iterator()
    )

    # Convert recordings to list of (start, end) tuples
    recordings = [(r["start_time"], r["end_time"]) for r in data]

    # Generate all time segments, merging adjacent uncovered ones into gaps.
    current = after
    no_recording_segments = []
    current_start = None

    while current < before:
        segment_end = current + scale

        # A segment counts as covered when any recording overlaps it;
        # touching endpoints count as overlap.
        has_recording = any(
            start <= segment_end and end >= current for start, end in recordings
        )

        if not has_recording:
            if current_start is None:
                current_start = current  # Start a new gap
        else:
            if current_start is not None:
                # End the current gap and append it
                no_recording_segments.append(
                    {"start_time": int(current_start), "end_time": int(current)}
                )
                current_start = None

        current = segment_end

    # Append the last gap if it runs to the end of the window.
    if current_start is not None:
        no_recording_segments.append(
            {"start_time": int(current_start), "end_time": int(before)}
        )

    return JSONResponse(content=no_recording_segments)
@router.get(
"/{camera_name}/start/{start_ts}/end/{end_ts}/clip.mp4",
description="For iOS devices, use the master.m3u8 HLS link instead of clip.mp4. Safari does not reliably process progressive mp4 files.",

View File

@ -17,6 +17,7 @@ import {
VirtualizedMotionSegments,
VirtualizedMotionSegmentsRef,
} from "./VirtualizedMotionSegments";
import { RecordingSegment } from "@/types/record";
export type MotionReviewTimelineProps = {
segmentDuration: number;
@ -38,6 +39,7 @@ export type MotionReviewTimelineProps = {
setExportEndTime?: React.Dispatch<React.SetStateAction<number>>;
events: ReviewSegment[];
motion_events: MotionData[];
noRecordingRanges?: RecordingSegment[];
contentRef: RefObject<HTMLDivElement>;
timelineRef?: RefObject<HTMLDivElement>;
onHandlebarDraggingChange?: (isDragging: boolean) => void;
@ -66,6 +68,7 @@ export function MotionReviewTimeline({
setExportEndTime,
events,
motion_events,
noRecordingRanges,
contentRef,
timelineRef,
onHandlebarDraggingChange,
@ -97,6 +100,17 @@ export function MotionReviewTimeline({
motion_events,
);
const getRecordingAvailability = useCallback(
(time: number): boolean | undefined => {
if (!noRecordingRanges?.length) return undefined;
return !noRecordingRanges.some(
(range) => time >= range.start_time && time < range.end_time,
);
},
[noRecordingRanges],
);
const segmentTimes = useMemo(() => {
const segments = [];
let segmentTime = timelineStartAligned;
@ -206,6 +220,7 @@ export function MotionReviewTimeline({
dense={dense}
motionOnly={motionOnly}
getMotionSegmentValue={getMotionSegmentValue}
getRecordingAvailability={getRecordingAvailability}
/>
</ReviewTimeline>
);

View File

@ -15,6 +15,7 @@ type MotionSegmentProps = {
timestampSpread: number;
firstHalfMotionValue: number;
secondHalfMotionValue: number;
hasRecording?: boolean;
motionOnly: boolean;
showMinimap: boolean;
minimapStartTime?: number;
@ -31,6 +32,7 @@ export function MotionSegment({
timestampSpread,
firstHalfMotionValue,
secondHalfMotionValue,
hasRecording,
motionOnly,
showMinimap,
minimapStartTime,
@ -176,6 +178,12 @@ export function MotionSegment({
segmentClasses,
severity[0] && "bg-gradient-to-r",
severity[0] && severityColorsBg[severity[0]],
// TODO: will update this for 0.17
false &&
hasRecording == false &&
firstHalfMotionValue == 0 &&
secondHalfMotionValue == 0 &&
"bg-slashes",
)}
onClick={segmentClick}
onTouchEnd={(event) => handleTouchStart(event, segmentClick)}

View File

@ -24,6 +24,7 @@ type VirtualizedMotionSegmentsProps = {
dense: boolean;
motionOnly: boolean;
getMotionSegmentValue: (timestamp: number) => number;
getRecordingAvailability: (timestamp: number) => boolean | undefined;
};
export interface VirtualizedMotionSegmentsRef {
@ -55,6 +56,7 @@ export const VirtualizedMotionSegments = forwardRef<
dense,
motionOnly,
getMotionSegmentValue,
getRecordingAvailability,
},
ref,
) => {
@ -154,6 +156,8 @@ export const VirtualizedMotionSegments = forwardRef<
(item.end_time ?? segmentTime) >= motionEnd),
);
const hasRecording = getRecordingAvailability(segmentTime);
if ((!segmentMotion || overlappingReviewItems) && motionOnly) {
return null; // Skip rendering this segment in motion only mode
}
@ -172,6 +176,7 @@ export const VirtualizedMotionSegments = forwardRef<
events={events}
firstHalfMotionValue={firstHalfMotionValue}
secondHalfMotionValue={secondHalfMotionValue}
hasRecording={hasRecording}
segmentDuration={segmentDuration}
segmentTime={segmentTime}
timestampSpread={timestampSpread}
@ -189,6 +194,7 @@ export const VirtualizedMotionSegments = forwardRef<
[
events,
getMotionSegmentValue,
getRecordingAvailability,
motionOnly,
segmentDuration,
showMinimap,

View File

@ -43,7 +43,11 @@ import Logo from "@/components/Logo";
import { Skeleton } from "@/components/ui/skeleton";
import { FaVideo } from "react-icons/fa";
import { VideoResolutionType } from "@/types/live";
import { ASPECT_VERTICAL_LAYOUT, ASPECT_WIDE_LAYOUT } from "@/types/record";
import {
ASPECT_VERTICAL_LAYOUT,
ASPECT_WIDE_LAYOUT,
RecordingSegment,
} from "@/types/record";
import { useResizeObserver } from "@/hooks/resize-observer";
import { cn } from "@/lib/utils";
import { useFullscreen } from "@/hooks/use-fullscreen";
@ -808,6 +812,16 @@ function Timeline({
},
]);
const { data: noRecordings } = useSWR<RecordingSegment[]>([
"recordings/unavailable",
{
before: timeRange.before,
after: timeRange.after,
scale: Math.round(zoomSettings.segmentDuration / 2),
cameras: mainCamera,
},
]);
const [exportStart, setExportStartTime] = useState<number>(0);
const [exportEnd, setExportEndTime] = useState<number>(0);
@ -853,6 +867,7 @@ function Timeline({
setHandlebarTime={setCurrentTime}
events={mainCameraReviewItems}
motion_events={motionData ?? []}
noRecordingRanges={noRecordings ?? []}
contentRef={contentRef}
onHandlebarDraggingChange={(scrubbing) => setScrubbing(scrubbing)}
isZooming={isZooming}

View File

@ -42,6 +42,10 @@ module.exports = {
wide: "32 / 9",
tall: "8 / 9",
},
backgroundImage: {
slashes:
"repeating-linear-gradient(45deg, hsl(var(--primary-variant) / 0.2), hsl(var(--primary-variant) / 0.2) 2px, transparent 2px, transparent 8px)",
},
colors: {
border: "hsl(var(--border))",
input: "hsl(var(--input))",