Live player fixes and improvements (#12202)

* Live player fixes and improvements

* remove comment

* Simplify wording
Josh Hawkins 2024-06-29 10:02:30 -05:00 committed by GitHub
parent 48a87b16b8
commit 53a2a865f1
9 changed files with 267 additions and 85 deletions

View File

@ -7,13 +7,15 @@ Frigate intelligently displays your camera streams on the Live view dashboard. Y
## Live View technologies
Frigate intelligently uses three different streaming technologies to display your camera streams. The highest quality and fluency of the Live view requires the bundled `go2rtc` to be configured as shown in the [step by step guide](/guides/configuring_go2rtc).
Frigate intelligently uses three different streaming technologies to display your camera streams on the dashboard and the single camera view, switching between available modes based on network bandwidth, player errors, or required features like two-way talk. The highest quality and fluency of the Live view requires the bundled `go2rtc` to be configured as shown in the [step by step guide](/guides/configuring_go2rtc).
| Source | Latency | Frame Rate | Resolution | Audio | Requires go2rtc | Other Limitations |
| ------ | ------- | ------------------------------------- | -------------- | ---------------------------- | --------------- | ------------------------------------------------ |
| jsmpeg | low | same as `detect -> fps`, capped at 10 | same as detect | no | no | none |
| mse | low | native | native | yes (depends on audio codec) | yes | iPhone requires iOS 17.1+, Firefox is h.264 only |
| webrtc | lowest | native | native | yes (depends on audio codec) | yes | requires extra config, doesn't support h.265 |
The jsmpeg live view will use more browser and client GPU resources. Using go2rtc is highly recommended and will provide a superior experience.
| Source | Latency | Frame Rate | Resolution | Audio | Requires go2rtc | Other Limitations |
| ------ | ------- | ------------------------------------- | ---------- | ---------------------------- | --------------- | ------------------------------------------------------------------------------------ |
| jsmpeg | low | same as `detect -> fps`, capped at 10 | 720p | no | no | resolution is configurable, but go2rtc is recommended if you want higher resolutions |
| mse | low | native | native | yes (depends on audio codec) | yes | iPhone requires iOS 17.1+, Firefox is h.264 only |
| webrtc | lowest | native | native | yes (depends on audio codec) | yes | requires extra config, doesn't support h.265 |
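
As a rough sketch of how a client can act on this table, the initial choice comes down to whether the camera has a go2rtc restream at all; the type and parameter names below are illustrative stand-ins, not Frigate's exact API:

```ts
// Illustrative only: picks a starting source per the table above.
type LiveSource = "jsmpeg" | "mse" | "webrtc";

function initialLiveSource(
  streamName: string,
  go2rtcStreams: Record<string, unknown> | undefined,
): LiveSource {
  // Without a go2rtc restream, jsmpeg is the only available source.
  const isRestreamed = !!go2rtcStreams && streamName in go2rtcStreams;
  return isRestreamed ? "mse" : "jsmpeg";
}
```

Error-driven fallback (for example switching to WebRTC when MSE cannot decode) is handled separately by the dashboard views later in this change.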
### Audio Support

View File

@ -26,13 +26,12 @@ export default function CameraImage({
const { name } = config ? config.cameras[camera] : "";
const enabled = config ? config.cameras[camera].enabled : "True";
const [isPortraitImage, setIsPortraitImage] = useState(false);
const [{ width: containerWidth, height: containerHeight }] =
useResizeObserver(containerRef);
const requestHeight = useMemo(() => {
if (!config || containerHeight == 0) {
if (!config || containerHeight == 0 || !hasLoaded) {
return 360;
}
@ -40,7 +39,14 @@ export default function CameraImage({
config.cameras[camera].detect.height,
Math.round(containerHeight * (isDesktop ? 1.1 : 1.25)),
);
}, [config, camera, containerHeight]);
}, [config, camera, containerHeight, hasLoaded]);
const isPortraitImage = useMemo(() => {
if (imgRef.current && containerWidth && containerHeight && hasLoaded) {
const { naturalHeight, naturalWidth } = imgRef.current;
return naturalWidth / naturalHeight < containerWidth / containerHeight;
}
}, [containerWidth, containerHeight, hasLoaded]);
useEffect(() => {
if (!config || !imgRef.current) {
@ -61,13 +67,6 @@ export default function CameraImage({
onLoad={() => {
setHasLoaded(true);
if (imgRef.current) {
const { naturalHeight, naturalWidth } = imgRef.current;
setIsPortraitImage(
naturalWidth / naturalHeight < containerWidth / containerHeight,
);
}
if (onload) {
onload();
}
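
The portrait check above moved from an onLoad state update into a derived useMemo value keyed on the measured container and a hasLoaded flag. A minimal sketch of that derived-value pattern, with a hypothetical hook name and the measurement plumbing stubbed out:

```ts
import { useMemo, useRef, useState } from "react";

// Hypothetical helper illustrating the derived-orientation pattern.
function usePortraitCheck(containerWidth: number, containerHeight: number) {
  const imgRef = useRef<HTMLImageElement | null>(null);
  const [hasLoaded, setHasLoaded] = useState(false);

  // Undefined until the image has real natural dimensions to compare against.
  const isPortrait = useMemo(() => {
    if (imgRef.current && containerWidth && containerHeight && hasLoaded) {
      const { naturalWidth, naturalHeight } = imgRef.current;
      return naturalWidth / naturalHeight < containerWidth / containerHeight;
    }
    return undefined;
    // imgRef.current is intentionally read without being a dependency.
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [containerWidth, containerHeight, hasLoaded]);

  return { imgRef, isPortrait, onLoad: () => setHasLoaded(true) };
}
```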

View File

@ -12,7 +12,7 @@ type LivePlayerProps = {
birdseyeConfig: BirdseyeConfig;
liveMode: LivePlayerMode;
onClick?: () => void;
containerRef?: React.MutableRefObject<HTMLDivElement | null>;
containerRef: React.MutableRefObject<HTMLDivElement | null>;
};
export default function BirdseyeLivePlayer({
@ -54,6 +54,7 @@ export default function BirdseyeLivePlayer({
width={birdseyeConfig.width}
height={birdseyeConfig.height}
containerRef={containerRef}
playbackEnabled={true}
/>
);
} else {
@ -62,6 +63,7 @@ export default function BirdseyeLivePlayer({
return (
<div
ref={containerRef}
className={cn(
"relative flex w-full cursor-pointer justify-center",
className,

View File

@ -3,14 +3,15 @@ import { useResizeObserver } from "@/hooks/resize-observer";
import { cn } from "@/lib/utils";
// @ts-expect-error we know this doesn't have types
import JSMpeg from "@cycjimmy/jsmpeg-player";
import React, { useEffect, useMemo, useRef, useId, useState } from "react";
import React, { useEffect, useMemo, useRef, useState } from "react";
type JSMpegPlayerProps = {
className?: string;
camera: string;
width: number;
height: number;
containerRef?: React.MutableRefObject<HTMLDivElement | null>;
containerRef: React.MutableRefObject<HTMLDivElement | null>;
playbackEnabled: boolean;
onPlaying?: () => void;
};
@ -20,18 +21,21 @@ export default function JSMpegPlayer({
height,
className,
containerRef,
playbackEnabled,
onPlaying,
}: JSMpegPlayerProps) {
const url = `${baseUrl.replace(/^http/, "ws")}live/jsmpeg/${camera}`;
const playerRef = useRef<HTMLDivElement | null>(null);
const videoRef = useRef(null);
const videoRef = useRef<HTMLDivElement>(null);
const canvasRef = useRef<HTMLCanvasElement>(null);
const internalContainerRef = useRef<HTMLDivElement | null>(null);
const onPlayingRef = useRef(onPlaying);
const [showCanvas, setShowCanvas] = useState(false);
const selectedContainerRef = useMemo(
() => containerRef ?? internalContainerRef,
[containerRef, internalContainerRef],
() => (containerRef.current ? containerRef : internalContainerRef),
// we know that these deps are correct
// eslint-disable-next-line react-hooks/exhaustive-deps
[containerRef, containerRef.current, internalContainerRef],
);
const [{ width: containerWidth, height: containerHeight }] =
@ -83,39 +87,64 @@ export default function JSMpegPlayer({
}
}, [scaledHeight, aspectRatio]);
const uniqueId = useId();
useEffect(() => {
onPlayingRef.current = onPlaying;
}, [onPlaying]);
useEffect(() => {
if (!playerRef.current || videoRef.current) {
if (!selectedContainerRef?.current || !url) {
return;
}
videoRef.current = new JSMpeg.VideoElement(
playerRef.current,
url,
{ canvas: `#${CSS.escape(uniqueId)}` },
{
protocols: [],
audio: false,
videoBufferSize: 1024 * 1024 * 4,
onPlay: () => {
setShowCanvas(true);
onPlayingRef.current?.();
},
},
);
}, [url, uniqueId]);
const videoWrapper = videoRef.current;
const canvas = canvasRef.current;
let hasData = false;
let videoElement: JSMpeg.VideoElement | null = null;
if (videoWrapper && playbackEnabled) {
// Delayed init to avoid issues with react strict mode
const initPlayer = setTimeout(() => {
videoElement = new JSMpeg.VideoElement(
videoWrapper,
url,
{ canvas: canvas },
{
protocols: [],
audio: false,
videoBufferSize: 1024 * 1024 * 4,
onVideoDecode: () => {
if (!hasData) {
hasData = true;
setShowCanvas(true);
onPlayingRef.current?.();
}
},
},
);
}, 0);
return () => {
clearTimeout(initPlayer);
if (videoElement) {
try {
// this causes issues in react strict mode
// https://stackoverflow.com/questions/76822128/issue-with-cycjimmy-jsmpeg-player-in-react-18-cannot-read-properties-of-null-o
videoElement.destroy();
// eslint-disable-next-line no-empty
} catch (e) {}
}
};
}
// we know that these deps are correct
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [playbackEnabled, url]);
return (
<div className={className}>
<div className="size-full" ref={internalContainerRef}>
<div ref={playerRef} className={cn("jsmpeg", !showCanvas && "hidden")}>
<div className={cn(className, !containerRef.current && "size-full")}>
<div className="internal-jsmpeg-container" ref={internalContainerRef}>
<div ref={videoRef} className={cn("jsmpeg", !showCanvas && "hidden")}>
<canvas
id={uniqueId}
ref={canvasRef}
style={{
width: scaledWidth ?? width,
height: scaledHeight ?? height,

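The rewritten effect above defers constructing the JSMpeg player by one tick and destroys it in cleanup, which sidesteps React 18 strict mode's immediate mount/unmount/remount of effects (the linked Stack Overflow thread describes the crash). A stripped-down sketch of that lifecycle, with the player reduced to a hypothetical object exposing destroy():

```ts
import { useEffect, useRef } from "react";

// Hypothetical stand-in for JSMpeg.VideoElement.
interface DestroyablePlayer {
  destroy(): void;
}

function useDeferredPlayer(create: () => DestroyablePlayer, enabled: boolean) {
  const playerRef = useRef<DestroyablePlayer | null>(null);

  useEffect(() => {
    if (!enabled) {
      return;
    }
    // Defer construction one tick so strict mode's throwaway first mount
    // is cleaned up before a player ever exists.
    const timer = setTimeout(() => {
      playerRef.current = create();
    }, 0);
    return () => {
      clearTimeout(timer);
      try {
        playerRef.current?.destroy();
      } catch {
        // jsmpeg can throw if destroyed before it finished initializing.
      }
      playerRef.current = null;
    };
    // create is assumed stable for the life of the component.
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [enabled]);
}
```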
View File

@ -2,7 +2,7 @@ import WebRtcPlayer from "./WebRTCPlayer";
import { CameraConfig } from "@/types/frigateConfig";
import AutoUpdatingCameraImage from "../camera/AutoUpdatingCameraImage";
import ActivityIndicator from "../indicators/activity-indicator";
import { useCallback, useEffect, useMemo, useState } from "react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import MSEPlayer from "./MsePlayer";
import JSMpegPlayer from "./JSMpegPlayer";
import { MdCircle } from "react-icons/md";
@ -55,6 +55,7 @@ export default function LivePlayer({
setFullResolution,
onError,
}: LivePlayerProps) {
const internalContainerRef = useRef<HTMLDivElement | null>(null);
// camera activity
const { activeMotion, activeTracking, objects, offline } =
@ -73,20 +74,12 @@ export default function LivePlayer({
const [liveReady, setLiveReady] = useState(false);
useEffect(() => {
if (!autoLive) {
return;
}
if (!liveReady) {
if (cameraActive && liveMode == "jsmpeg") {
setLiveReady(true);
}
if (!autoLive || !liveReady) {
return;
}
if (!cameraActive) {
setTimeout(() => setLiveReady(false), 500);
setLiveReady(false);
}
// live mode won't change
// eslint-disable-next-line react-hooks/exhaustive-deps
@ -181,7 +174,8 @@ export default function LivePlayer({
camera={cameraConfig.live.stream_name}
width={cameraConfig.detect.width}
height={cameraConfig.detect.height}
containerRef={containerRef}
playbackEnabled={cameraActive || !showStillWithoutActivity}
containerRef={containerRef ?? internalContainerRef}
onPlaying={playerIsPlaying}
/>
);
@ -194,7 +188,7 @@ export default function LivePlayer({
return (
<div
ref={cameraRef}
ref={cameraRef ?? internalContainerRef}
data-camera={cameraConfig.name}
className={cn(
"relative flex w-full cursor-pointer justify-center outline",

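LivePlayer now owns a fallback container ref so JSMpegPlayer always receives a usable ref even when no parent grid passes one in, and jsmpeg playback is gated on camera activity. The ref fallback is a small, reusable pattern; a sketch with illustrative names:

```ts
import { useRef, type MutableRefObject } from "react";

type DivRef = MutableRefObject<HTMLDivElement | null>;

// Prefer a ref supplied by the parent (e.g. a grid cell); otherwise use one
// owned locally, so children can always measure a real container element.
function useContainerRef(external?: DivRef): DivRef {
  const internal = useRef<HTMLDivElement | null>(null);
  return external ?? internal;
}
```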
View File

@ -31,7 +31,7 @@ function MSEPlayer({
setFullResolution,
onError,
}: MSEPlayerProps) {
const RECONNECT_TIMEOUT: number = 30000;
const RECONNECT_TIMEOUT: number = 10000;
const CODECS: string[] = [
"avc1.640029", // H.264 high 4.1 (Chromecast 1st and 2nd Gen)
@ -45,10 +45,12 @@ function MSEPlayer({
];
const visibilityCheck: boolean = !pip;
const [safariPlaying, setSafariPlaying] = useState(false);
const [wsState, setWsState] = useState<number>(WebSocket.CLOSED);
const [connectTS, setConnectTS] = useState<number>(0);
const [bufferTimeout, setBufferTimeout] = useState<NodeJS.Timeout>();
const [errorCount, setErrorCount] = useState<number>(0);
const videoRef = useRef<HTMLVideoElement>(null);
const wsRef = useRef<WebSocket | null>(null);
@ -117,12 +119,21 @@ function MSEPlayer({
}, [wsURL]);
const onDisconnect = useCallback(() => {
if (wsRef.current && wsState == WebSocket.OPEN) {
if (bufferTimeout) {
clearTimeout(bufferTimeout);
setBufferTimeout(undefined);
}
if ((isSafari || isIOS) && safariPlaying) {
setSafariPlaying(false);
}
if (wsRef.current && wsState != WebSocket.CLOSED) {
setWsState(WebSocket.CLOSED);
wsRef.current.close();
wsRef.current = null;
}
}, [wsState]);
}, [wsState, bufferTimeout, safariPlaying]);
const onOpen = () => {
setWsState(WebSocket.OPEN);
@ -162,6 +173,26 @@ function MSEPlayer({
reconnect();
};
const sendWithTimeout = (value: object, timeout: number) => {
return new Promise<void>((resolve, reject) => {
const timeoutId = setTimeout(() => {
reject(new Error("Timeout waiting for response"));
}, timeout);
send(value);
// Override the onmessageRef handler for mse type to resolve the promise on response
const originalHandler = onmessageRef.current["mse"];
onmessageRef.current["mse"] = (msg) => {
if (msg.type === "mse") {
clearTimeout(timeoutId);
if (originalHandler) originalHandler(msg);
resolve();
}
};
});
};
const onMse = () => {
if ("ManagedMediaSource" in window) {
const MediaSource = window.ManagedMediaSource;
@ -169,10 +200,22 @@ function MSEPlayer({
msRef.current?.addEventListener(
"sourceopen",
() => {
send({
type: "mse",
// @ts-expect-error for typing
value: codecs(MediaSource.isTypeSupported),
sendWithTimeout(
{
type: "mse",
// @ts-expect-error for typing
value: codecs(MediaSource.isTypeSupported),
},
3000,
).catch(() => {
if (wsRef.current) {
onDisconnect();
}
if (isIOS || isSafari) {
onError?.("mse-decode");
} else {
onError?.("startup");
}
});
},
{ once: true },
@ -187,9 +230,21 @@ function MSEPlayer({
"sourceopen",
() => {
URL.revokeObjectURL(videoRef.current?.src || "");
send({
type: "mse",
value: codecs(MediaSource.isTypeSupported),
sendWithTimeout(
{
type: "mse",
value: codecs(MediaSource.isTypeSupported),
},
3000,
).catch(() => {
if (wsRef.current) {
onDisconnect();
}
if (isIOS || isSafari) {
onError?.("mse-decode");
} else {
onError?.("startup");
}
});
},
{ once: true },
@ -260,10 +315,6 @@ function MSEPlayer({
return () => {
onDisconnect();
if (bufferTimeout) {
clearTimeout(bufferTimeout);
setBufferTimeout(undefined);
}
};
// we know that these deps are correct
// eslint-disable-next-line react-hooks/exhaustive-deps
@ -305,6 +356,23 @@ function MSEPlayer({
videoRef.current.requestPictureInPicture();
}, [pip, videoRef]);
// ensure we disconnect for slower connections
useEffect(() => {
if (wsState === WebSocket.OPEN && !playbackEnabled) {
if (bufferTimeout) {
clearTimeout(bufferTimeout);
setBufferTimeout(undefined);
}
setTimeout(() => {
if (!playbackEnabled) onDisconnect();
}, 10000);
}
// we know that these deps are correct
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [playbackEnabled]);
return (
<video
ref={videoRef}
@ -317,7 +385,8 @@ function MSEPlayer({
}}
muted={!audioEnabled}
onProgress={() => {
if (isSafari || isIOS) {
if ((isSafari || isIOS) && !safariPlaying) {
setSafariPlaying(true);
onPlaying?.();
}
if (onError != undefined) {
@ -334,8 +403,10 @@ function MSEPlayer({
setTimeout(() => {
if (
document.visibilityState === "visible" &&
wsRef.current != null
wsRef.current != null &&
videoRef.current
) {
onDisconnect();
onError("stalled");
}
}, 3000),
@ -347,6 +418,9 @@ function MSEPlayer({
// @ts-expect-error code does exist
e.target.error.code == MediaError.MEDIA_ERR_NETWORK
) {
if (wsRef.current) {
onDisconnect();
}
onError?.("startup");
}
@ -355,15 +429,22 @@ function MSEPlayer({
e.target.error.code == MediaError.MEDIA_ERR_DECODE &&
(isSafari || isIOS)
) {
if (wsRef.current) {
onDisconnect();
}
onError?.("mse-decode");
clearTimeout(bufferTimeout);
setBufferTimeout(undefined);
}
setErrorCount((prevCount) => prevCount + 1);
if (wsRef.current) {
wsRef.current.close();
wsRef.current = null;
reconnect(5000);
onDisconnect();
if (errorCount >= 3) {
// too many mse errors, try jsmpeg
onError?.("startup");
} else {
reconnect(5000);
}
}
}}
/>
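Two of the MSE changes above follow the same theme of failing fast instead of hanging: the codec handshake now races a 3-second timeout, and repeated MediaError events eventually surface a startup error so the parent can switch players rather than reconnect forever. A generic sketch of the timeout-wrapped request, with the WebSocket send and response wiring reduced to callbacks (names are illustrative):

```ts
// Wrap a fire-and-forget send in a promise that rejects if no matching
// response arrives in time, letting the caller fall back to another player.
function requestWithTimeout(
  send: () => void,
  onResponse: (handler: () => void) => void,
  timeoutMs: number,
): Promise<void> {
  return new Promise<void>((resolve, reject) => {
    const timer = setTimeout(
      () => reject(new Error("Timeout waiting for response")),
      timeoutMs,
    );
    onResponse(() => {
      clearTimeout(timer);
      resolve();
    });
    send();
  });
}
```

In the component above, the rejection is mapped to `mse-decode` on Safari/iOS and `startup` elsewhere, which the dashboard views translate into a different preferred player.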

View File

@ -130,6 +130,7 @@ const generateRandomEvent = (): ReviewSegment => {
function UIPlayground() {
const { data: config } = useSWR<FrigateConfig>("config");
const contentRef = useRef<HTMLDivElement>(null);
const containerRef = useRef<HTMLDivElement>(null);
const reviewTimelineRef = useRef<HTMLDivElement>(null);
const [mockEvents, setMockEvents] = useState<ReviewSegment[]>([]);
const [mockMotionData, setMockMotionData] = useState<MotionData[]>([]);
@ -344,11 +345,12 @@ function UIPlayground() {
Zoom In
</Button>
</p>
<div className="">
<div ref={containerRef} className="">
{birdseyeConfig && (
<BirdseyeLivePlayer
birdseyeConfig={birdseyeConfig}
liveMode={birdseyeConfig.restream ? "mse" : "jsmpeg"}
containerRef={containerRef}
/>
)}
</div>

View File

@ -20,13 +20,13 @@ import {
} from "react-grid-layout";
import "react-grid-layout/css/styles.css";
import "react-resizable/css/styles.css";
import { LivePlayerMode } from "@/types/live";
import { LivePlayerError, LivePlayerMode } from "@/types/live";
import { ASPECT_VERTICAL_LAYOUT, ASPECT_WIDE_LAYOUT } from "@/types/record";
import { Skeleton } from "@/components/ui/skeleton";
import { useResizeObserver } from "@/hooks/resize-observer";
import { isEqual } from "lodash";
import useSWR from "swr";
import { isDesktop, isMobile, isSafari } from "react-device-detect";
import { isDesktop, isMobile } from "react-device-detect";
import BirdseyeLivePlayer from "@/components/player/BirdseyeLivePlayer";
import LivePlayer from "@/components/player/LivePlayer";
import { IoClose } from "react-icons/io5";
@ -73,6 +73,31 @@ export default function DraggableGridLayout({
const { data: config } = useSWR<FrigateConfig>("config");
const birdseyeConfig = useMemo(() => config?.birdseye, [config]);
// preferred live modes per camera
const [preferredLiveModes, setPreferredLiveModes] = useState<{
[key: string]: LivePlayerMode;
}>({});
useEffect(() => {
if (!cameras) return;
const newPreferredLiveModes = cameras.reduce(
(acc, camera) => {
const isRestreamed =
config &&
Object.keys(config.go2rtc.streams || {}).includes(
camera.live.stream_name,
);
acc[camera.name] = isRestreamed ? "mse" : "jsmpeg";
return acc;
},
{} as { [key: string]: LivePlayerMode },
);
setPreferredLiveModes(newPreferredLiveModes);
}, [cameras, config]);
const ResponsiveGridLayout = useMemo(() => WidthProvider(Responsive), []);
const [gridLayout, setGridLayout, isGridLayoutLoaded] = usePersistence<
@ -429,10 +454,21 @@ export default function DraggableGridLayout({
windowVisible && visibleCameras.includes(camera.name)
}
cameraConfig={camera}
preferredLiveMode={isSafari ? "webrtc" : "mse"}
preferredLiveMode={preferredLiveModes[camera.name] ?? "mse"}
onClick={() => {
!isEditMode && onSelectCamera(camera.name);
}}
onError={(e) => {
setPreferredLiveModes((prevModes) => {
const newModes = { ...prevModes };
if (e === "mse-decode") {
newModes[camera.name] = "webrtc";
} else {
newModes[camera.name] = "jsmpeg";
}
return newModes;
});
}}
>
{isEditMode && showCircles && <CornerCircles />}
</LivePlayerGridItem>
@ -590,6 +626,7 @@ type LivePlayerGridItemProps = {
cameraConfig: CameraConfig;
preferredLiveMode: LivePlayerMode;
onClick: () => void;
onError: (e: LivePlayerError) => void;
};
const LivePlayerGridItem = React.forwardRef<
@ -609,6 +646,7 @@ const LivePlayerGridItem = React.forwardRef<
cameraConfig,
preferredLiveMode,
onClick,
onError,
...props
},
ref,
@ -629,6 +667,7 @@ const LivePlayerGridItem = React.forwardRef<
cameraConfig={cameraConfig}
preferredLiveMode={preferredLiveMode}
onClick={onClick}
onError={onError}
containerRef={ref as React.RefObject<HTMLDivElement>}
/>
{children}

View File

@ -16,7 +16,6 @@ import {
isDesktop,
isMobile,
isMobileOnly,
isSafari,
isTablet,
} from "react-device-detect";
import useSWR from "swr";
@ -24,6 +23,7 @@ import DraggableGridLayout from "./DraggableGridLayout";
import { IoClose } from "react-icons/io5";
import { LuLayoutDashboard } from "react-icons/lu";
import { cn } from "@/lib/utils";
import { LivePlayerMode } from "@/types/live";
type LiveDashboardViewProps = {
cameras: CameraConfig[];
@ -99,6 +99,29 @@ export default function LiveDashboardView({
// camera live views
const [autoLiveView] = usePersistence("autoLiveView", true);
const [preferredLiveModes, setPreferredLiveModes] = useState<{
[key: string]: LivePlayerMode;
}>({});
useEffect(() => {
if (!cameras) return;
const newPreferredLiveModes = cameras.reduce(
(acc, camera) => {
const isRestreamed =
config &&
Object.keys(config.go2rtc.streams || {}).includes(
camera.live.stream_name,
);
acc[camera.name] = isRestreamed ? "mse" : "jsmpeg";
return acc;
},
{} as { [key: string]: LivePlayerMode },
);
setPreferredLiveModes(newPreferredLiveModes);
}, [cameras, config]);
const [windowVisible, setWindowVisible] = useState(true);
const visibilityListener = useCallback(() => {
setWindowVisible(document.visibilityState == "visible");
@ -289,9 +312,20 @@ export default function LiveDashboardView({
windowVisible && visibleCameras.includes(camera.name)
}
cameraConfig={camera}
preferredLiveMode={isSafari ? "webrtc" : "mse"}
preferredLiveMode={preferredLiveModes[camera.name] ?? "mse"}
autoLive={autoLiveView}
onClick={() => onSelectCamera(camera.name)}
onError={(e) => {
setPreferredLiveModes((prevModes) => {
const newModes = { ...prevModes };
if (e === "mse-decode") {
newModes[camera.name] = "webrtc";
} else {
newModes[camera.name] = "jsmpeg";
}
return newModes;
});
}}
/>
);
})}
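
DraggableGridLayout and LiveDashboardView now carry the same per-camera bookkeeping: seed each camera's preferred mode from whether it is restreamed through go2rtc, then downgrade on player errors. A hedged sketch of that logic pulled into a standalone hook; the hook name and the exact config shapes are hypothetical, but the error-to-mode mapping matches the handlers above:

```ts
import { useEffect, useState } from "react";

type LivePlayerMode = "webrtc" | "mse" | "jsmpeg";
type LivePlayerError = "mse-decode" | "startup" | "stalled";
type CameraLike = { name: string; live: { stream_name: string } };

// Hypothetical consolidation of the per-camera mode state used by both views.
function useCameraLiveModes(
  cameras: CameraLike[] | undefined,
  go2rtcStreams: Record<string, unknown> | undefined,
) {
  const [modes, setModes] = useState<Record<string, LivePlayerMode>>({});

  useEffect(() => {
    if (!cameras) return;
    setModes(
      Object.fromEntries(
        cameras.map((camera): [string, LivePlayerMode] => [
          camera.name,
          // Restreamed cameras start on MSE; everything else uses jsmpeg.
          go2rtcStreams && camera.live.stream_name in go2rtcStreams
            ? "mse"
            : "jsmpeg",
        ]),
      ),
    );
  }, [cameras, go2rtcStreams]);

  // Decode failures try WebRTC next; any other error drops to jsmpeg.
  const onPlayerError = (cameraName: string, error: LivePlayerError) =>
    setModes((prev) => ({
      ...prev,
      [cameraName]: error === "mse-decode" ? "webrtc" : "jsmpeg",
    }));

  return { modes, onPlayerError };
}
```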