Live player fixes (#13143)

* Jump to live when exceeding buffer time threshold in MSE player

* clean up

* Try adjusting playback rate instead of jumping to live

* clean up

* fallback to webrtc if enabled before jsmpeg

* baseline

* clean up

* remove comments

* adaptive playback rate and intelligent switching improvements

* increase logging and reset live mode after camera is no longer active on dashboard only

* jump to live on safari/iOS

* clean up

* clean up

* refactor camera live mode hook

* remove key listener

* resolve conflicts
Josh Hawkins, 2024-08-17 13:16:48 -05:00 (committed by Nicolas Mowen)
parent 758b0f9734
commit ef46451b80
7 changed files with 228 additions and 120 deletions
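
The heart of the MSE changes is an adaptive catch-up strategy: the player keeps a rolling window of recent buffered durations, derives a threshold from their average, and ramps playbackRate exponentially once the buffer drifts past it (Safari/iOS instead jumps back to the live edge, since changing the rate there triggers re-buffering). A condensed sketch of that logic, pulled from the MSEPlayer diff below — the standalone function names here are illustrative; the component-level helpers are calculateAdaptiveBufferThreshold and calculateAdaptivePlaybackRate:

// Rolling-average threshold: Safari/iOS gets a wider margin (3x vs 1.5x the average).
function adaptiveBufferThreshold(bufferTimes: number[], isSafariOrIOS: boolean): number {
  const average = bufferTimes.length
    ? bufferTimes.reduce((a, b) => a + b, 0) / bufferTimes.length
    : 0;
  return average * (isSafariOrIOS ? 3 : 1.5);
}

// Exponential speed-up, capped at 2x, once buffered time drifts past the threshold.
function adaptivePlaybackRate(bufferTime: number, bufferThreshold: number): number {
  const alpha = 0.2; // aggressiveness of playback rate increase
  const beta = 0.5; // steepness of exponential growth

  // close enough to live: keep normal speed
  if ((bufferTime <= bufferThreshold && bufferThreshold < 3) || bufferTime < 3) {
    return 1;
  }

  const rate = 1 + alpha * Math.exp(beta * bufferTime - bufferThreshold);
  return Math.min(rate, 2);
}

A 5-second BUFFERING_COOLDOWN_TIMEOUT keeps the player from re-adjusting immediately after a jump, and anything past roughly 10 seconds of buffer is treated as a stall and surfaced through onError("stalled").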

View File

@@ -13,7 +13,6 @@ import {
   LivePlayerMode,
   VideoResolutionType,
 } from "@/types/live";
-import useCameraLiveMode from "@/hooks/use-camera-live-mode";
 import { getIconForLabel } from "@/utils/iconUtil";
 import Chip from "../indicators/Chip";
 import { capitalizeFirstLetter } from "@/utils/stringUtil";
@@ -25,7 +24,7 @@ type LivePlayerProps = {
   containerRef?: React.MutableRefObject<HTMLDivElement | null>;
   className?: string;
   cameraConfig: CameraConfig;
-  preferredLiveMode?: LivePlayerMode;
+  preferredLiveMode: LivePlayerMode;
   showStillWithoutActivity?: boolean;
   windowVisible?: boolean;
   playAudio?: boolean;
@@ -36,6 +35,7 @@ type LivePlayerProps = {
   onClick?: () => void;
   setFullResolution?: React.Dispatch<React.SetStateAction<VideoResolutionType>>;
   onError?: (error: LivePlayerError) => void;
+  onResetLiveMode?: () => void;
 };

 export default function LivePlayer({
@@ -54,6 +54,7 @@ export default function LivePlayer({
   onClick,
   setFullResolution,
   onError,
+  onResetLiveMode,
 }: LivePlayerProps) {
   const internalContainerRef = useRef<HTMLDivElement | null>(null);
   // camera activity
@@ -70,8 +71,6 @@ export default function LivePlayer({

   // camera live state

-  const liveMode = useCameraLiveMode(cameraConfig, preferredLiveMode);
-
   const [liveReady, setLiveReady] = useState(false);
   const liveReadyRef = useRef(liveReady);
@@ -91,6 +90,7 @@ export default function LivePlayer({
     const timer = setTimeout(() => {
       if (liveReadyRef.current && !cameraActiveRef.current) {
         setLiveReady(false);
+        onResetLiveMode?.();
       }
     }, 500);
@@ -152,7 +152,7 @@ export default function LivePlayer({
   let player;
   if (!autoLive) {
     player = null;
-  } else if (liveMode == "webrtc") {
+  } else if (preferredLiveMode == "webrtc") {
     player = (
       <WebRtcPlayer
         className={`size-full rounded-lg md:rounded-2xl ${liveReady ? "" : "hidden"}`}
@@ -166,7 +166,7 @@ export default function LivePlayer({
         onError={onError}
       />
     );
-  } else if (liveMode == "mse") {
+  } else if (preferredLiveMode == "mse") {
     if ("MediaSource" in window || "ManagedMediaSource" in window) {
       player = (
         <MSEPlayer
@@ -187,7 +187,7 @@ export default function LivePlayer({
         </div>
       );
     }
-  } else if (liveMode == "jsmpeg") {
+  } else if (preferredLiveMode == "jsmpeg") {
     if (cameraActive || !showStillWithoutActivity || liveReady) {
       player = (
         <JSMpegPlayer

View File

@@ -32,6 +32,7 @@ function MSEPlayer({
   onError,
 }: MSEPlayerProps) {
   const RECONNECT_TIMEOUT: number = 10000;
+  const BUFFERING_COOLDOWN_TIMEOUT: number = 5000;
   const CODECS: string[] = [
     "avc1.640029", // H.264 high 4.1 (Chromecast 1st and 2nd Gen)
@@ -46,6 +47,11 @@ function MSEPlayer({
   const visibilityCheck: boolean = !pip;

   const [isPlaying, setIsPlaying] = useState(false);
+  const lastJumpTimeRef = useRef(0);
+
+  const MAX_BUFFER_ENTRIES = 10; // Size of the rolling window of buffered times
+  const bufferTimes = useRef<number[]>([]);
+  const bufferIndex = useRef(0);

   const [wsState, setWsState] = useState<number>(WebSocket.CLOSED);
   const [connectTS, setConnectTS] = useState<number>(0);
@@ -133,6 +139,13 @@ function MSEPlayer({
     }
   }, [bufferTimeout]);

+  const handlePause = useCallback(() => {
+    // don't let the user pause the live stream
+    if (isPlaying && playbackEnabled) {
+      videoRef.current?.play();
+    }
+  }, [isPlaying, playbackEnabled]);
+
   const onOpen = () => {
     setWsState(WebSocket.OPEN);
@@ -193,6 +206,7 @@ function MSEPlayer({

   const onMse = () => {
     if ("ManagedMediaSource" in window) {
+      // safari
       const MediaSource = window.ManagedMediaSource;

       msRef.current?.addEventListener(
@@ -224,6 +238,7 @@ function MSEPlayer({
         videoRef.current.srcObject = msRef.current;
       }
     } else {
+      // non safari
       msRef.current?.addEventListener(
         "sourceopen",
         () => {
@@ -247,15 +262,35 @@ function MSEPlayer({
         },
         { once: true },
       );
-      videoRef.current!.src = URL.createObjectURL(msRef.current!);
-      videoRef.current!.srcObject = null;
+      if (videoRef.current && msRef.current) {
+        videoRef.current.src = URL.createObjectURL(msRef.current);
+        videoRef.current.srcObject = null;
+      }
     }

     play();

     onmessageRef.current["mse"] = (msg) => {
       if (msg.type !== "mse") return;
-      const sb = msRef.current?.addSourceBuffer(msg.value);
+      let sb: SourceBuffer | undefined;
+      try {
+        sb = msRef.current?.addSourceBuffer(msg.value);
+        if (sb?.mode) {
+          sb.mode = "segments";
+        }
+      } catch (e) {
+        // Safari sometimes throws this error
+        if (e instanceof DOMException && e.name === "InvalidStateError") {
+          if (wsRef.current) {
+            onDisconnect();
+          }
+          onError?.("mse-decode");
+          return;
+        } else {
+          throw e; // Re-throw if it's not the error we're handling
+        }
+      }
       sb?.addEventListener("updateend", () => {
         if (sb.updating) return;
@@ -302,6 +337,43 @@ function MSEPlayer({
     return video.buffered.end(video.buffered.length - 1) - video.currentTime;
   };

+  const jumpToLive = () => {
+    if (!videoRef.current) return;
+
+    const buffered = videoRef.current.buffered;
+    if (buffered.length > 0) {
+      const liveEdge = buffered.end(buffered.length - 1);
+      // Jump to the live edge
+      videoRef.current.currentTime = liveEdge - 0.75;
+      lastJumpTimeRef.current = Date.now();
+    }
+  };
+
+  const calculateAdaptiveBufferThreshold = () => {
+    const filledEntries = bufferTimes.current.length;
+    const sum = bufferTimes.current.reduce((a, b) => a + b, 0);
+    const averageBufferTime = filledEntries ? sum / filledEntries : 0;
+    return averageBufferTime * (isSafari || isIOS ? 3 : 1.5);
+  };
+
+  const calculateAdaptivePlaybackRate = (
+    bufferTime: number,
+    bufferThreshold: number,
+  ) => {
+    const alpha = 0.2; // aggressiveness of playback rate increase
+    const beta = 0.5; // steepness of exponential growth
+
+    // don't adjust playback rate if we're close enough to live
+    if (
+      (bufferTime <= bufferThreshold && bufferThreshold < 3) ||
+      bufferTime < 3
+    ) {
+      return 1;
+    }
+
+    const rate = 1 + alpha * Math.exp(beta * bufferTime - bufferThreshold);
+    return Math.min(rate, 2);
+  };
+
   useEffect(() => {
     if (!playbackEnabled) {
       return;
@@ -386,21 +458,71 @@ function MSEPlayer({
        handleLoadedMetadata?.();
        onPlaying?.();
        setIsPlaying(true);
+        lastJumpTimeRef.current = Date.now();
      }}
      muted={!audioEnabled}
-      onPause={() => videoRef.current?.play()}
+      onPause={handlePause}
      onProgress={() => {
+        const bufferTime = getBufferedTime(videoRef.current);
+
+        if (
+          videoRef.current &&
+          (videoRef.current.playbackRate === 1 || bufferTime < 3)
+        ) {
+          if (bufferTimes.current.length < MAX_BUFFER_ENTRIES) {
+            bufferTimes.current.push(bufferTime);
+          } else {
+            bufferTimes.current[bufferIndex.current] = bufferTime;
+            bufferIndex.current =
+              (bufferIndex.current + 1) % MAX_BUFFER_ENTRIES;
+          }
+        }
+
+        const bufferThreshold = calculateAdaptiveBufferThreshold();
+
        // if we have > 3 seconds of buffered data and we're still not playing,
        // something might be wrong - maybe codec issue, no audio, etc
        // so mark the player as playing so that error handlers will fire
-        if (
-          !isPlaying &&
-          playbackEnabled &&
-          getBufferedTime(videoRef.current) > 3
-        ) {
+        if (!isPlaying && playbackEnabled && bufferTime > 3) {
          setIsPlaying(true);
+          lastJumpTimeRef.current = Date.now();
          onPlaying?.();
        }
+
+        // if we have more than 10 seconds of buffer, something's wrong so error out
+        if (
+          isPlaying &&
+          playbackEnabled &&
+          (bufferThreshold > 10 || bufferTime > 10)
+        ) {
+          onDisconnect();
+          onError?.("stalled");
+        }
+
+        const playbackRate = calculateAdaptivePlaybackRate(
+          bufferTime,
+          bufferThreshold,
+        );
+
+        // if we're above our rolling average threshold or have > 3 seconds of
+        // buffered data and we're playing, we may have drifted from actual live
+        // time, so increase playback rate to compensate - non safari/ios only
+        if (
+          videoRef.current &&
+          isPlaying &&
+          playbackEnabled &&
+          Date.now() - lastJumpTimeRef.current > BUFFERING_COOLDOWN_TIMEOUT
+        ) {
+          // Jump to live on Safari/iOS due to a change of playback rate causing re-buffering
+          if (isSafari || isIOS) {
+            if (bufferTime > 3) {
+              jumpToLive();
+            }
+          } else {
+            videoRef.current.playbackRate = playbackRate;
+          }
+        }
+
        if (onError != undefined) {
          if (videoRef.current?.paused) {
            return;

View File

@@ -1,49 +1,65 @@
 import { CameraConfig, FrigateConfig } from "@/types/frigateConfig";
-import { useMemo } from "react";
+import { useCallback, useEffect, useState } from "react";
 import useSWR from "swr";
-import { usePersistence } from "./use-persistence";
 import { LivePlayerMode } from "@/types/live";

 export default function useCameraLiveMode(
-  cameraConfig: CameraConfig,
-  preferredMode?: LivePlayerMode,
-): LivePlayerMode | undefined {
-  const { data: config } = useSWR<FrigateConfig>("config");
-
-  const restreamEnabled = useMemo(() => {
-    if (!config) {
-      return false;
-    }
-
-    return (
-      cameraConfig &&
-      Object.keys(config.go2rtc.streams || {}).includes(
-        cameraConfig.live.stream_name,
-      )
-    );
-  }, [config, cameraConfig]);
-
-  const defaultLiveMode = useMemo<LivePlayerMode | undefined>(() => {
-    if (config) {
-      if (restreamEnabled) {
-        return preferredMode || "mse";
-      }
-      return "jsmpeg";
-    }
-
-    return undefined;
-  }, [config, preferredMode, restreamEnabled]);
-
-  const [viewSource] = usePersistence<LivePlayerMode>(
-    `${cameraConfig.name}-source`,
-    defaultLiveMode,
-  );
-
-  if (
-    restreamEnabled &&
-    (preferredMode == "mse" || preferredMode == "webrtc")
-  ) {
-    return preferredMode;
-  } else {
-    return viewSource;
-  }
+  cameras: CameraConfig[],
+  windowVisible: boolean,
+) {
+  const { data: config } = useSWR<FrigateConfig>("config");
+  const [preferredLiveModes, setPreferredLiveModes] = useState<{
+    [key: string]: LivePlayerMode;
+  }>({});
+
+  useEffect(() => {
+    if (!cameras) return;
+
+    const mseSupported =
+      "MediaSource" in window || "ManagedMediaSource" in window;
+
+    const newPreferredLiveModes = cameras.reduce(
+      (acc, camera) => {
+        const isRestreamed =
+          config &&
+          Object.keys(config.go2rtc.streams || {}).includes(
+            camera.live.stream_name,
+          );
+
+        if (!mseSupported) {
+          acc[camera.name] = isRestreamed ? "webrtc" : "jsmpeg";
+        } else {
+          acc[camera.name] = isRestreamed ? "mse" : "jsmpeg";
+        }
+        return acc;
+      },
+      {} as { [key: string]: LivePlayerMode },
+    );
+
+    setPreferredLiveModes(newPreferredLiveModes);
+  }, [cameras, config, windowVisible]);
+
+  const resetPreferredLiveMode = useCallback(
+    (cameraName: string) => {
+      const mseSupported =
+        "MediaSource" in window || "ManagedMediaSource" in window;
+      const isRestreamed =
+        config && Object.keys(config.go2rtc.streams || {}).includes(cameraName);
+
+      setPreferredLiveModes((prevModes) => {
+        const newModes = { ...prevModes };
+
+        if (!mseSupported) {
+          newModes[cameraName] = isRestreamed ? "webrtc" : "jsmpeg";
+        } else {
+          newModes[cameraName] = isRestreamed ? "mse" : "jsmpeg";
+        }
+
+        return newModes;
+      });
+    },
+    [config],
+  );
+
+  return { preferredLiveModes, setPreferredLiveModes, resetPreferredLiveMode };
 }
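
For reference, both dashboard views below consume the reworked hook the same way; a minimal usage sketch (the per-camera prop wiring shown in the comments is illustrative, the actual wiring is in the view diffs that follow):

// Per-view live-mode state now comes from the hook instead of a local useEffect + useState pair.
const { preferredLiveModes, setPreferredLiveModes, resetPreferredLiveMode } =
  useCameraLiveMode(cameras, windowVisible);

// Each LivePlayer then receives its mode and a reset callback for when it goes inactive:
//   preferredLiveMode={preferredLiveModes[camera.name]}
//   onResetLiveMode={() => resetPreferredLiveMode(camera.name)}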

View File

@@ -298,7 +298,12 @@ export interface FrigateConfig {
     retry_interval: number;
   };

-  go2rtc: Record<string, unknown>;
+  go2rtc: {
+    streams: string[];
+    webrtc: {
+      candidates: string[];
+    };
+  };

   camera_groups: { [groupName: string]: CameraGroupConfig };

View File

@@ -41,6 +41,7 @@ import {
   TooltipContent,
 } from "@/components/ui/tooltip";
 import { Toaster } from "@/components/ui/sonner";
+import useCameraLiveMode from "@/hooks/use-camera-live-mode";

 type DraggableGridLayoutProps = {
   cameras: CameraConfig[];
@@ -75,36 +76,8 @@ export default function DraggableGridLayout({

   // preferred live modes per camera
-  const [preferredLiveModes, setPreferredLiveModes] = useState<{
-    [key: string]: LivePlayerMode;
-  }>({});
-
-  useEffect(() => {
-    if (!cameras) return;
-
-    const mseSupported =
-      "MediaSource" in window || "ManagedMediaSource" in window;
-
-    const newPreferredLiveModes = cameras.reduce(
-      (acc, camera) => {
-        const isRestreamed =
-          config &&
-          Object.keys(config.go2rtc.streams || {}).includes(
-            camera.live.stream_name,
-          );
-
-        if (!mseSupported) {
-          acc[camera.name] = isRestreamed ? "webrtc" : "jsmpeg";
-        } else {
-          acc[camera.name] = isRestreamed ? "mse" : "jsmpeg";
-        }
-        return acc;
-      },
-      {} as { [key: string]: LivePlayerMode },
-    );
-
-    setPreferredLiveModes(newPreferredLiveModes);
-  }, [cameras, config, windowVisible]);
+  const { preferredLiveModes, setPreferredLiveModes, resetPreferredLiveMode } =
+    useCameraLiveMode(cameras, windowVisible);

   const ResponsiveGridLayout = useMemo(() => WidthProvider(Responsive), []);
@@ -477,6 +450,7 @@ export default function DraggableGridLayout({
                   return newModes;
                 });
               }}
+              onResetLiveMode={() => resetPreferredLiveMode(camera.name)}
             >
               {isEditMode && showCircles && <CornerCircles />}
             </LivePlayerGridItem>
@@ -635,6 +609,7 @@ type LivePlayerGridItemProps = {
   preferredLiveMode: LivePlayerMode;
   onClick: () => void;
   onError: (e: LivePlayerError) => void;
+  onResetLiveMode: () => void;
 };

 const LivePlayerGridItem = React.forwardRef<
@@ -655,6 +630,7 @@ const LivePlayerGridItem = React.forwardRef<
       preferredLiveMode,
       onClick,
       onError,
+      onResetLiveMode,
       ...props
     },
     ref,
@@ -676,6 +652,7 @@ const LivePlayerGridItem = React.forwardRef<
         preferredLiveMode={preferredLiveMode}
         onClick={onClick}
         onError={onError}
+        onResetLiveMode={onResetLiveMode}
         containerRef={ref as React.RefObject<HTMLDivElement>}
       />
       {children}

View File

@@ -227,6 +227,10 @@ export default function LiveCameraView({
       return "webrtc";
     }

+    if (!isRestreamed) {
+      return "jsmpeg";
+    }
+
     return "mse";
   }, [lowBandwidth, mic, webRTC, isRestreamed]);
@@ -286,14 +290,23 @@ export default function LiveCameraView({
     }
   }, [fullscreen, isPortrait, cameraAspectRatio, containerAspectRatio]);

-  const handleError = useCallback((e: LivePlayerError) => {
-    if (e == "mse-decode") {
-      setWebRTC(true);
-    } else {
-      setWebRTC(false);
-      setLowBandwidth(true);
-    }
-  }, []);
+  const handleError = useCallback(
+    (e: LivePlayerError) => {
+      if (e) {
+        if (
+          !webRTC &&
+          config &&
+          config.go2rtc?.webrtc?.candidates?.length > 0
+        ) {
+          setWebRTC(true);
+        } else {
+          setWebRTC(false);
+          setLowBandwidth(true);
+        }
+      }
+    },
+    [config, webRTC],
+  );

   return (
     <TransformWrapper minScale={1.0} wheel={{ smoothStep: 0.005 }}>

View File

@@ -28,8 +28,9 @@ import DraggableGridLayout from "./DraggableGridLayout";
 import { IoClose } from "react-icons/io5";
 import { LuLayoutDashboard } from "react-icons/lu";
 import { cn } from "@/lib/utils";
-import { LivePlayerError, LivePlayerMode } from "@/types/live";
+import { LivePlayerError } from "@/types/live";
 import { FaCompress, FaExpand } from "react-icons/fa";
+import useCameraLiveMode from "@/hooks/use-camera-live-mode";
 import { useResizeObserver } from "@/hooks/resize-observer";

 type LiveDashboardViewProps = {
@@ -129,9 +130,6 @@ export default function LiveDashboardView({
   // camera live views
   const [autoLiveView] = usePersistence("autoLiveView", true);
-  const [preferredLiveModes, setPreferredLiveModes] = useState<{
-    [key: string]: LivePlayerMode;
-  }>({});

   const [{ height: containerHeight }] = useResizeObserver(containerRef);
@@ -186,32 +184,8 @@ export default function LiveDashboardView({
     };
   }, []);

-  useEffect(() => {
-    if (!cameras) return;
-
-    const mseSupported =
-      "MediaSource" in window || "ManagedMediaSource" in window;
-
-    const newPreferredLiveModes = cameras.reduce(
-      (acc, camera) => {
-        const isRestreamed =
-          config &&
-          Object.keys(config.go2rtc.streams || {}).includes(
-            camera.live.stream_name,
-          );
-
-        if (!mseSupported) {
-          acc[camera.name] = isRestreamed ? "webrtc" : "jsmpeg";
-        } else {
-          acc[camera.name] = isRestreamed ? "mse" : "jsmpeg";
-        }
-        return acc;
-      },
-      {} as { [key: string]: LivePlayerMode },
-    );
-
-    setPreferredLiveModes(newPreferredLiveModes);
-  }, [cameras, config, windowVisible]);
+  const { preferredLiveModes, setPreferredLiveModes, resetPreferredLiveMode } =
+    useCameraLiveMode(cameras, windowVisible);

   const cameraRef = useCallback(
     (node: HTMLElement | null) => {
@@ -381,6 +355,7 @@ export default function LiveDashboardView({
                 autoLive={autoLiveView}
                 onClick={() => onSelectCamera(camera.name)}
                 onError={(e) => handleError(camera.name, e)}
+                onResetLiveMode={() => resetPreferredLiveMode(camera.name)}
               />
             );
           })}