Add live views to new webui (#8970)

* Add support for live views

* Lint fixes

* Fix width

* Fix lint

* Cleanup hooks
This commit is contained in:
Nicolas Mowen 2023-12-15 16:24:50 -07:00 committed by Blake Blackshear
parent fbe58652d5
commit e3387de48f
16 changed files with 1528 additions and 207 deletions

View File

@ -58,6 +58,7 @@
"@types/react": "^18.2.37",
"@types/react-dom": "^18.2.15",
"@types/react-icons": "^3.0.0",
"@types/strftime": "^0.9.8",
"@typescript-eslint/eslint-plugin": "^6.10.0",
"@typescript-eslint/parser": "^6.10.0",
"@vitejs/plugin-react-swc": "^3.5.0",
@ -2332,6 +2333,12 @@
"integrity": "sha512-eqNDvZsCNY49OAXB0Firg/Sc2BgoWsntsLUdybGFOhAfCD6QJ2n9HXUIHGqt5qjrxmMv4wS8WLAw43ZkKcJ8Pw==",
"dev": true
},
"node_modules/@types/strftime": {
"version": "0.9.8",
"resolved": "https://registry.npmjs.org/@types/strftime/-/strftime-0.9.8.tgz",
"integrity": "sha512-QIvDlGAKyF3YJbT3QZnfC+RIvV5noyDbi+ZJ5rkaSRqxCGrYJefgXm3leZAjtoQOutZe1hCXbAg+p89/Vj4HlQ==",
"dev": true
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "6.13.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.13.1.tgz",

View File

@ -63,6 +63,7 @@
"@types/react": "^18.2.37",
"@types/react-dom": "^18.2.15",
"@types/react-icons": "^3.0.0",
"@types/strftime": "^0.9.8",
"@typescript-eslint/eslint-plugin": "^6.10.0",
"@typescript-eslint/parser": "^6.10.0",
"@vitejs/plugin-react-swc": "^3.5.0",

View File

@ -29,7 +29,7 @@ function App() {
<Header onToggleNavbar={toggleNavbar} />
<div className="grid grid-cols-[auto,1fr] flex-grow-1 overflow-auto">
<Sidebar sheetOpen={sheetOpen} setSheetOpen={setSheetOpen} />
<div className="overflow-x-hidden px-4 py-2">
<div className="overflow-x-hidden px-4 py-2 w-screen md:w-full">
<Routes>
<Route path="/" element={<Dashboard />} />
<Route path="/live" element={<Live />} />

View File

@ -0,0 +1,47 @@
import { useCallback, useEffect, useRef, useState } from "react";
import CameraImage from "./CameraImage";
type AutoUpdatingCameraImageProps = {
  camera: string;
  searchParams?: {};
  showFps?: boolean;
  className?: string;
};

// Lower bound on the refresh interval so slow loads don't inflate the
// reported FPS and fast loads don't hammer the API.
const MIN_LOAD_TIMEOUT_MS = 200;

/**
 * Snapshot-based camera view that continuously re-fetches the latest JPEG
 * by cycling a cache-busting key, optionally displaying the effective FPS.
 */
export default function AutoUpdatingCameraImage({
  camera,
  searchParams = "",
  showFps = true,
  className,
}: AutoUpdatingCameraImageProps) {
  // cache-busting key; doubles as the timestamp of the last refresh request
  const [key, setKey] = useState(Date.now());
  const [fps, setFps] = useState<string>("0");
  // pending refresh timer, kept so it can be cancelled on unmount
  const timeoutIdRef = useRef<ReturnType<typeof setTimeout> | undefined>(
    undefined
  );

  const handleLoad = useCallback(() => {
    const loadTime = Date.now() - key;

    if (showFps) {
      setFps((1000 / Math.max(loadTime, MIN_LOAD_TIMEOUT_MS)).toFixed(1));
    }

    // schedule the next refresh: immediately when the load already took
    // longer than the minimum interval, otherwise after the minimum interval
    timeoutIdRef.current = setTimeout(
      () => {
        setKey(Date.now());
      },
      loadTime > MIN_LOAD_TIMEOUT_MS ? 1 : MIN_LOAD_TIMEOUT_MS
    );
    // showFps was missing from the original dependency list, leaving the
    // callback with a stale value if the prop ever changed
  }, [key, setFps, showFps]);

  // cancel any pending refresh on unmount; the original leaked the timer,
  // so the refresh loop kept firing (and setting state) after teardown
  useEffect(() => {
    return () => {
      if (timeoutIdRef.current) {
        clearTimeout(timeoutIdRef.current);
      }
    };
  }, []);

  return (
    <div className={className}>
      <CameraImage
        camera={camera}
        onload={handleLoad}
        searchParams={`cache=${key}&${searchParams}`}
      />
      {showFps ? <span className="text-xs">Displaying at {fps}fps</span> : null}
    </div>
  );
}

View File

@ -0,0 +1,105 @@
import { useApiHost } from "@/api";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import useSWR from "swr";
import ActivityIndicator from "../ui/activity-indicator";
import { useResizeObserver } from "@/hooks/resize-observer";
type CameraImageProps = {
  camera: string;
  // invoked once per successful image load, with the Image load event
  onload?: (event: Event) => void;
  // extra query-string fragment appended to the latest.jpg request
  searchParams: {};
  // when true, scale past the camera's native detect height
  stretch?: boolean;
};

/**
 * Fetches a camera's `latest.jpg` snapshot and draws it into a canvas sized
 * to the container width while preserving the detect-stream aspect ratio.
 * Shows a spinner until the first frame has loaded and a notice when the
 * camera is disabled in the config.
 */
export default function CameraImage({
  camera,
  onload,
  searchParams = "",
  stretch = false,
}: CameraImageProps) {
  // NOTE(review): config comes back untyped from SWR; the cameras[camera]
  // shape used below (name/enabled/detect) is assumed — confirm against
  // FrigateConfig.
  const { data: config } = useSWR("config");
  const apiHost = useApiHost();
  const [hasLoaded, setHasLoaded] = useState(false);
  const containerRef = useRef<HTMLDivElement | null>(null);
  const canvasRef = useRef<HTMLCanvasElement | null>(null);
  const [{ width: containerWidth }] = useResizeObserver(containerRef);

  // Add scrollbar width (when visible) to the available observer width to eliminate screen juddering.
  // https://github.com/blakeblackshear/frigate/issues/1657
  let scrollBarWidth = 0;
  if (window.innerWidth && document.body.offsetWidth) {
    scrollBarWidth = window.innerWidth - document.body.offsetWidth;
  }
  const availableWidth = scrollBarWidth
    ? containerWidth + scrollBarWidth
    : containerWidth;

  // While config is still loading, destructuring "" yields name === undefined
  // and the 1x1 fallback keeps aspectRatio finite.
  const { name } = config ? config.cameras[camera] : "";
  const enabled = config ? config.cameras[camera].enabled : "True";
  const { width, height } = config
    ? config.cameras[camera].detect
    : { width: 1, height: 1 };
  const aspectRatio = width / height;

  // Canvas height: fit the container width, capped at the native detect
  // height unless `stretch` is set; 100px floor before layout settles.
  const scaledHeight = useMemo(() => {
    const scaledHeight = Math.floor(availableWidth / aspectRatio);
    const finalHeight = stretch ? scaledHeight : Math.min(scaledHeight, height);

    if (finalHeight > 0) {
      return finalHeight;
    }

    return 100;
  }, [availableWidth, aspectRatio, height, stretch]);
  const scaledWidth = useMemo(
    () => Math.ceil(scaledHeight * aspectRatio - scrollBarWidth),
    [scaledHeight, aspectRatio, scrollBarWidth]
  );

  // A single Image instance is reused across renders; its onload handler is
  // reassigned each render so it always draws at the current scaled size.
  const img = useMemo(() => new Image(), []);
  img.onload = useCallback(
    (event: Event) => {
      setHasLoaded(true);

      if (canvasRef.current) {
        const ctx = canvasRef.current.getContext("2d");
        ctx?.drawImage(img, 0, 0, scaledWidth, scaledHeight);
      }

      onload && onload(event);
    },
    [img, scaledHeight, scaledWidth, setHasLoaded, onload, canvasRef]
  );

  // Kick off (or re-kick) the snapshot fetch whenever the size or params
  // change; assigning img.src triggers the onload handler above.
  useEffect(() => {
    if (!config || scaledHeight === 0 || !canvasRef.current) {
      return;
    }
    img.src = `${apiHost}api/${name}/latest.jpg?h=${scaledHeight}${
      searchParams ? `&${searchParams}` : ""
    }`;
  }, [apiHost, canvasRef, name, img, searchParams, scaledHeight, config]);

  return (
    <div className="relative w-full" ref={containerRef}>
      {enabled ? (
        <canvas
          data-testid="cameraimage-canvas"
          height={scaledHeight}
          ref={canvasRef}
          width={scaledWidth}
        />
      ) : (
        <div className="text-center pt-6">
          Camera is disabled in config, no stream or snapshot available!
        </div>
      )}
      {!hasLoaded && enabled ? (
        <div
          className="absolute inset-0 flex justify-center"
          style={{ height: `${scaledHeight}px` }}
        >
          <ActivityIndicator />
        </div>
      ) : null}
    </div>
  );
}

View File

@ -0,0 +1,175 @@
import WebRtcPlayer from "./WebRTCPlayer";
import { CameraConfig } from "@/types/frigateConfig";
import AutoUpdatingCameraImage from "../camera/AutoUpdatingCameraImage";
import ActivityIndicator from "../ui/activity-indicator";
import { Button } from "../ui/button";
import { LuSettings } from "react-icons/lu";
import { useCallback, useMemo, useState } from "react";
import { Card, CardContent, CardHeader, CardTitle } from "../ui/card";
import { Switch } from "../ui/switch";
import { Label } from "../ui/label";
import { usePersistence } from "@/context/use-persistence";
const emptyObject = Object.freeze({});

type LivePlayerProps = {
  cameraConfig: CameraConfig;
  liveMode: string;
};

type Options = { [key: string]: boolean };

/**
 * Renders the live view for one camera using the requested transport
 * (`webrtc`, `mse`, `jsmpeg`) or the `debug` snapshot view with per-camera
 * overlay options persisted under `<camera>-feed`. Falls back to a spinner
 * while the live mode is still unresolved.
 */
export default function LivePlayer({
  cameraConfig,
  liveMode,
}: LivePlayerProps) {
  const [showSettings, setShowSettings] = useState(false);
  // debug-overlay toggles (bbox, timestamp, …) persisted per camera
  const [options, setOptions] = usePersistence(
    `${cameraConfig.name}-feed`,
    emptyObject
  );

  const handleSetOption = useCallback(
    (id: string, value: boolean) => {
      const newOptions = { ...options, [id]: value };
      setOptions(newOptions);
    },
    [options, setOptions]
  );

  // translate boolean options into the "1"/"0" query params the API expects
  // (typed map instead of the original reduce + @ts-ignore)
  const searchParams = useMemo(
    () =>
      new URLSearchParams(
        Object.keys(options).map((key): [string, string] => [
          key,
          options[key] === true ? "1" : "0",
        ])
      ),
    [options]
  );

  const handleToggleSettings = useCallback(() => {
    setShowSettings(!showSettings);
  }, [showSettings, setShowSettings]);

  if (liveMode == "webrtc") {
    return (
      <div className="max-w-5xl">
        <WebRtcPlayer camera={cameraConfig.live.stream_name} />
      </div>
    );
  } else if (liveMode == "mse") {
    return <div className="max-w-5xl">Not yet implemented</div>;
  } else if (liveMode == "jsmpeg") {
    return (
      <div className={`max-w-[${cameraConfig.detect.width}px]`}>
        Not Yet Implemented
      </div>
    );
  } else if (liveMode == "debug") {
    return (
      <>
        <AutoUpdatingCameraImage
          camera={cameraConfig.name}
          searchParams={searchParams}
        />
        <Button onClick={handleToggleSettings} variant="link" size="sm">
          <span className="w-5 h-5">
            <LuSettings />
          </span>{" "}
          <span>{showSettings ? "Hide" : "Show"} Options</span>
        </Button>
        {showSettings ? (
          <Card>
            <CardHeader>
              <CardTitle>Options</CardTitle>
            </CardHeader>
            <CardContent>
              <DebugSettings
                handleSetOption={handleSetOption}
                options={options}
              />
            </CardContent>
          </Card>
        ) : null}
      </>
    );
  } else {
    // Mode not determined yet (e.g. persisted selection still loading).
    // The original evaluated <ActivityIndicator /> without returning it,
    // so the component rendered nothing at all in this branch.
    return <ActivityIndicator />;
  }
}
type DebugSettingsProps = {
handleSetOption: (id: string, value: boolean) => void;
options: Options;
};
function DebugSettings({ handleSetOption, options }: DebugSettingsProps) {
return (
<div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4">
<div className="flex items-center space-x-2">
<Switch
id="bbox"
checked={options["bbox"]}
onCheckedChange={(isChecked) => {
handleSetOption("bbox", isChecked);
}}
/>
<Label htmlFor="bbox">Bounding Box</Label>
</div>
<div className="flex items-center space-x-2">
<Switch
id="timestamp"
checked={options["timestamp"]}
onCheckedChange={(isChecked) => {
handleSetOption("timestamp", isChecked);
}}
/>
<Label htmlFor="timestamp">Timestamp</Label>
</div>
<div className="flex items-center space-x-2">
<Switch
id="zones"
checked={options["zones"]}
onCheckedChange={(isChecked) => {
handleSetOption("zones", isChecked);
}}
/>
<Label htmlFor="zones">Zones</Label>
</div>
<div className="flex items-center space-x-2">
<Switch
id="mask"
checked={options["mask"]}
onCheckedChange={(isChecked) => {
handleSetOption("mask", isChecked);
}}
/>
<Label htmlFor="mask">Mask</Label>
</div>
<div className="flex items-center space-x-2">
<Switch
id="motion"
checked={options["motion"]}
onCheckedChange={(isChecked) => {
handleSetOption("motion", isChecked);
}}
/>
<Label htmlFor="motion">Motion</Label>
</div>
<div className="flex items-center space-x-2">
<Switch
id="regions"
checked={options["regions"]}
onCheckedChange={(isChecked) => {
handleSetOption("regions", isChecked);
}}
/>
<Label htmlFor="regions">Regions</Label>
</div>
</div>
);
}

View File

@ -18,7 +18,7 @@ type VideoPlayerProps = {
}
export default function VideoPlayer({ children, options, seekOptions = {forward:30, backward: 10}, onReady = (_) => {}, onDispose = () => {} }: VideoPlayerProps) {
const videoRef = useRef<HTMLVideoElement | null>(null);
const videoRef = useRef<HTMLDivElement | null>(null);
const playerRef = useRef<Player | null>(null);
useEffect(() => {

View File

@ -0,0 +1,161 @@
import { baseUrl } from "@/api/baseUrl";
import { useCallback, useEffect, useRef } from "react";
type WebRtcPlayerProps = {
  camera: string;
  width?: number;
  height?: number;
};

/**
 * Requests local media tracks for the given source ("user" capture devices,
 * otherwise screen capture). Returns an empty list when the request fails
 * (e.g. permission denied) so callers can proceed without local media.
 */
async function getMediaTracks(
  media: string,
  constraints: MediaStreamConstraints
) {
  try {
    const stream =
      media === "user"
        ? await navigator.mediaDevices.getUserMedia(constraints)
        : await navigator.mediaDevices.getDisplayMedia(constraints);
    return stream.getTracks();
  } catch (e) {
    return [];
  }
}

/**
 * Plays a go2rtc stream over WebRTC, using the go2rtc WebSocket endpoint
 * for signaling (SDP offer/answer and ICE candidate exchange).
 */
export default function WebRtcPlayer({
  camera,
  width,
  height,
}: WebRtcPlayerProps) {
  const pcRef = useRef<RTCPeerConnection | undefined>();
  const videoRef = useRef<HTMLVideoElement | null>(null);

  // Builds the RTCPeerConnection for the requested media description and
  // attaches the resulting tracks to the <video> element. Resolves to
  // undefined when the video element is not mounted yet.
  const PeerConnection = useCallback(
    async (media: string) => {
      if (!videoRef.current) {
        return;
      }

      const pc = new RTCPeerConnection({
        iceServers: [{ urls: "stun:stun.l.google.com:19302" }],
      });

      const localTracks: MediaStreamTrack[] = [];

      if (/camera|microphone/.test(media)) {
        const tracks = await getMediaTracks("user", {
          video: media.indexOf("camera") >= 0,
          audio: media.indexOf("microphone") >= 0,
        });
        tracks.forEach((track) => {
          pc.addTransceiver(track, { direction: "sendonly" });
          if (track.kind === "video") localTracks.push(track);
        });
      }

      if (media.indexOf("display") >= 0) {
        const tracks = await getMediaTracks("display", {
          video: true,
          audio: media.indexOf("speaker") >= 0,
        });
        tracks.forEach((track) => {
          pc.addTransceiver(track, { direction: "sendonly" });
          if (track.kind === "video") localTracks.push(track);
        });
      }

      if (/video|audio/.test(media)) {
        // receive-only transceivers whose tracks carry the remote stream
        const tracks = ["video", "audio"]
          .filter((kind) => media.indexOf(kind) >= 0)
          .map(
            (kind) =>
              pc.addTransceiver(kind, { direction: "recvonly" }).receiver.track
          );
        localTracks.push(...tracks);
      }

      videoRef.current.srcObject = new MediaStream(localTracks);
      return pc;
    },
    [videoRef]
  );

  // Wires the signaling WebSocket to the (eventual) peer connection.
  const connect = useCallback(
    async (ws: WebSocket, aPc: Promise<RTCPeerConnection | undefined>) => {
      // Await the peer connection and bail out when it could not be created.
      // The original tested the Promise object itself (`if (!aPc)`), which is
      // always truthy, so that guard could never fire.
      const pc = await aPc;

      if (!pc) {
        return;
      }

      pcRef.current = pc;

      ws.addEventListener("open", () => {
        // forward local ICE candidates to the server
        pcRef.current?.addEventListener("icecandidate", (ev) => {
          if (!ev.candidate) return;
          const msg = {
            type: "webrtc/candidate",
            value: ev.candidate.candidate,
          };
          ws.send(JSON.stringify(msg));
        });

        // create and send the SDP offer
        pcRef.current
          ?.createOffer()
          .then((offer) => pcRef.current?.setLocalDescription(offer))
          .then(() => {
            const msg = {
              type: "webrtc/offer",
              value: pcRef.current?.localDescription?.sdp,
            };
            ws.send(JSON.stringify(msg));
          });
      });

      // apply remote candidates / the SDP answer as they arrive
      ws.addEventListener("message", (ev) => {
        const msg = JSON.parse(ev.data);
        if (msg.type === "webrtc/candidate") {
          pcRef.current?.addIceCandidate({ candidate: msg.value, sdpMid: "0" });
        } else if (msg.type === "webrtc/answer") {
          pcRef.current?.setRemoteDescription({
            type: "answer",
            sdp: msg.value,
          });
        }
      });
    },
    []
  );

  useEffect(() => {
    if (!videoRef.current) {
      return;
    }

    const url = `${baseUrl.replace(
      /^http/,
      "ws"
    )}live/webrtc/api/ws?src=${camera}`;
    const ws = new WebSocket(url);
    const aPc = PeerConnection("video+audio");
    connect(ws, aPc);

    return () => {
      // Close the signaling socket as well as the peer connection; the
      // original left the WebSocket open on unmount / camera change.
      ws.close();
      if (pcRef.current) {
        pcRef.current.close();
        pcRef.current = undefined;
      }
    };
  }, [camera, connect, PeerConnection, pcRef, videoRef]);

  return (
    <div>
      <video
        ref={videoRef}
        autoPlay
        playsInline
        controls
        muted
        width={width}
        height={height}
      />
    </div>
  );
}

View File

@ -0,0 +1,39 @@
import { MutableRefObject, useEffect, useMemo, useState } from "react";
/**
 * Observes one or more element refs with a single ResizeObserver and returns
 * their content rects (one entry per ref, same order). Entries start at
 * zero size / -Infinity position until the first observation fires.
 */
export function useResizeObserver(...refs: MutableRefObject<Element | null>[]) {
  const initialRect = {
    width: 0,
    height: 0,
    x: -Infinity,
    y: -Infinity,
  };
  const [dimensions, setDimensions] = useState(
    new Array(refs.length).fill(initialRect)
  );

  // one shared observer for all refs, created once
  const observer = useMemo(
    () =>
      new ResizeObserver((entries) => {
        // defer the state update to the next frame to avoid layout thrash
        window.requestAnimationFrame(() => {
          setDimensions(entries.map((entry) => entry.contentRect));
        });
      }),
    []
  );

  useEffect(() => {
    for (const ref of refs) {
      if (ref.current) {
        observer.observe(ref.current);
      }
    }

    return () => {
      for (const ref of refs) {
        if (ref.current) {
          observer.unobserve(ref.current);
        }
      }
    };
  }, [refs, observer]);

  return dimensions;
}

View File

@ -1,12 +1,18 @@
import { useEffect, useState, useCallback } from "react";
import { get as getData, set as setData } from "idb-keyval";
type usePersistenceReturn = [
value: any | undefined,
setValue: (value: string | boolean) => void,
loaded: boolean,
];
export function usePersistence(
key: string,
defaultValue: string | boolean | undefined = undefined
) {
const [value, setInternalValue] = useState(defaultValue);
const [loaded, setLoaded] = useState(false);
defaultValue: any | undefined = undefined
): usePersistenceReturn {
const [value, setInternalValue] = useState<any | undefined>(defaultValue);
const [loaded, setLoaded] = useState<boolean>(false);
const setValue = useCallback(
(value: string | boolean) => {

View File

@ -0,0 +1,678 @@
/**
 * Custom element that plays a go2rtc stream. It negotiates a transport over
 * a single WebSocket — WebRTC, MSE, MP4 or MJPEG, tried in the order listed
 * in `mode` — and renders into an internal <video> element. Handles
 * visibility-based pause/resume and automatic reconnection.
 */
class VideoRTC extends HTMLElement {
  constructor() {
    super();

    // delay before tearing down connections after the element leaves the DOM
    this.DISCONNECT_TIMEOUT = 5000;
    // minimum interval between reconnect attempts
    this.RECONNECT_TIMEOUT = 30000;

    // codec candidates offered for MSE/MP4 mode, filtered by browser support
    this.CODECS = [
      "avc1.640029", // H.264 high 4.1 (Chromecast 1st and 2nd Gen)
      "avc1.64002A", // H.264 high 4.2 (Chromecast 3rd Gen)
      "avc1.640033", // H.264 high 5.1 (Chromecast with Google TV)
      "hvc1.1.6.L153.B0", // H.265 main 5.1 (Chromecast Ultra)
      "mp4a.40.2", // AAC LC
      "mp4a.40.5", // AAC HE
      "flac", // FLAC (PCM compatible)
      "opus", // OPUS Chrome, Firefox
    ];

    /**
     * [config] Supported modes (webrtc, mse, mp4, mjpeg).
     * @type {string}
     */
    this.mode = "webrtc,mse,mp4,mjpeg";

    /**
     * [config] Run stream when not displayed on the screen. Default `false`.
     * @type {boolean}
     */
    this.background = false;

    /**
     * [config] Run stream only when player in the viewport. Stop when user scroll out player.
     * Value is percentage of visibility from `0` (not visible) to `1` (full visible).
     * Default `0` - disable;
     * @type {number}
     */
    this.visibilityThreshold = 0;

    /**
     * [config] Run stream only when browser page on the screen. Stop when user change browser
     * tab or minimise browser windows.
     * @type {boolean}
     */
    this.visibilityCheck = true;

    /**
     * [config] WebRTC configuration
     * @type {RTCConfiguration}
     */
    this.pcConfig = {
      iceServers: [{ urls: "stun:stun.l.google.com:19302" }],
      sdpSemantics: "unified-plan", // important for Chromecast 1
    };

    /**
     * [info] WebSocket connection state. Values: CONNECTING, OPEN, CLOSED
     * @type {number}
     */
    this.wsState = WebSocket.CLOSED;

    /**
     * [info] WebRTC connection state.
     * @type {number}
     */
    this.pcState = WebSocket.CLOSED;

    /**
     * @type {HTMLVideoElement}
     */
    this.video = null;

    /**
     * @type {WebSocket}
     */
    this.ws = null;

    /**
     * @type {string|URL}
     */
    this.wsURL = "";

    /**
     * @type {RTCPeerConnection}
     */
    this.pc = null;

    /**
     * @type {number}
     */
    this.connectTS = 0;

    /**
     * @type {string}
     */
    this.mseCodecs = "";

    /**
     * [internal] Disconnect TimeoutID.
     * @type {number}
     */
    this.disconnectTID = 0;

    /**
     * [internal] Reconnect TimeoutID.
     * @type {number}
     */
    this.reconnectTID = 0;

    /**
     * [internal] Handler for receiving Binary from WebSocket.
     * @type {Function}
     */
    this.ondata = null;

    /**
     * [internal] Handlers list for receiving JSON from WebSocket
     * @type {Object.<string,Function>}}
     */
    this.onmessage = null;
  }

  /**
   * Set video source (WebSocket URL). Support relative path.
   * @param {string|URL} value
   */
  set src(value) {
    if (typeof value !== "string") value = value.toString();
    // normalize http(s):// and site-relative paths to ws(s)://
    if (value.startsWith("http")) {
      value = `ws${value.substring(4)}`;
    } else if (value.startsWith("/")) {
      value = `ws${location.origin.substring(4)}${value}`;
    }

    this.wsURL = value;

    this.onconnect();
  }

  /**
   * Play video. Support automute when autoplay blocked.
   * https://developer.chrome.com/blog/autoplay/
   */
  play() {
    this.video.play().catch((er) => {
      if (er.name === "NotAllowedError" && !this.video.muted) {
        this.video.muted = true;
        this.video.play().catch(() => {});
      }
    });
  }

  /**
   * Send message to server via WebSocket
   * @param {Object} value
   */
  send(value) {
    if (this.ws) this.ws.send(JSON.stringify(value));
  }

  /**
   * Returns the comma-joined subset of CODECS accepted by the given
   * feature-detection callback (e.g. MediaSource.isTypeSupported).
   * @param {Function} isSupported
   */
  codecs(isSupported) {
    return this.CODECS.filter((codec) =>
      isSupported(`video/mp4; codecs="${codec}"`)
    ).join();
  }

  /**
   * `CustomElement`. Invoked each time the custom element is appended into a
   * document-connected element.
   */
  connectedCallback() {
    // cancel a pending disconnect if we were re-attached within the grace period
    if (this.disconnectTID) {
      clearTimeout(this.disconnectTID);
      this.disconnectTID = 0;
    }

    // because video autopause on disconnected from DOM
    if (this.video) {
      const seek = this.video.seekable;
      if (seek.length > 0) {
        // jump back to the live edge of the seekable range
        this.video.currentTime = seek.end(seek.length - 1);
      }
      this.play();
    } else {
      this.oninit();
    }

    this.onconnect();
  }

  /**
   * `CustomElement`. Invoked each time the custom element is disconnected from the
   * document's DOM.
   */
  disconnectedCallback() {
    if (this.background || this.disconnectTID) return;
    if (this.wsState === WebSocket.CLOSED && this.pcState === WebSocket.CLOSED)
      return;

    // debounce teardown so brief DOM moves don't drop the connection
    this.disconnectTID = setTimeout(() => {
      if (this.reconnectTID) {
        clearTimeout(this.reconnectTID);
        this.reconnectTID = 0;
      }

      this.disconnectTID = 0;

      this.ondisconnect();
    }, this.DISCONNECT_TIMEOUT);
  }

  /**
   * Creates child DOM elements. Called automatically once on `connectedCallback`.
   */
  oninit() {
    this.video = document.createElement("video");
    this.video.controls = true;
    this.video.playsInline = true;
    this.video.preload = "auto";
    this.video.muted = true;

    this.video.style.display = "block"; // fix bottom margin 4px
    this.video.style.width = "100%";
    this.video.style.height = "100%";

    this.appendChild(this.video);

    if (this.background) return;

    // pause/resume with browser-tab visibility
    if ("hidden" in document && this.visibilityCheck) {
      document.addEventListener("visibilitychange", () => {
        if (document.hidden) {
          this.disconnectedCallback();
        } else if (this.isConnected) {
          this.connectedCallback();
        }
      });
    }

    // pause/resume with viewport visibility when a threshold is configured
    if ("IntersectionObserver" in window && this.visibilityThreshold) {
      const observer = new IntersectionObserver(
        (entries) => {
          entries.forEach((entry) => {
            if (!entry.isIntersecting) {
              this.disconnectedCallback();
            } else if (this.isConnected) {
              this.connectedCallback();
            }
          });
        },
        { threshold: this.visibilityThreshold }
      );
      observer.observe(this);
    }
  }

  /**
   * Connect to WebSocket. Called automatically on `connectedCallback`.
   * @return {boolean} true if the connection has started.
   */
  onconnect() {
    if (!this.isConnected || !this.wsURL || this.ws || this.pc) return false;

    // CLOSED or CONNECTING => CONNECTING
    this.wsState = WebSocket.CONNECTING;

    this.connectTS = Date.now();

    this.ws = new WebSocket(this.wsURL);
    this.ws.binaryType = "arraybuffer";
    this.ws.addEventListener("open", (ev) => this.onopen(ev));
    this.ws.addEventListener("close", (ev) => this.onclose(ev));

    return true;
  }

  // Tears down both the WebSocket and the peer connection.
  ondisconnect() {
    this.wsState = WebSocket.CLOSED;
    if (this.ws) {
      this.ws.close();
      this.ws = null;
    }

    this.pcState = WebSocket.CLOSED;
    if (this.pc) {
      this.pc.close();
      this.pc = null;
    }
  }

  /**
   * Starts transport negotiation once the WebSocket is open.
   * @returns {Array.<string>} of modes (mse, webrtc, etc.)
   */
  onopen() {
    // CONNECTING => OPEN
    this.wsState = WebSocket.OPEN;

    // route string frames to the per-mode JSON handlers, binary frames to ondata
    this.ws.addEventListener("message", (ev) => {
      if (typeof ev.data === "string") {
        const msg = JSON.parse(ev.data);
        for (const mode in this.onmessage) {
          this.onmessage[mode](msg);
        }
      } else {
        this.ondata(ev.data);
      }
    });

    this.ondata = null;
    this.onmessage = {};

    // pick transports in preference order based on `mode` and browser support
    const modes = [];

    if (
      this.mode.indexOf("mse") >= 0 &&
      ("MediaSource" in window || "ManagedMediaSource" in window)
    ) {
      // iPhone
      modes.push("mse");
      this.onmse();
    } else if (this.mode.indexOf("mp4") >= 0) {
      modes.push("mp4");
      this.onmp4();
    }

    if (this.mode.indexOf("webrtc") >= 0 && "RTCPeerConnection" in window) {
      // macOS Desktop app
      modes.push("webrtc");
      this.onwebrtc();
    }

    if (this.mode.indexOf("mjpeg") >= 0) {
      if (modes.length) {
        // only fall back to MJPEG if the preferred transport errors out
        this.onmessage["mjpeg"] = (msg) => {
          if (msg.type !== "error" || msg.value.indexOf(modes[0]) !== 0) return;
          this.onmjpeg();
        };
      } else {
        modes.push("mjpeg");
        this.onmjpeg();
      }
    }

    return modes;
  }

  /**
   * Handles WebSocket close: schedules a rate-limited reconnect.
   * @return {boolean} true if reconnection has started.
   */
  onclose() {
    if (this.wsState === WebSocket.CLOSED) return false;

    // CONNECTING, OPEN => CONNECTING
    this.wsState = WebSocket.CONNECTING;
    this.ws = null;

    // reconnect no more than once every X seconds
    const delay = Math.max(
      this.RECONNECT_TIMEOUT - (Date.now() - this.connectTS),
      0
    );

    this.reconnectTID = setTimeout(() => {
      this.reconnectTID = 0;
      this.onconnect();
    }, delay);

    return true;
  }

  // Starts MSE playback: negotiates codecs with the server and appends
  // incoming fMP4 segments to a SourceBuffer, with a small overflow buffer.
  onmse() {
    /** @type {MediaSource} */
    let ms;

    if ("ManagedMediaSource" in window) {
      // iOS 17+ path: ManagedMediaSource attaches via srcObject
      const MediaSource = window.ManagedMediaSource;

      ms = new MediaSource();
      ms.addEventListener(
        "sourceopen",
        () => {
          this.send({
            type: "mse",
            value: this.codecs(MediaSource.isTypeSupported),
          });
        },
        { once: true }
      );

      this.video.disableRemotePlayback = true;
      this.video.srcObject = ms;
    } else {
      ms = new MediaSource();
      ms.addEventListener(
        "sourceopen",
        () => {
          URL.revokeObjectURL(this.video.src);
          this.send({
            type: "mse",
            value: this.codecs(MediaSource.isTypeSupported),
          });
        },
        { once: true }
      );

      this.video.src = URL.createObjectURL(ms);
      this.video.srcObject = null;
    }

    this.play();

    this.mseCodecs = "";

    this.onmessage["mse"] = (msg) => {
      if (msg.type !== "mse") return;

      this.mseCodecs = msg.value;

      const sb = ms.addSourceBuffer(msg.value);
      sb.mode = "segments"; // segments or sequence

      sb.addEventListener("updateend", () => {
        if (sb.updating) return;

        try {
          if (bufLen > 0) {
            // flush data that arrived while the SourceBuffer was busy
            const data = buf.slice(0, bufLen);
            bufLen = 0;
            sb.appendBuffer(data);
          } else if (sb.buffered && sb.buffered.length) {
            // trim playback history, keeping ~15s behind the live edge
            const end = sb.buffered.end(sb.buffered.length - 1) - 15;
            const start = sb.buffered.start(0);
            if (end > start) {
              sb.remove(start, end);
              ms.setLiveSeekableRange(end, end + 15);
            }
            // console.debug("VideoRTC.buffered", start, end);
          }
        } catch (e) {
          // console.debug(e);
        }
      });

      // 2 MiB spillover buffer for segments arriving mid-update
      const buf = new Uint8Array(2 * 1024 * 1024);
      let bufLen = 0;

      this.ondata = (data) => {
        if (sb.updating || bufLen > 0) {
          const b = new Uint8Array(data);
          buf.set(b, bufLen);
          bufLen += b.byteLength;
          // console.debug("VideoRTC.buffer", b.byteLength, bufLen);
        } else {
          try {
            sb.appendBuffer(data);
          } catch (e) {
            // console.debug(e);
          }
        }
      };
    };
  }

  // Starts WebRTC negotiation; the remote stream is first attached to a
  // probe video element so onpcvideo() can compare it against MSE.
  onwebrtc() {
    const pc = new RTCPeerConnection(this.pcConfig);

    /** @type {HTMLVideoElement} */
    const video2 = document.createElement("video");
    video2.addEventListener("loadeddata", (ev) => this.onpcvideo(ev), {
      once: true,
    });

    pc.addEventListener("icecandidate", (ev) => {
      const candidate = ev.candidate ? ev.candidate.toJSON().candidate : "";
      this.send({ type: "webrtc/candidate", value: candidate });
    });

    pc.addEventListener("track", (ev) => {
      // when stream already init
      if (video2.srcObject !== null) return;

      // when audio track not exist in Chrome
      if (ev.streams.length === 0) return;

      // when audio track not exist in Firefox
      if (ev.streams[0].id[0] === "{") return;

      video2.srcObject = ev.streams[0];
    });

    pc.addEventListener("connectionstatechange", () => {
      if (
        pc.connectionState === "failed" ||
        pc.connectionState === "disconnected"
      ) {
        pc.close(); // stop next events

        this.pcState = WebSocket.CLOSED;
        this.pc = null;

        this.onconnect();
      }
    });

    this.onmessage["webrtc"] = (msg) => {
      switch (msg.type) {
        case "webrtc/candidate":
          pc.addIceCandidate({
            candidate: msg.value,
            sdpMid: "0",
          }).catch(() => {});
          break;
        case "webrtc/answer":
          pc.setRemoteDescription({
            type: "answer",
            sdp: msg.value,
          }).catch(() => {});
          break;
        case "error":
          if (msg.value.indexOf("webrtc/offer") < 0) return;
          pc.close();
      }
    };

    // Safari doesn't support "offerToReceiveVideo"
    pc.addTransceiver("video", { direction: "recvonly" });
    pc.addTransceiver("audio", { direction: "recvonly" });

    pc.createOffer().then((offer) => {
      pc.setLocalDescription(offer).then(() => {
        this.send({ type: "webrtc/offer", value: offer.sdp });
      });
    });

    this.pcState = WebSocket.CONNECTING;
    this.pc = pc;
  }

  /**
   * Called when the WebRTC probe video has data: picks WebRTC or MSE based
   * on track/codec priorities and shuts down the loser.
   * @param ev {Event}
   */
  onpcvideo(ev) {
    if (!this.pc) return;

    /** @type {HTMLVideoElement} */
    const video2 = ev.target;
    const state = this.pc.connectionState;

    // Firefox doesn't support pc.connectionState
    if (state === "connected" || state === "connecting" || !state) {
      // Video+Audio > Video, H265 > H264, Video > Audio, WebRTC > MSE
      let rtcPriority = 0,
        msePriority = 0;

      /** @type {MediaStream} */
      const ms = video2.srcObject;
      if (ms.getVideoTracks().length > 0) rtcPriority += 0x220;
      if (ms.getAudioTracks().length > 0) rtcPriority += 0x102;

      if (this.mseCodecs.indexOf("hvc1.") >= 0) msePriority += 0x230;
      if (this.mseCodecs.indexOf("avc1.") >= 0) msePriority += 0x210;
      if (this.mseCodecs.indexOf("mp4a.") >= 0) msePriority += 0x101;

      if (rtcPriority >= msePriority) {
        // WebRTC wins: move the stream to the real video element and drop MSE
        this.video.srcObject = ms;
        this.play();

        this.pcState = WebSocket.OPEN;

        this.wsState = WebSocket.CLOSED;
        this.ws.close();
        this.ws = null;
      } else {
        // MSE wins: drop the peer connection
        this.pcState = WebSocket.CLOSED;
        this.pc.close();
        this.pc = null;
      }
    }

    video2.srcObject = null;
  }

  // MJPEG fallback: renders each binary frame as the video poster image.
  onmjpeg() {
    this.ondata = (data) => {
      this.video.controls = false;
      this.video.poster = `data:image/jpeg;base64,${VideoRTC.btoa(data)}`;
    };

    this.send({ type: "mjpeg" });
  }

  // MP4 fallback: decodes each clip in a hidden video element and copies
  // frames to the poster via a canvas.
  onmp4() {
    /** @type {HTMLCanvasElement} **/
    const canvas = document.createElement("canvas");
    /** @type {CanvasRenderingContext2D} */
    let context;

    /** @type {HTMLVideoElement} */
    const video2 = document.createElement("video");
    video2.autoplay = true;
    video2.playsInline = true;
    video2.muted = true;

    video2.addEventListener("loadeddata", (_) => {
      if (!context) {
        canvas.width = video2.videoWidth;
        canvas.height = video2.videoHeight;
        context = canvas.getContext("2d");
      }

      context.drawImage(video2, 0, 0, canvas.width, canvas.height);

      this.video.controls = false;
      this.video.poster = canvas.toDataURL("image/jpeg");
    });

    this.ondata = (data) => {
      video2.src = `data:video/mp4;base64,${VideoRTC.btoa(data)}`;
    };

    this.send({ type: "mp4", value: this.codecs(this.video.canPlayType) });
  }

  // base64-encodes an ArrayBuffer (window.btoa only accepts strings)
  static btoa(buffer) {
    const bytes = new Uint8Array(buffer);
    const len = bytes.byteLength;
    let binary = "";
    for (let i = 0; i < len; i++) {
      binary += String.fromCharCode(bytes[i]);
    }
    return window.btoa(binary);
  }
}
/**
 * Concrete <video-stream> element: VideoRTC plus small GUI hooks that track
 * the active transport in `divMode`.
 */
class VideoStream extends VideoRTC {
  /**
   * Custom GUI
   */
  oninit() {
    super.oninit();
    // keep the video element in front of any ".info" overlay child
    const info = this.querySelector(".info");
    this.insertBefore(this.video, info);
  }

  onconnect() {
    const result = super.onconnect();
    if (result) this.divMode = "loading";
    return result;
  }

  ondisconnect() {
    super.ondisconnect();
  }

  onopen() {
    const result = super.onopen();
    // server "stream" messages are intentionally ignored
    this.onmessage["stream"] = (_) => {};
    return result;
  }

  onclose() {
    return super.onclose();
  }

  onpcvideo(ev) {
    super.onpcvideo(ev);
    // WebRTC won the transport race
    if (this.pcState !== WebSocket.CLOSED) {
      this.divMode = "RTC";
    }
  }
}

customElements.define("video-stream", VideoStream);
// Thin React wrapper around the <video-stream> custom element.
// NOTE(review): React (pre-19) writes unknown props on custom elements as
// HTML attributes, while VideoRTC exposes `src` as a property setter and
// `mode` as a plain property — it is not verifiable from here that these
// attribute writes actually reach the element's logic; confirm at runtime.
export default function MsePlayer({ src }) {
  return <video-stream src={src} mode="mse" />;
}

View File

@ -1,7 +1,7 @@
import useSWR from "swr";
import * as monaco from "monaco-editor";
import { configureMonacoYaml } from "monaco-yaml";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useCallback, useEffect, useRef, useState } from "react";
import { useApiHost } from "@/api";
import Heading from "@/components/ui/heading";
import ActivityIndicator from "@/components/ui/activity-indicator";

View File

@ -1,9 +1,113 @@
import LivePlayer from "@/components/player/LivePlayer";
import { Button } from "@/components/ui/button";
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuLabel,
DropdownMenuRadioGroup,
DropdownMenuRadioItem,
DropdownMenuSeparator,
DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
import Heading from "@/components/ui/heading";
import { usePersistence } from "@/hooks/use-persistence";
import { FrigateConfig } from "@/types/frigateConfig";
import { useMemo, useState } from "react";
import useSWR from "swr";
/**
 * Live page: camera and live-mode dropdown selectors plus the matching
 * LivePlayer for the selected camera.
 */
function Live() {
  const { data: config } = useSWR<FrigateConfig>("config");
  const [camera, setCamera] = useState<string>("Select A Camera");

  // config entry for the currently selected camera (undefined until a real
  // camera name is chosen — the placeholder string matches no camera key)
  const cameraConfig = useMemo(() => {
    return config?.cameras[camera];
  }, [camera, config]);

  // restream is available when the camera's live stream name exists among
  // the configured go2rtc streams
  const restreamEnabled = useMemo(() => {
    return (
      config &&
      cameraConfig &&
      Object.keys(config.go2rtc.streams || {}).includes(
        cameraConfig.live.stream_name
      )
    );
  }, [config, cameraConfig]);

  // configured live mode when restreaming is possible, jsmpeg otherwise
  const defaultLiveMode = useMemo(() => {
    if (cameraConfig) {
      if (restreamEnabled) {
        return cameraConfig.ui.live_mode;
      }

      return "jsmpeg";
    }

    return undefined;
  }, [cameraConfig, restreamEnabled]);

  // user-selected live mode, persisted per camera; sourceIsLoaded gates the
  // player so it never mounts with a stale persisted value
  const [viewSource, setViewSource, sourceIsLoaded] = usePersistence(
    `${camera}-source`,
    defaultLiveMode
  );

  return (
    <div>
      {/* NOTE(review): this "Birdseye" heading on the Live page looks like a
          copy-paste leftover — a "Live" heading is rendered just below.
          Confirm whether it should be removed. */}
      <Heading as="h2">Birdseye</Heading>
      <div className=" w-full">
        <div className="flex justify-between">
          <Heading as="h2">Live</Heading>
          <div>
            {/* camera selector */}
            <DropdownMenu>
              <DropdownMenuTrigger asChild>
                <Button className="capitalize" variant="outline">
                  {camera?.replaceAll("_", " ") || "Select A Camera"}
                </Button>
              </DropdownMenuTrigger>
              <DropdownMenuContent>
                <DropdownMenuLabel>Select A Camera</DropdownMenuLabel>
                <DropdownMenuSeparator />
                <DropdownMenuRadioGroup value={camera} onValueChange={setCamera}>
                  {Object.keys(config?.cameras || {}).map((item) => (
                    <DropdownMenuRadioItem
                      className="capitalize"
                      key={item}
                      value={item}
                    >
                      {item.replaceAll("_", " ")}
                    </DropdownMenuRadioItem>
                  ))}
                </DropdownMenuRadioGroup>
              </DropdownMenuContent>
            </DropdownMenu>
            {/* live-mode selector */}
            <DropdownMenu>
              <DropdownMenuTrigger asChild>
                <Button className="capitalize" variant="outline">
                  {viewSource || defaultLiveMode || "Select A Live Mode"}
                </Button>
              </DropdownMenuTrigger>
              <DropdownMenuContent>
                <DropdownMenuLabel>Select A Live Mode</DropdownMenuLabel>
                <DropdownMenuSeparator />
                <DropdownMenuRadioGroup
                  value={`${viewSource}`}
                  onValueChange={setViewSource}
                >
                  <DropdownMenuRadioItem value="webrtc">
                    Webrtc
                  </DropdownMenuRadioItem>
                  <DropdownMenuRadioItem value="mse">MSE</DropdownMenuRadioItem>
                  <DropdownMenuRadioItem value="jsmpeg">
                    Jsmpeg
                  </DropdownMenuRadioItem>
                  <DropdownMenuRadioItem value="debug">
                    Debug
                  </DropdownMenuRadioItem>
                </DropdownMenuRadioGroup>
              </DropdownMenuContent>
            </DropdownMenu>
          </div>
        </div>
        {cameraConfig && sourceIsLoaded && (
          <LivePlayer
            liveMode={`${viewSource ?? defaultLiveMode}`}
            cameraConfig={cameraConfig}
          />
        )}
      </div>
    </div>
  );
}

View File

@ -1,13 +1,196 @@
interface UiConfig {
export interface UiConfig {
timezone: string;
time_format: 'browser' | '12hour' | '24hour';
date_style: 'full' | 'long' | 'medium' | 'short';
time_style: 'full' | 'long' | 'medium' | 'short';
time_format: "browser" | "12hour" | "24hour";
date_style: "full" | "long" | "medium" | "short";
time_style: "full" | "long" | "medium" | "short";
strftime_fmt: string;
live_mode: string;
use_experimental: boolean;
}
/**
 * Per-camera configuration as returned by the backend config API.
 * Mirrors the server-side camera config schema; fields are grouped by
 * feature area (audio, detect, ffmpeg, record, snapshots, ...).
 */
export interface CameraConfig {
  // Audio event detection settings.
  audio: {
    enabled: boolean;
    enabled_in_config: boolean;
    filters: string[] | null;
    listen: string[];
    max_not_heard: number;
    min_volume: number;
    num_threads: number;
  };
  best_image_timeout: number;
  // Participation of this camera in the birdseye composite view.
  birdseye: {
    enabled: boolean;
    mode: "objects";
    order: number;
  };
  // Object-detection pipeline settings for this camera.
  detect: {
    annotation_offset: number;
    enabled: boolean;
    fps: number;
    height: number;
    max_disappeared: number;
    min_initialized: number;
    stationary: {
      interval: number;
      max_frames: {
        default: number | null;
        // Per-object overrides — value shape not visible here; verify
        // against the backend schema before relying on it.
        objects: Record<string, unknown>;
      };
      threshold: number;
    };
    width: number;
  };
  enabled: boolean;
  // ffmpeg invocation configuration (global plus per-input overrides).
  ffmpeg: {
    global_args: string[];
    hwaccel_args: string;
    input_args: string;
    inputs: {
      global_args: string[];
      hwaccel_args: string[];
      input_args: string;
      path: string;
      roles: string[];
    }[];
    output_args: {
      detect: string[];
      record: string;
      rtmp: string;
    };
    retry_interval: number;
  };
  // Fully-resolved ffmpeg command lines and the roles they serve.
  ffmpeg_cmds: {
    cmd: string;
    roles: string[];
  }[];
  // Live view stream parameters (consumed by the live player).
  live: {
    height: number;
    quality: number;
    stream_name: string;
  };
  // Motion detection tuning.
  motion: {
    contour_area: number;
    delta_alpha: number;
    frame_alpha: number;
    frame_height: number;
    improve_contrast: boolean;
    lightning_threshold: number;
    mask: string[];
    mqtt_off_delay: number;
    threshold: number;
  };
  // MQTT snapshot publishing options.
  mqtt: {
    bounding_box: boolean;
    crop: boolean;
    enabled: boolean;
    height: number;
    quality: number;
    required_zones: string[];
    timestamp: boolean;
  };
  name: string;
  // Tracked object types and per-object filter thresholds.
  objects: {
    filters: {
      [objectName: string]: {
        mask: string | null;
        max_area: number;
        max_ratio: number;
        min_area: number;
        min_ratio: number;
        min_score: number;
        threshold: number;
      };
    };
    mask: string;
    track: string[];
  };
  // ONVIF PTZ connection and autotracking settings.
  onvif: {
    autotracking: {
      calibrate_on_startup: boolean;
      enabled: boolean;
      enabled_in_config: boolean;
      movement_weights: string[];
      required_zones: string[];
      return_preset: string;
      timeout: number;
      track: string[];
      zoom_factor: number;
      zooming: string;
    };
    host: string;
    password: string | null;
    port: number;
    user: string | null;
  };
  // Recording and retention configuration.
  record: {
    enabled: boolean;
    enabled_in_config: boolean;
    events: {
      objects: string[] | null;
      post_capture: number;
      pre_capture: number;
      required_zones: string[];
      retain: {
        default: number;
        mode: string;
        objects: Record<string, unknown>;
      };
    };
    expire_interval: number;
    export: {
      timelapse_args: string;
    };
    preview: {
      quality: string;
    };
    retain: {
      days: number;
      mode: string;
    };
    sync_recordings: boolean;
  };
  rtmp: {
    enabled: boolean;
  };
  // Still-snapshot capture and retention options.
  snapshots: {
    bounding_box: boolean;
    clean_copy: boolean;
    crop: boolean;
    enabled: boolean;
    height: number | null;
    quality: number;
    required_zones: string[];
    retain: {
      default: number;
      mode: string;
      objects: Record<string, unknown>;
    };
    timestamp: boolean;
  };
  // On-frame timestamp overlay rendering (BGR color components).
  timestamp_style: {
    color: {
      blue: number;
      green: number;
      red: number;
    };
    effect: string | null;
    format: string;
    position: string;
    thickness: number;
  };
  ui: UiConfig;
  webui_url: string | null;
  // Named detection zones keyed by zone name.
  zones: {
    [zoneName: string]: {
      coordinates: string;
      filters: Record<string, unknown>;
      inertia: number;
      // NOTE(review): `any[]` defeats type checking — element shape is not
      // determinable from this file; consider `string[]` or `unknown[]`
      // after confirming against the backend payload.
      objects: any[];
    };
  };
}
export interface FrigateConfig {
audio: {
enabled: boolean;
@ -29,191 +212,7 @@ export interface FrigateConfig {
};
cameras: {
[cameraName: string]: {
audio: {
enabled: boolean;
enabled_in_config: boolean;
filters: string[] | null;
listen: string[];
max_not_heard: number;
min_volume: number;
num_threads: number;
};
best_image_timeout: number;
birdseye: {
enabled: boolean;
mode: "objects";
order: number;
};
detect: {
annotation_offset: number;
enabled: boolean;
fps: number;
height: number;
max_disappeared: number;
min_initialized: number;
stationary: {
interval: number;
max_frames: {
default: number | null;
objects: Record<string, unknown>;
};
threshold: number;
};
width: number;
};
enabled: boolean;
ffmpeg: {
global_args: string[];
hwaccel_args: string;
input_args: string;
inputs: {
global_args: string[];
hwaccel_args: string[];
input_args: string;
path: string;
roles: string[];
}[];
output_args: {
detect: string[];
record: string;
rtmp: string;
};
retry_interval: number;
};
ffmpeg_cmds: {
cmd: string;
roles: string[];
}[];
live: {
height: number;
quality: number;
stream_name: string;
};
motion: {
contour_area: number;
delta_alpha: number;
frame_alpha: number;
frame_height: number;
improve_contrast: boolean;
lightning_threshold: number;
mask: string[];
mqtt_off_delay: number;
threshold: number;
};
mqtt: {
bounding_box: boolean;
crop: boolean;
enabled: boolean;
height: number;
quality: number;
required_zones: string[];
timestamp: boolean;
};
name: string;
objects: {
filters: {
[objectName: string]: {
mask: string | null;
max_area: number;
max_ratio: number;
min_area: number;
min_ratio: number;
min_score: number;
threshold: number;
};
};
mask: string;
track: string[];
};
onvif: {
autotracking: {
calibrate_on_startup: boolean,
enabled: boolean;
enabled_in_config: boolean;
movement_weights: string[];
required_zones: string[];
return_preset: string;
timeout: number;
track: string[];
zoom_factor: number;
zooming: string;
};
host: string;
password: string | null;
port: number;
user: string | null;
};
record: {
enabled: boolean;
enabled_in_config: boolean;
events: {
objects: string[] | null;
post_capture: number;
pre_capture: number;
required_zones: string[];
retain: {
default: number;
mode: string;
objects: Record<string, unknown>;
};
};
expire_interval: number;
export: {
timelapse_args: string;
};
preview: {
quality: string;
};
retain: {
days: number;
mode: string;
};
sync_recordings: boolean;
};
rtmp: {
enabled: boolean;
};
snapshots: {
bounding_box: boolean;
clean_copy: boolean;
crop: boolean;
enabled: boolean;
height: number | null;
quality: number;
required_zones: string[];
retain: {
default: number;
mode: string;
objects: Record<string, unknown>;
};
timestamp: boolean;
};
timestamp_style: {
color: {
blue: number;
green: number;
red: number;
};
effect: string | null;
format: string;
position: string;
thickness: number;
};
ui: {
dashboard: boolean;
order: number;
};
webui_url: string | null;
zones: {
[zoneName: string]: {
coordinates: string;
filters: Record<string, unknown>;
inertia: number;
objects: any[];
};
};
};
[cameraName: string]: CameraConfig;
};
database: {
@ -400,5 +399,4 @@ export interface FrigateConfig {
};
ui: UiConfig;
}
}

View File

@ -1,6 +1,6 @@
import strftime from 'strftime';
import { fromUnixTime, intervalToDuration, formatDuration } from 'date-fns';
import { FrigateConfig, UiConfig } from "@/types/frigateConfig";
import { UiConfig } from "@/types/frigateConfig";
/**
 * Convert a unix timestamp expressed in seconds into a JavaScript Date.
 *
 * @param long seconds since the unix epoch
 * @returns the corresponding Date
 */
export function longToDate(long: number): Date {
  const millis = long * 1000;
  return new Date(millis);
}

/**
 * Convert a millisecond epoch value into seconds.
 *
 * @param date milliseconds since the unix epoch
 * @returns seconds since the unix epoch (fractional part preserved)
 */
export function epochToLong(date: number): number {
  return date / 1000;
}

/**
 * Convert a Date into a unix timestamp expressed in seconds.
 *
 * @param date the Date to convert
 * @returns seconds since the unix epoch
 */
export function dateToLong(date: Date): number {
  return epochToLong(date.getTime());
}

View File

@ -12,24 +12,24 @@ export default defineConfig({
server: {
proxy: {
'/api': {
target: 'http://192.168.50.106:5000',
target: 'http://localhost:5000',
ws: true,
},
'/vod': {
target: 'http://192.168.50.106:5000'
target: 'http://localhost:5000'
},
'/clips': {
target: 'http://192.168.50.106:5000'
target: 'http://localhost:5000'
},
'/exports': {
target: 'http://192.168.50.106:5000'
target: 'http://localhost:5000'
},
'/ws': {
target: 'ws://192.168.50.106:5000',
target: 'ws://localhost:5000',
ws: true,
},
'/live': {
target: 'ws://192.168.50.106:5000',
target: 'ws://localhost:5000',
changeOrigin: true,
ws: true,
},