From e9787c5a881759bfb9538bf8131f0ff6c69752a3 Mon Sep 17 00:00:00 2001 From: Nicolas Mowen Date: Fri, 11 Apr 2025 08:21:01 -0600 Subject: [PATCH] Small Tweaks (#17652) * Ensure that hailo uses correct labelmap * Make whole button clickable * Add weblate to readme * Update docs for HEIC * Fix explore chip icon logic * Sort regardless of case * Don't allow selection * Fix image uploading --- README.md | 12 ++++++ docs/docs/configuration/face_recognition.md | 4 ++ docs/docs/configuration/object_detectors.md | 2 + web/src/components/card/SearchThumbnail.tsx | 37 +++++++++++-------- web/src/components/input/ImageEntry.tsx | 6 ++- .../overlay/dialog/SearchFilterDialog.tsx | 9 ++++- web/src/pages/FaceLibrary.tsx | 6 +-- web/src/views/live/LiveCameraView.tsx | 14 +++---- 8 files changed, 62 insertions(+), 28 deletions(-) diff --git a/README.md b/README.md index 8b40304d1..35e8cb7e9 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,10 @@ # Frigate - NVR With Realtime Object Detection for IP Cameras + +Translation status + + \[English\] | [简体中文](https://github.com/blakeblackshear/frigate/blob/dev/README_CN.md) A complete and local NVR designed for [Home Assistant](https://www.home-assistant.io) with AI object detection. Uses OpenCV and Tensorflow to perform realtime object detection locally for IP cameras. @@ -32,21 +36,25 @@ If you would like to make a donation to support development, please use [Github ## Screenshots ### Live dashboard +
Live dashboard
### Streamlined review workflow +
Streamlined review workflow
### Multi-camera scrubbing +
Multi-camera scrubbing
### Built-in mask and zone editor +
Built-in mask and zone editor
@@ -54,3 +62,7 @@ If you would like to make a donation to support development, please use [Github ## Translations We use [Weblate](https://hosted.weblate.org/projects/frigate-nvr/) to support language translations. Contributions are always welcome. + + +Translation status + diff --git a/docs/docs/configuration/face_recognition.md b/docs/docs/configuration/face_recognition.md index e652bb4a0..4ca57f9e1 100644 --- a/docs/docs/configuration/face_recognition.md +++ b/docs/docs/configuration/face_recognition.md @@ -136,3 +136,7 @@ Face recognition does not run on the recording stream, this would be suboptimal 1. The latency of accessing the recordings means the notifications would not include the names of recognized people because recognition would not complete until after. 2. The embedding models used run on a set image size, so larger images will be scaled down to match this anyway. 3. Motion clarity is much more important than extra pixels, over-compression and motion blur are much more detrimental to results than resolution. + +### I get an unknown error when taking a photo directly with my iPhone + +By default iOS devices will use HEIC (High Efficiency Image Container) for images, but this format is not supported for uploads. Choosing `large` as the format instead of `original` will use JPG which will work correctly. 
diff --git a/docs/docs/configuration/object_detectors.md b/docs/docs/configuration/object_detectors.md index 2f3ebc397..31f0df1da 100644 --- a/docs/docs/configuration/object_detectors.md +++ b/docs/docs/configuration/object_detectors.md @@ -163,6 +163,7 @@ model: input_pixel_format: rgb input_dtype: int model_type: yolo-generic + labelmap_path: /labelmap/coco-80.txt # The detector automatically selects the default model based on your hardware: # - For Hailo-8 hardware: YOLOv6n (default: yolov6n.hef) @@ -219,6 +220,7 @@ model: input_pixel_format: rgb input_dtype: int model_type: yolo-generic + labelmap_path: /labelmap/coco-80.txt # Optional: Specify a local model path. # path: /config/model_cache/hailo/custom_model.hef # diff --git a/web/src/components/card/SearchThumbnail.tsx b/web/src/components/card/SearchThumbnail.tsx index f53756be0..a2c5cd37f 100644 --- a/web/src/components/card/SearchThumbnail.tsx +++ b/web/src/components/card/SearchThumbnail.tsx @@ -44,23 +44,31 @@ export default function SearchThumbnail({ [searchResult, onClick], ); - const objectLabel = useMemo(() => { - if ( - !config || - !searchResult.sub_label || - !config.model.attributes_map[searchResult.label] - ) { - return searchResult.label; - } - - return `${searchResult.label}-verified`; - }, [config, searchResult]); - const hasRecognizedPlate = useMemo( () => (searchResult.data.recognized_license_plate?.length || 0) > 0, [searchResult], ); + const objectLabel = useMemo(() => { + if (!config) { + return searchResult.label; + } + + if (!searchResult.sub_label) { + return `${searchResult.label}${hasRecognizedPlate ? "-plate" : ""}`; + } + + if ( + config.model.attributes_map[searchResult.label]?.includes( + searchResult.sub_label, + ) + ) { + return searchResult.sub_label; + } + + return `${searchResult.label}-verified`; + }, [config, hasRecognizedPlate, searchResult]); + return (
onClick(searchResult, false, true)} > - {getIconForLabel( - `${objectLabel}${hasRecognizedPlate ? "-plate" : ""}`, - "size-3 text-white", - )} + {getIconForLabel(objectLabel, "size-3 text-white")} {Math.round( (searchResult.data.score ?? searchResult.data.top_score ?? diff --git a/web/src/components/input/ImageEntry.tsx b/web/src/components/input/ImageEntry.tsx index 29a388f7c..47c8714ef 100644 --- a/web/src/components/input/ImageEntry.tsx +++ b/web/src/components/input/ImageEntry.tsx @@ -32,7 +32,11 @@ export default function ImageEntry({ const [preview, setPreview] = useState(null); const formSchema = z.object({ - file: z.instanceof(File, { message: "Please select an image file." }), + file: z + .instanceof(File, { message: t("imageEntry.validation.selectImage") }) + .refine((file) => + accept["image/*"].includes(`.${file.type.split("/")[1]}`), + ), }); const form = useForm>({ diff --git a/web/src/components/overlay/dialog/SearchFilterDialog.tsx b/web/src/components/overlay/dialog/SearchFilterDialog.tsx index 4d39acdae..bf0069911 100644 --- a/web/src/components/overlay/dialog/SearchFilterDialog.tsx +++ b/web/src/components/overlay/dialog/SearchFilterDialog.tsx @@ -462,6 +462,13 @@ export function SubFilterContent({ setSubLabels, }: SubFilterContentProps) { const { t } = useTranslation(["components/filter"]); + const sortedSubLabels = useMemo( + () => + [...allSubLabels].sort((a, b) => + a.toLowerCase().localeCompare(b.toLowerCase()), + ), + [allSubLabels], + ); return (
@@ -482,7 +489,7 @@ export function SubFilterContent({ />
- {allSubLabels.map((item) => ( + {sortedSubLabels.map((item) => (
-
+
Person {event?.sub_label ? `: ${event.sub_label} (${Math.round((event.data.sub_label_score || 0) * 100)}%)` @@ -848,7 +848,7 @@ function FaceAttempt({ : "outline-transparent duration-500", )} > -
+
-
+
{data.name}
diff --git a/web/src/views/live/LiveCameraView.tsx b/web/src/views/live/LiveCameraView.tsx index 34d61d684..d6a15d4e8 100644 --- a/web/src/views/live/LiveCameraView.tsx +++ b/web/src/views/live/LiveCameraView.tsx @@ -1479,17 +1479,17 @@ function FrigateCameraFeatures({ })}

-
+
+ navigate(`/settings?page=debug&camera=${camera.name}`) + } + >
{t("streaming.debugView", { ns: "components/dialog", })} - - navigate(`/settings?page=debug&camera=${camera.name}`) - } - className="ml-2 inline-flex size-5 cursor-pointer" - /> +