From 207d1d280698a08d92a916fd2167d9c0170de246 Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Mon, 31 Mar 2025 15:49:56 -0600
Subject: [PATCH] Face UI cleanup (#17472)

* Add note
* Sort by event id
* Fix reprocess causing shift
* Move event group to separate comp
* Handle selecting events
* Implement event selection
* Implement selected handler
* Handle right click
* Toggle ctrl + a
* Stop propagation
* Fix
---
 docs/docs/configuration/face_recognition.md |   4 +
 frigate/data_processing/real_time/face.py   |  52 ++--
 web/src/hooks/use-contextmenu.ts            |   1 +
 web/src/pages/FaceLibrary.tsx               | 319 +++++++++++++-------
 4 files changed, 255 insertions(+), 121 deletions(-)

diff --git a/docs/docs/configuration/face_recognition.md b/docs/docs/configuration/face_recognition.md
index b894133fb..fe6ed1f99 100644
--- a/docs/docs/configuration/face_recognition.md
+++ b/docs/docs/configuration/face_recognition.md
@@ -108,6 +108,10 @@ Once straight-on images are performing well, start choosing slightly off-angle i
 
 It is important to methodically add photos to the library, bulk importing photos (especially from a general photo library) will lead to over-fitting in that particular scenario and hurt recognition performance.
 
+### Why can't I bulk reprocess faces?
+
+Face embedding models work by breaking faces apart into different features. This means that when an image is reprocessed, only images taken from a similar angle will have their scores affected.
+
 ### Why do unknown people score similarly to known people?
 
 This can happen for a few different reasons, but this is usually an indicator that the training set needs to be improved. This is often related to over-fitting:
diff --git a/frigate/data_processing/real_time/face.py b/frigate/data_processing/real_time/face.py
index 5b20a6303..dd18aeffc 100644
--- a/frigate/data_processing/real_time/face.py
+++ b/frigate/data_processing/real_time/face.py
@@ -272,22 +272,9 @@ class FaceRealTimeProcessor(RealTimeProcessorApi):
                 f"Detected best face for person as: {sub_label} with probability {score}"
             )
 
-            if self.config.face_recognition.save_attempts:
-                # write face to library
-                folder = os.path.join(FACE_DIR, "train")
-                file = os.path.join(folder, f"{id}-{sub_label}-{score}-0.webp")
-                os.makedirs(folder, exist_ok=True)
-                cv2.imwrite(file, face_frame)
-
-                files = sorted(
-                    filter(lambda f: (f.endswith(".webp")), os.listdir(folder)),
-                    key=lambda f: os.path.getctime(os.path.join(folder, f)),
-                    reverse=True,
-                )
-
-                # delete oldest face image if maximum is reached
-                if len(files) > self.config.face_recognition.save_attempts:
-                    os.unlink(os.path.join(folder, files[-1]))
+            self.write_face_attempt(
+                face_frame, id, datetime.datetime.now().timestamp(), sub_label, score
+            )
 
         if id not in self.person_face_history:
             self.person_face_history[id] = []
@@ -383,9 +370,9 @@ class FaceRealTimeProcessor(RealTimeProcessorApi):
             }
         elif topic == EmbeddingsRequestEnum.reprocess_face.value:
             current_file: str = request_data["image_file"]
-            id = current_file[0 : current_file.index("-", current_file.index("-") + 1)]
-            face_score = current_file[current_file.rfind("-") : current_file.rfind(".")]
+            (id_time, id_rand, timestamp, _, _) = current_file.split("-")
             img = None
+            id = f"{id_time}-{id_rand}"
 
             if current_file:
                 img = cv2.imread(current_file)
@@ -411,7 +398,7 @@ class FaceRealTimeProcessor(RealTimeProcessorApi):
             folder = os.path.join(FACE_DIR, "train")
             os.makedirs(folder, exist_ok=True)
             new_file = os.path.join(
-                folder, f"{id}-{sub_label}-{score}-{face_score}.webp"
+                folder, f"{id}-{timestamp}-{sub_label}-{score}.webp"
f"{id}-{timestamp}-{sub_label}-{score}.webp" ) shutil.move(current_file, new_file) @@ -461,3 +448,30 @@ class FaceRealTimeProcessor(RealTimeProcessorApi): weighted_average = weighted_scores[best_name] / total_weights[best_name] return best_name, weighted_average + + def write_face_attempt( + self, + frame: np.ndarray, + event_id: str, + timestamp: float, + sub_label: str, + score: float, + ) -> None: + if self.config.face_recognition.save_attempts: + # write face to library + folder = os.path.join(FACE_DIR, "train") + file = os.path.join( + folder, f"{event_id}-{timestamp}-{sub_label}-{score}.webp" + ) + os.makedirs(folder, exist_ok=True) + cv2.imwrite(file, frame) + + files = sorted( + filter(lambda f: (f.endswith(".webp")), os.listdir(folder)), + key=lambda f: os.path.getctime(os.path.join(folder, f)), + reverse=True, + ) + + # delete oldest face image if maximum is reached + if len(files) > self.config.face_recognition.save_attempts: + os.unlink(os.path.join(folder, files[-1])) diff --git a/web/src/hooks/use-contextmenu.ts b/web/src/hooks/use-contextmenu.ts index f121846ae..21c03e353 100644 --- a/web/src/hooks/use-contextmenu.ts +++ b/web/src/hooks/use-contextmenu.ts @@ -33,6 +33,7 @@ export default function useContextMenu( }; } else { const context = (e: MouseEvent) => { + e.stopPropagation(); e.preventDefault(); callback(); }; diff --git a/web/src/pages/FaceLibrary.tsx b/web/src/pages/FaceLibrary.tsx index df57f729a..0b18f3d39 100644 --- a/web/src/pages/FaceLibrary.tsx +++ b/web/src/pages/FaceLibrary.tsx @@ -142,29 +142,33 @@ export default function FaceLibrary() { const [selectedFaces, setSelectedFaces] = useState([]); - const onClickFace = useCallback( - (imageId: string, ctrl: boolean) => { + const onClickFaces = useCallback( + (images: string[], ctrl: boolean) => { if (selectedFaces.length == 0 && !ctrl) { return; } - const index = selectedFaces.indexOf(imageId); + let newSelectedFaces = [...selectedFaces]; - if (index != -1) { - if (selectedFaces.length == 1) { - setSelectedFaces([]); + images.forEach((imageId) => { + const index = newSelectedFaces.indexOf(imageId); + + if (index != -1) { + if (selectedFaces.length == 1) { + newSelectedFaces = []; + } else { + const copy = [ + ...newSelectedFaces.slice(0, index), + ...newSelectedFaces.slice(index + 1), + ]; + newSelectedFaces = copy; + } } else { - const copy = [ - ...selectedFaces.slice(0, index), - ...selectedFaces.slice(index + 1), - ]; - setSelectedFaces(copy); + newSelectedFaces.push(imageId); } - } else { - const copy = [...selectedFaces]; - copy.push(imageId); - setSelectedFaces(copy); - } + }); + + setSelectedFaces(newSelectedFaces); }, [selectedFaces, setSelectedFaces], ); @@ -212,7 +216,11 @@ export default function FaceLibrary() { switch (key) { case "a": if (modifiers.ctrl) { - setSelectedFaces([...trainImages]); + if (selectedFaces.length) { + setSelectedFaces([]); + } else { + setSelectedFaces([...trainImages]); + } } break; case "Escape": @@ -253,6 +261,16 @@ export default function FaceLibrary() { /> {selectedFaces?.length > 0 ? (
diff --git a/web/src/hooks/use-contextmenu.ts b/web/src/hooks/use-contextmenu.ts
index f121846ae..21c03e353 100644
--- a/web/src/hooks/use-contextmenu.ts
+++ b/web/src/hooks/use-contextmenu.ts
@@ -33,6 +33,7 @@ export default function useContextMenu(
     };
   } else {
     const context = (e: MouseEvent) => {
+      e.stopPropagation();
       e.preventDefault();
       callback();
     };
diff --git a/web/src/pages/FaceLibrary.tsx b/web/src/pages/FaceLibrary.tsx
index df57f729a..0b18f3d39 100644
--- a/web/src/pages/FaceLibrary.tsx
+++ b/web/src/pages/FaceLibrary.tsx
@@ -142,29 +142,33 @@ export default function FaceLibrary() {
 
   const [selectedFaces, setSelectedFaces] = useState<string[]>([]);
 
-  const onClickFace = useCallback(
-    (imageId: string, ctrl: boolean) => {
+  const onClickFaces = useCallback(
+    (images: string[], ctrl: boolean) => {
       if (selectedFaces.length == 0 && !ctrl) {
         return;
       }
 
-      const index = selectedFaces.indexOf(imageId);
+      let newSelectedFaces = [...selectedFaces];
 
-      if (index != -1) {
-        if (selectedFaces.length == 1) {
-          setSelectedFaces([]);
+      images.forEach((imageId) => {
+        const index = newSelectedFaces.indexOf(imageId);
+
+        if (index != -1) {
+          if (selectedFaces.length == 1) {
+            newSelectedFaces = [];
+          } else {
+            const copy = [
+              ...newSelectedFaces.slice(0, index),
+              ...newSelectedFaces.slice(index + 1),
+            ];
+            newSelectedFaces = copy;
+          }
         } else {
-          const copy = [
-            ...selectedFaces.slice(0, index),
-            ...selectedFaces.slice(index + 1),
-          ];
-          setSelectedFaces(copy);
+          newSelectedFaces.push(imageId);
         }
-      } else {
-        const copy = [...selectedFaces];
-        copy.push(imageId);
-        setSelectedFaces(copy);
-      }
+      });
+
+      setSelectedFaces(newSelectedFaces);
     },
     [selectedFaces, setSelectedFaces],
   );
@@ -212,7 +216,11 @@ export default function FaceLibrary() {
       switch (key) {
         case "a":
           if (modifiers.ctrl) {
-            setSelectedFaces([...trainImages]);
+            if (selectedFaces.length) {
+              setSelectedFaces([]);
+            } else {
+              setSelectedFaces([...trainImages]);
+            }
           }
           break;
         case "Escape":
@@ -253,6 +261,16 @@ export default function FaceLibrary() {
           />
         {selectedFaces?.length > 0 ? (
+          <div ...>
+            <div ...>
+              <div ...>{`${selectedFaces.length} selected`}</div>
+              <div ...>{"|"}</div>
+              <div
+                ...
+                onClick={() => setSelectedFaces([])}
+              >
+                {t("button.unselect", { ns: "common" })}
+              </div>
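The `onClickFaces` change in FaceLibrary.tsx generalizes single-image toggling to batch toggling: every id passed in is flipped in or out of the current selection in one state update, which lets plain clicks, right clicks, and whole-group selection share a single handler. Below is a simplified sketch of just that toggle semantics, leaving out the ctrl/empty-selection guard of the actual component; `toggleSelection` is an illustrative helper, not code from this PR.

```ts
// Flip each toggled id in or out of the current selection, returning a new
// array so a React state setter sees a fresh reference.
function toggleSelection(current: string[], toggled: string[]): string[] {
  const next = new Set(current);

  for (const id of toggled) {
    if (next.has(id)) {
      next.delete(id); // already selected -> unselect
    } else {
      next.add(id); // not selected -> select
    }
  }

  return [...next];
}

// toggleSelection(["a", "b"], ["b", "c"]) -> ["a", "c"]
```

A `Set` keeps the per-id membership check constant time; the component reaches the same result with `indexOf` and `slice`, which is adequate for the small image lists involved.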