"""SQLite-vec embeddings database."""

import base64
import logging
import os
import random
import string
import time

from numpy import ndarray
from playhouse.shortcuts import model_to_dict

from frigate.comms.inter_process import InterProcessRequestor
from frigate.config import FrigateConfig
from frigate.const import (
    CONFIG_DIR,
    FACE_DIR,
    UPDATE_EMBEDDINGS_REINDEX_PROGRESS,
    UPDATE_MODEL_STATE,
)
from frigate.db.sqlitevecq import SqliteVecQueueDatabase
from frigate.models import Event
from frigate.types import ModelStatusTypesEnum
from frigate.util.builtin import serialize

from .functions.onnx import GenericONNXEmbedding, ModelTypeEnum

logger = logging.getLogger(__name__)


def get_metadata(event: Event) -> dict:
    """Extract valid event metadata."""
    event_dict = model_to_dict(event)
    return (
        {
            k: v
            for k, v in event_dict.items()
            if k not in ["thumbnail"]
            and v is not None
            and isinstance(v, (str, int, float, bool))
        }
        | {
            k: v
            for k, v in event_dict["data"].items()
            if k not in ["description"]
            and v is not None
            and isinstance(v, (str, int, float, bool))
        }
        | {
            # Metadata search doesn't support $contains
            # and an event can have multiple zones, so
            # we need to create a key for each zone
            f"{k}_{x}": True
            for k, v in event_dict.items()
            if isinstance(v, list) and len(v) > 0
            for x in v
            if isinstance(x, str)
        }
    )
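
# Rough illustration with hypothetical values: for an event dict like
# {"camera": "front", "zones": ["porch", "driveway"], "thumbnail": "..."},
# the merged result would be roughly
# {"camera": "front", "zones_porch": True, "zones_driveway": True},
# since list fields are flattened into per-value boolean keys to work around
# the lack of a $contains operator in metadata search.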


class Embeddings:
    """SQLite-vec embeddings database."""

    def __init__(self, config: FrigateConfig, db: SqliteVecQueueDatabase) -> None:
        self.config = config
        self.db = db
        self.requestor = InterProcessRequestor()

        # Create tables if they don't exist
        self.db.create_embeddings_tables(self.config.face_recognition.enabled)

        models = [
            "jinaai/jina-clip-v1-text_model_fp16.onnx",
            "jinaai/jina-clip-v1-tokenizer",
            "jinaai/jina-clip-v1-vision_model_fp16.onnx"
            if config.semantic_search.model_size == "large"
            else "jinaai/jina-clip-v1-vision_model_quantized.onnx",
            "jinaai/jina-clip-v1-preprocessor_config.json",
            "facenet-facenet.onnx",
            "paddleocr-onnx-detection.onnx",
            "paddleocr-onnx-classification.onnx",
            "paddleocr-onnx-recognition.onnx",
        ]
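
        # These entries appear to follow the "<model_name>-<model_file>" naming
        # that each GenericONNXEmbedding uses when reporting download progress,
        # so every expected file can be marked below as not yet downloaded
        # before any model is actually loaded.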

        for model in models:
            self.requestor.send_data(
                UPDATE_MODEL_STATE,
                {
                    "model": model,
                    "state": ModelStatusTypesEnum.not_downloaded,
                },
            )

        self.text_embedding = GenericONNXEmbedding(
            model_name="jinaai/jina-clip-v1",
            model_file="text_model_fp16.onnx",
            tokenizer_file="tokenizer",
            download_urls={
                "text_model_fp16.onnx": "https://huggingface.co/jinaai/jina-clip-v1/resolve/main/onnx/text_model_fp16.onnx",
            },
            model_size=config.semantic_search.model_size,
            model_type=ModelTypeEnum.text,
            requestor=self.requestor,
            device="CPU",
        )

        model_file = (
            "vision_model_fp16.onnx"
            if self.config.semantic_search.model_size == "large"
            else "vision_model_quantized.onnx"
        )

        download_urls = {
            model_file: f"https://huggingface.co/jinaai/jina-clip-v1/resolve/main/onnx/{model_file}",
            "preprocessor_config.json": "https://huggingface.co/jinaai/jina-clip-v1/resolve/main/preprocessor_config.json",
        }

        self.vision_embedding = GenericONNXEmbedding(
            model_name="jinaai/jina-clip-v1",
            model_file=model_file,
            download_urls=download_urls,
            model_size=config.semantic_search.model_size,
            model_type=ModelTypeEnum.vision,
            requestor=self.requestor,
            device="GPU" if config.semantic_search.model_size == "large" else "CPU",
        )
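
        # Both the ONNX variant (fp16 vs. quantized) and whether the vision model
        # runs on GPU are driven by semantic_search.model_size. A minimal,
        # hypothetical config snippet selecting the larger models (assuming the
        # YAML keys mirror the config attributes referenced above):
        #
        #   semantic_search:
        #     enabled: True
        #     model_size: large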

        self.face_embedding = None

        if self.config.face_recognition.enabled:
            self.face_embedding = GenericONNXEmbedding(
                model_name="facenet",
                model_file="facenet.onnx",
                download_urls={
                    "facenet.onnx": "https://github.com/NickM-27/facenet-onnx/releases/download/v1.0/facenet.onnx",
                    "facedet.onnx": "https://github.com/opencv/opencv_zoo/raw/refs/heads/main/models/face_detection_yunet/face_detection_yunet_2023mar_int8.onnx",
                },
                model_size="large",
                model_type=ModelTypeEnum.face,
                requestor=self.requestor,
                device="GPU",
            )
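
        # License plate recognition uses the standard PaddleOCR three-stage
        # pipeline (text detection -> angle classification -> text recognition),
        # so a separate ONNX session is created for each stage below.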

        self.lpr_detection_model = None
        self.lpr_classification_model = None
        self.lpr_recognition_model = None

        if self.config.lpr.enabled:
            self.lpr_detection_model = GenericONNXEmbedding(
                model_name="paddleocr-onnx",
                model_file="detection.onnx",
                download_urls={
                    "detection.onnx": "https://github.com/hawkeye217/paddleocr-onnx/raw/refs/heads/master/models/detection.onnx"
                },
                model_size="large",
                model_type=ModelTypeEnum.lpr_detect,
                requestor=self.requestor,
                device="CPU",
            )

            self.lpr_classification_model = GenericONNXEmbedding(
                model_name="paddleocr-onnx",
                model_file="classification.onnx",
                download_urls={
                    "classification.onnx": "https://github.com/hawkeye217/paddleocr-onnx/raw/refs/heads/master/models/classification.onnx"
                },
                model_size="large",
                model_type=ModelTypeEnum.lpr_classify,
                requestor=self.requestor,
                device="CPU",
            )

            self.lpr_recognition_model = GenericONNXEmbedding(
                model_name="paddleocr-onnx",
                model_file="recognition.onnx",
                download_urls={
                    "recognition.onnx": "https://github.com/hawkeye217/paddleocr-onnx/raw/refs/heads/master/models/recognition.onnx"
                },
                model_size="large",
                model_type=ModelTypeEnum.lpr_recognize,
                requestor=self.requestor,
                device="CPU",
            )

    def embed_thumbnail(
        self, event_id: str, thumbnail: bytes, upsert: bool = True
    ) -> ndarray:
        """Embed thumbnail and optionally insert into DB.

        @param: event_id in Events DB
        @param: thumbnail bytes in jpg format
        @param: upsert If embedding should be upserted into vec DB
        """
        # Decoding the jpg bytes and preprocessing are handled inside the vision
        # embedding wrapper, so the raw thumbnail can be passed straight through.
        embedding = self.vision_embedding([thumbnail])[0]

        if upsert:
            self.db.execute_sql(
                """
                INSERT OR REPLACE INTO vec_thumbnails(id, thumbnail_embedding)
                VALUES(?, ?)
                """,
                (event_id, serialize(embedding)),
            )

        return embedding
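
    # A minimal usage sketch (hypothetical IDs and data; assumes an existing
    # Embeddings instance named `embeddings` and jpg bytes in `thumb`):
    #
    #   emb = embeddings.embed_thumbnail("1700000000.123456-abcdef", thumb)
    #   # emb is a numpy ndarray; with upsert=True (the default) it has also
    #   # been written to the vec_thumbnails table.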

    def batch_embed_thumbnail(
        self, event_thumbs: dict[str, bytes], upsert: bool = True
    ) -> list[ndarray]:
        """Embed thumbnails and optionally insert into DB.

        @param: event_thumbs Map of Event IDs in DB to thumbnail bytes in jpg format
        @param: upsert If embedding should be upserted into vec DB
        """
        ids = list(event_thumbs.keys())
        embeddings = self.vision_embedding(list(event_thumbs.values()))

        if upsert:
            items = []

            for i in range(len(ids)):
                items.append(ids[i])
                items.append(serialize(embeddings[i]))
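
            # The statement below expands one "(?, ?)" placeholder pair per id,
            # e.g. for three events: "VALUES (?, ?), (?, ?), (?, ?)", with `items`
            # holding the flattened [id1, emb1, id2, emb2, id3, emb3] parameters.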
            self.db.execute_sql(
                """
                INSERT OR REPLACE INTO vec_thumbnails(id, thumbnail_embedding)
                VALUES {}
                """.format(", ".join(["(?, ?)"] * len(ids))),
                items,
            )

        return embeddings

    def embed_description(
        self, event_id: str, description: str, upsert: bool = True
    ) -> ndarray:
        """Embed description text and optionally insert into DB.

        @param: event_id in Events DB
        @param: description text to embed
        @param: upsert If embedding should be upserted into vec DB
        """
        embedding = self.text_embedding([description])[0]

        if upsert:
            self.db.execute_sql(
                """
                INSERT OR REPLACE INTO vec_descriptions(id, description_embedding)
                VALUES(?, ?)
                """,
                (event_id, serialize(embedding)),
            )

        return embedding
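
    # Note: jina-clip-v1 is a CLIP-style dual encoder, so the text embeddings
    # produced here are presumably aligned with the thumbnail (image) embeddings
    # above, which is what allows a single text query to be compared against
    # both vec_thumbnails and vec_descriptions.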

    def batch_embed_description(
        self, event_descriptions: dict[str, str], upsert: bool = True
    ) -> list[ndarray]:
        # embed descriptions one by one to avoid hitting the token limit
        embeddings = []

        for desc in event_descriptions.values():
            embeddings.append(self.text_embedding([desc])[0])

        if upsert:
            ids = list(event_descriptions.keys())
            items = []

            for i in range(len(ids)):
                items.append(ids[i])
                items.append(serialize(embeddings[i]))

            self.db.execute_sql(
                """
                INSERT OR REPLACE INTO vec_descriptions(id, description_embedding)
                VALUES {}
                """.format(", ".join(["(?, ?)"] * len(ids))),
                items,
            )

        return embeddings

    def embed_face(self, label: str, thumbnail: bytes, upsert: bool = False) -> ndarray:
        """Embed a face image and optionally save it to the face library and DB.

        @param: label name of the person the face belongs to
        @param: thumbnail face image bytes
        @param: upsert If the face image and embedding should be saved
        """
        embedding = self.face_embedding(thumbnail)[0]

        if upsert:
            rand_id = "".join(
                random.choices(string.ascii_lowercase + string.digits, k=6)
            )
            id = f"{label}-{rand_id}"

            # write face to library
            folder = os.path.join(FACE_DIR, label)
            file = os.path.join(folder, f"{id}.webp")
            os.makedirs(folder, exist_ok=True)

            # save face image
            with open(file, "wb") as output:
                output.write(thumbnail)

            self.db.execute_sql(
                """
                INSERT OR REPLACE INTO vec_faces(id, face_embedding)
                VALUES(?, ?)
                """,
                (id, serialize(embedding)),
            )

        return embedding
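
    # Minimal enrollment sketch (hypothetical name and image; assumes an existing
    # Embeddings instance named `embeddings` with face recognition enabled):
    #
    #   with open("/path/to/face.webp", "rb") as f:
    #       embeddings.embed_face("jane", f.read(), upsert=True)
    #
    # With upsert=True the image is saved under FACE_DIR/jane/ and its embedding
    # is written to vec_faces; with the default upsert=False only the embedding
    # is returned.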

    def reindex(self) -> None:
        logger.info("Indexing tracked object embeddings...")

        self.db.drop_embeddings_tables()
        logger.debug("Dropped embeddings tables.")
        self.db.create_embeddings_tables(self.config.face_recognition.enabled)
        logger.debug("Created embeddings tables.")

        # Delete the saved stats file
        if os.path.exists(os.path.join(CONFIG_DIR, ".search_stats.json")):
            os.remove(os.path.join(CONFIG_DIR, ".search_stats.json"))

        st = time.time()

        # Get total count of events to process
        total_events = (
            Event.select()
            .where(
                ((Event.has_clip == True) | (Event.has_snapshot == True))
                & Event.thumbnail.is_null(False)
            )
            .count()
        )

        batch_size = 32
        current_page = 1

        totals = {
            "thumbnails": 0,
            "descriptions": 0,
            "processed_objects": total_events - 1 if total_events < batch_size else 0,
            "total_objects": total_events,
            "time_remaining": 0 if total_events < batch_size else -1,
            "status": "indexing",
        }

        self.requestor.send_data(UPDATE_EMBEDDINGS_REINDEX_PROGRESS, totals)

        events = (
            Event.select()
            .where(
                ((Event.has_clip == True) | (Event.has_snapshot == True))
                & Event.thumbnail.is_null(False)
            )
            .order_by(Event.start_time.desc())
            .paginate(current_page, batch_size)
        )

        while len(events) > 0:
            event: Event
            batch_thumbs = {}
            batch_descs = {}

            for event in events:
                batch_thumbs[event.id] = base64.b64decode(event.thumbnail)
                totals["thumbnails"] += 1

                if description := event.data.get("description", "").strip():
                    batch_descs[event.id] = description
                    totals["descriptions"] += 1

                totals["processed_objects"] += 1

            # run batch embedding
            self.batch_embed_thumbnail(batch_thumbs)

            if batch_descs:
                self.batch_embed_description(batch_descs)

            # report progress every batch so we don't spam the logs
            progress = (totals["processed_objects"] / total_events) * 100
            logger.debug(
                "Processed %d/%d events (%.2f%% complete) | Thumbnails: %d, Descriptions: %d",
                totals["processed_objects"],
                total_events,
                progress,
                totals["thumbnails"],
                totals["descriptions"],
            )

            # Calculate time remaining
            elapsed_time = time.time() - st
            avg_time_per_event = elapsed_time / totals["processed_objects"]
            remaining_events = total_events - totals["processed_objects"]
            time_remaining = avg_time_per_event * remaining_events
            totals["time_remaining"] = int(time_remaining)

            self.requestor.send_data(UPDATE_EMBEDDINGS_REINDEX_PROGRESS, totals)

            # Move to the next page
            current_page += 1
            events = (
                Event.select()
                .where(
                    ((Event.has_clip == True) | (Event.has_snapshot == True))
                    & Event.thumbnail.is_null(False)
                )
                .order_by(Event.start_time.desc())
                .paginate(current_page, batch_size)
            )

        logger.info(
            "Embedded %d thumbnails and %d descriptions in %s seconds",
            totals["thumbnails"],
            totals["descriptions"],
            round(time.time() - st, 1),
        )
        totals["status"] = "completed"

        self.requestor.send_data(UPDATE_EMBEDDINGS_REINDEX_PROGRESS, totals)