2024-10-07 22:30:45 +02:00
|
|
|
"""SQLite-vec embeddings database."""
|
2024-06-21 23:30:19 +02:00
|
|
|
|
|
|
|
import base64
|
|
|
|
import io
|
|
|
|
import logging
|
|
|
|
import time
|
|
|
|
|
|
|
|
from PIL import Image
|
|
|
|
from playhouse.shortcuts import model_to_dict
|
|
|
|
|
2024-10-07 22:30:45 +02:00
|
|
|
from frigate.comms.inter_process import InterProcessRequestor
|
2024-10-10 15:09:12 +02:00
|
|
|
from frigate.config.semantic_search import SemanticSearchConfig
|
2024-10-10 21:28:43 +02:00
|
|
|
from frigate.const import UPDATE_EMBEDDINGS_REINDEX_PROGRESS, UPDATE_MODEL_STATE
|
2024-10-07 22:30:45 +02:00
|
|
|
from frigate.db.sqlitevecq import SqliteVecQueueDatabase
|
2024-06-21 23:30:19 +02:00
|
|
|
from frigate.models import Event
|
2024-10-07 22:30:45 +02:00
|
|
|
from frigate.types import ModelStatusTypesEnum
|
2024-10-10 17:42:24 +02:00
|
|
|
from frigate.util.builtin import serialize
|
2024-06-21 23:30:19 +02:00
|
|
|
|
2024-10-09 23:31:54 +02:00
|
|
|
from .functions.onnx import GenericONNXEmbedding
|
2024-06-21 23:30:19 +02:00
|
|
|
|
|
|
|
# Module-level logger named after this module, per the project convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
def get_metadata(event: Event) -> dict:
    """Extract valid event metadata.

    Flattens an ``Event`` row into a plain dict of scalar values suitable
    for metadata search:

    - top-level scalar columns (``thumbnail`` excluded),
    - scalar entries of the ``data`` JSON column (``description`` excluded),
    - one ``<column>_<value>: True`` key per string element of every
      non-empty list column (e.g. zones), because metadata search doesn't
      support ``$contains``.
    """
    event_dict = model_to_dict(event)
    # `data` is a JSON column; guard against it being absent or None so a
    # partially-populated row doesn't raise KeyError/AttributeError.
    data_dict = event_dict.get("data") or {}

    def _scalars(source: dict, excluded: set) -> dict:
        # Keep only non-None str/int/float/bool values outside `excluded`.
        return {
            k: v
            for k, v in source.items()
            if k not in excluded
            and v is not None
            and isinstance(v, (str, int, float, bool))
        }

    return (
        _scalars(event_dict, {"thumbnail"})
        | _scalars(data_dict, {"description"})
        | {
            # Metadata search doesn't support $contains
            # and an event can have multiple zones, so
            # we need to create a key for each zone
            f"{k}_{x}": True
            for k, v in event_dict.items()
            if isinstance(v, list) and len(v) > 0
            for x in v
            if isinstance(x, str)
        }
    )
|
|
|
|
|
|
|
|
|
|
|
|
class Embeddings:
    """SQLite-vec embeddings database.

    Owns the jina-clip-v1 text and vision embedders and persists their
    vectors into the ``vec_thumbnails`` / ``vec_descriptions`` virtual
    tables of the sqlite-vec database.
    """

    def __init__(
        self, config: SemanticSearchConfig, db: SqliteVecQueueDatabase
    ) -> None:
        self.config = config
        self.db = db
        self.requestor = InterProcessRequestor()

        # Create tables if they don't exist
        self.db.create_embeddings_tables()

        # Announce every model artifact as not-downloaded so the UI can
        # track download progress from a clean state.
        models = [
            "jinaai/jina-clip-v1-text_model_fp16.onnx",
            "jinaai/jina-clip-v1-tokenizer",
            "jinaai/jina-clip-v1-vision_model_fp16.onnx"
            if config.model_size == "large"
            else "jinaai/jina-clip-v1-vision_model_quantized.onnx",
            "jinaai/jina-clip-v1-preprocessor_config.json",
        ]

        for model in models:
            self.requestor.send_data(
                UPDATE_MODEL_STATE,
                {
                    "model": model,
                    "state": ModelStatusTypesEnum.not_downloaded,
                },
            )

        def jina_text_embedding_function(outputs):
            # The ONNX session returns a list of outputs; the embedding
            # tensor is the first one.
            return outputs[0]

        def jina_vision_embedding_function(outputs):
            return outputs[0]

        # Text embedding always runs on CPU; only the vision model honors
        # the configured device.
        self.text_embedding = GenericONNXEmbedding(
            model_name="jinaai/jina-clip-v1",
            model_file="text_model_fp16.onnx",
            tokenizer_file="tokenizer",
            download_urls={
                "text_model_fp16.onnx": "https://huggingface.co/jinaai/jina-clip-v1/resolve/main/onnx/text_model_fp16.onnx",
            },
            embedding_function=jina_text_embedding_function,
            model_size=config.model_size,
            model_type="text",
            requestor=self.requestor,
            device="CPU",
        )

        model_file = (
            "vision_model_fp16.onnx"
            if self.config.model_size == "large"
            else "vision_model_quantized.onnx"
        )

        download_urls = {
            model_file: f"https://huggingface.co/jinaai/jina-clip-v1/resolve/main/onnx/{model_file}",
            "preprocessor_config.json": "https://huggingface.co/jinaai/jina-clip-v1/resolve/main/preprocessor_config.json",
        }

        self.vision_embedding = GenericONNXEmbedding(
            model_name="jinaai/jina-clip-v1",
            model_file=model_file,
            download_urls=download_urls,
            embedding_function=jina_vision_embedding_function,
            model_size=config.model_size,
            model_type="vision",
            requestor=self.requestor,
            device=self.config.device,
        )

    def upsert_thumbnail(self, event_id: str, thumbnail: bytes):
        """Embed a thumbnail image and store it for *event_id*.

        Returns the computed embedding so callers can reuse it without a
        second inference.
        """
        # Convert thumbnail bytes to PIL Image
        image = Image.open(io.BytesIO(thumbnail)).convert("RGB")
        embedding = self.vision_embedding([image])[0]

        self.db.execute_sql(
            """
            INSERT OR REPLACE INTO vec_thumbnails(id, thumbnail_embedding)
            VALUES(?, ?)
            """,
            (event_id, serialize(embedding)),
        )

        return embedding

    def upsert_description(self, event_id: str, description: str):
        """Embed a text description and store it for *event_id*.

        Returns the computed embedding.
        """
        embedding = self.text_embedding([description])[0]
        self.db.execute_sql(
            """
            INSERT OR REPLACE INTO vec_descriptions(id, description_embedding)
            VALUES(?, ?)
            """,
            (event_id, serialize(embedding)),
        )

        return embedding

    def reindex(self) -> None:
        """Drop and rebuild all embeddings for stored tracked objects."""
        logger.info("Indexing tracked object embeddings...")

        self.db.drop_embeddings_tables()
        logger.debug("Dropped embeddings tables.")
        self.db.create_embeddings_tables()
        logger.debug("Created embeddings tables.")

        st = time.time()
        totals = {
            "thumbnails": 0,
            "descriptions": 0,
            "processed_objects": 0,
            "total_objects": 0,
        }

        self.requestor.send_data(UPDATE_EMBEDDINGS_REINDEX_PROGRESS, totals)

        # Events with media worth embedding: a clip or a snapshot, plus a
        # stored thumbnail.  NOTE: the comparisons must be parenthesized —
        # Python's `|` binds tighter than `==`, so the unparenthesized form
        # `a == True | b == True` builds a different (wrong) peewee query.
        eligible = ((Event.has_clip == True) | (Event.has_snapshot == True)) & (
            Event.thumbnail.is_null(False)
        )

        # Get total count of events to process
        total_events = Event.select().where(eligible).count()
        totals["total_objects"] = total_events

        batch_size = 100
        current_page = 1

        def get_batch(page: int):
            # One page of eligible events, newest first.
            return (
                Event.select()
                .where(eligible)
                .order_by(Event.start_time.desc())
                .paginate(page, batch_size)
            )

        events = get_batch(current_page)

        while len(events) > 0:
            event: Event
            for event in events:
                thumbnail = base64.b64decode(event.thumbnail)
                self.upsert_thumbnail(event.id, thumbnail)
                totals["thumbnails"] += 1

                if description := event.data.get("description", "").strip():
                    totals["descriptions"] += 1
                    self.upsert_description(event.id, description)

                totals["processed_objects"] += 1

                # report progress every 10 events so we don't spam the logs
                if (totals["processed_objects"] % 10) == 0:
                    # Use the live counter — a separate, never-incremented
                    # variable here previously made progress always read 0.
                    progress = (totals["processed_objects"] / total_events) * 100
                    logger.debug(
                        "Processed %d/%d events (%.2f%% complete) | Thumbnails: %d, Descriptions: %d",
                        totals["processed_objects"],
                        total_events,
                        progress,
                        totals["thumbnails"],
                        totals["descriptions"],
                    )

                    self.requestor.send_data(
                        UPDATE_EMBEDDINGS_REINDEX_PROGRESS, totals
                    )

            # Move to the next page
            current_page += 1
            events = get_batch(current_page)

        logger.info(
            "Embedded %d thumbnails and %d descriptions in %s seconds",
            totals["thumbnails"],
            totals["descriptions"],
            time.time() - st,
        )
        self.requestor.send_data(UPDATE_EMBEDDINGS_REINDEX_PROGRESS, totals)
|