36cbffcc5e
Initial re-implementation of semantic search

* put docker-compose back and make reindex match docs
* remove debug code and fix import
* fix docs
* manually build pysqlite3 as binaries are only available for x86-64
* update comment in build_pysqlite3.sh
* only embed objects
* better error handling when genai fails
* ask ollama to pull requested model at startup
* update ollama docs
* address some PR review comments
* fix lint
* use IPC to write description, update docs for reindex
* remove gemini-pro-vision from docs as it will be unavailable soon
* fix OpenAI doc available models
* fix api error in gemini and metadata for embeddings
42 lines · 1.3 KiB · Python
"""Ollama Provider for Frigate AI."""
|
|
|
|
import logging
|
|
from typing import Optional
|
|
|
|
from httpx import TimeoutException
|
|
from ollama import Client as ApiClient
|
|
from ollama import ResponseError
|
|
|
|
from frigate.config import GenAIProviderEnum
|
|
from frigate.genai import GenAIClient, register_genai_provider
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
@register_genai_provider(GenAIProviderEnum.ollama)
|
|
class OllamaClient(GenAIClient):
|
|
"""Generative AI client for Frigate using Ollama."""
|
|
|
|
provider: ApiClient
|
|
|
|
def _init_provider(self):
|
|
"""Initialize the client."""
|
|
client = ApiClient(host=self.genai_config.base_url, timeout=self.timeout)
|
|
response = client.pull(self.genai_config.model)
|
|
if response["status"] != "success":
|
|
logger.error("Failed to pull %s model from Ollama", self.genai_config.model)
|
|
return None
|
|
return client
|
|
|
|
def _send(self, prompt: str, images: list[bytes]) -> Optional[str]:
|
|
"""Submit a request to Ollama"""
|
|
try:
|
|
result = self.provider.generate(
|
|
self.genai_config.model,
|
|
prompt,
|
|
images=images,
|
|
)
|
|
return result["response"].strip()
|
|
except (TimeoutException, ResponseError):
|
|
return None
|
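
Below, for reference, is a minimal usage sketch; it is not part of the file above. It assumes the GenAIClient base class accepts the provider config plus a timeout and assigns the return value of _init_provider() to self.provider, which the self.genai_config, self.timeout, and self.provider reads in this file suggest, and it stands in a SimpleNamespace for Frigate's real GenAI config, supplying only the base_url and model fields the code actually reads. The endpoint URL and "llava" model name are placeholders.

from types import SimpleNamespace

# Hypothetical stand-in for Frigate's GenAI config; only the fields this
# provider reads (base_url and model) are supplied.
genai_config = SimpleNamespace(
    base_url="http://localhost:11434",  # Ollama's default endpoint (assumed)
    model="llava",  # any multimodal model Ollama can pull (placeholder)
)

# Assumed base-class signature: GenAIClient(genai_config, timeout=...).
client = OllamaClient(genai_config, timeout=60)
if client.provider is not None:
    with open("snapshot.jpg", "rb") as f:
        # _send is normally invoked through the GenAIClient base class; it is
        # called directly here only to illustrate the request shape.
        print(client._send("Describe this image.", [f.read()]))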