Mirror of https://github.com/blakeblackshear/frigate.git
Add ability to pass additional args to Ollama (#19484)
* Call out recognized objects more specifically
* Cleanup
* Make keep_alive and options configurable
* Generalize
* Use for other providers
parent 34bf1b21df
commit 4b33e3765e
@@ -1,5 +1,5 @@
 from enum import Enum
-from typing import Optional
+from typing import Any, Optional

 from pydantic import Field

@@ -23,3 +23,6 @@ class GenAIConfig(FrigateBaseModel):
     base_url: Optional[str] = Field(default=None, title="Provider base url.")
     model: str = Field(default="gpt-4o", title="GenAI model.")
     provider: GenAIProviderEnum | None = Field(default=None, title="GenAI provider.")
+    provider_options: dict[str, Any] = Field(
+        default={}, title="GenAI Provider extra options."
+    )
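The new provider_options field is an open-ended dict that later gets unpacked as keyword arguments into each provider's client. A minimal runnable sketch of that pattern follows; the class name GenAIConfigSketch and the option value are illustrative stand-ins, not Frigate's actual config module.

# Sketch only: a stand-in Pydantic model mirroring the new provider_options field.
from typing import Any, Optional

from pydantic import BaseModel, Field


class GenAIConfigSketch(BaseModel):  # hypothetical stand-in for GenAIConfig
    api_key: Optional[str] = Field(default=None, title="Provider API key.")
    model: str = Field(default="gpt-4o", title="GenAI model.")
    provider_options: dict[str, Any] = Field(
        default={}, title="GenAI Provider extra options."
    )


cfg = GenAIConfigSketch(provider_options={"keep_alive": "24h"})  # assumed option value
# Downstream call sites unpack the dict as keyword arguments, e.g.
#     provider.generate(..., **cfg.provider_options)
print(cfg.provider_options)  # {'keep_alive': '24h'}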
@@ -209,7 +209,9 @@ def run_analysis(
         {
             "id": final_data["id"],
             "camera": camera,
-            "objects": final_data["data"]["objects"],
+            "objects": list(
+                filter(lambda o: "-verified" not in o, final_data["data"]["objects"])
+            ),
             "recognized_objects": final_data["data"]["sub_labels"],
             "zones": final_data["data"]["zones"],
             "timestamp": datetime.datetime.fromtimestamp(final_data["end_time"]),
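A tiny illustration of the filter added above: object labels containing a "-verified" suffix are dropped before the list is handed to the prompt builder. The sample labels below are made up.

# Sketch of the filter behaviour with made-up labels.
objects = ["person", "face-verified", "car", "license_plate-verified"]
visible = list(filter(lambda o: "-verified" not in o, objects))
print(visible)  # ['person', 'car']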
@@ -46,20 +46,21 @@ class GenAIClient:
         debug_save: bool,
     ) -> ReviewMetadata | None:
         """Generate a description for the review item activity."""

-        if concerns:
-            concern_list = "\n    - ".join(concerns)
-            concern_prompt = f"""
+        def get_concern_prompt() -> str:
+            if concerns:
+                concern_list = "\n    - ".join(concerns)
+                return f"""
 - `other_concerns` (list of strings): Include a list of any of the following concerns that are occurring:
-    - {concern_list}
-"""
-        else:
-            concern_prompt = ""
+    - {concern_list}"""
+            else:
+                return ""

-        if preferred_language:
-            language_prompt = f"Provide your answer in {preferred_language}"
-        else:
-            language_prompt = ""
+        def get_language_prompt() -> str:
+            if preferred_language:
+                return f"Provide your answer in {preferred_language}"
+            else:
+                return ""

         context_prompt = f"""
 Please analyze the sequence of images ({len(thumbnails)} total) taken in chronological order from the perspective of the {review_data["camera"].replace("_", " ")} security camera.
@@ -81,7 +82,7 @@ Your response MUST be a flat JSON object with:
 - `scene` (string): A full description including setting, entities, actions, and any plausible supported inferences.
 - `confidence` (float): 0-1 confidence in the analysis.
 - `potential_threat_level` (integer): 0, 1, or 2 as defined below.
-{concern_prompt}
+{get_concern_prompt()}

 Threat-level definitions:
 - 0 — Typical or expected activity for this location/time (includes residents, guests, or known animals engaged in normal activities, even if they glance around or scan surroundings).
@@ -97,7 +98,7 @@ Sequence details:

 **IMPORTANT:**
 - Values must be plain strings, floats, or integers — no nested objects, no extra commentary.
-{language_prompt}
+{get_language_prompt()}
 """
         logger.debug(
             f"Sending {len(thumbnails)} images to create review description on {review_data['camera']}"
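A condensed sketch of the refactor in the three hunks above: the concern and language fragments now come from small helper functions that return an empty string when unset and are interpolated directly into the prompt f-string, rather than being pre-computed into concern_prompt / language_prompt variables. The concern value below is invented.

# Sketch: helpers return "" when there is nothing to add, so the prompt stays clean.
concerns = ["package left at the door"]  # assumed example concern
preferred_language = None


def get_concern_prompt() -> str:
    if concerns:
        concern_list = "\n    - ".join(concerns)
        return f"""
- `other_concerns` (list of strings): Include a list of any of the following concerns that are occurring:
    - {concern_list}"""
    return ""


def get_language_prompt() -> str:
    return f"Provide your answer in {preferred_language}" if preferred_language else ""


prompt = f"""Analyze the images.
{get_concern_prompt()}
{get_language_prompt()}
"""
print(prompt)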
@@ -21,7 +21,9 @@ class GeminiClient(GenAIClient):
     def _init_provider(self):
         """Initialize the client."""
         genai.configure(api_key=self.genai_config.api_key)
-        return genai.GenerativeModel(self.genai_config.model)
+        return genai.GenerativeModel(
+            self.genai_config.model, **self.genai_config.provider_options
+        )

     def _send(self, prompt: str, images: list[bytes]) -> Optional[str]:
         """Submit a request to Gemini."""
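With provider_options unpacked into the GenerativeModel constructor, Gemini-specific arguments such as generation_config or safety_settings (real parameters of the google-generativeai package) can come from configuration. A hedged sketch; the model name, API key, and option values are assumptions, not taken from the commit.

# Sketch: provider_options splatted into the GenerativeModel constructor.
import google.generativeai as genai

provider_options = {"generation_config": {"temperature": 0.2}}  # assumed value
genai.configure(api_key="example-key")  # placeholder key
model = genai.GenerativeModel("gemini-1.5-flash", **provider_options)  # assumed model name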
@@ -48,7 +48,7 @@ class OllamaClient(GenAIClient):
                 self.genai_config.model,
                 prompt,
                 images=images if images else None,
-                keep_alive="1h",
+                **self.genai_config.provider_options,
             )
             return result["response"].strip()
         except (TimeoutException, ResponseError) as e:
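This hunk is the change the commit title refers to: keep_alive was previously hard-coded to "1h" and is now supplied, along with any Ollama sampling options, through provider_options. A hedged sketch of an equivalent standalone call with the ollama Python client; host, model name, and option values are assumptions.

# Sketch: keep_alive and options passed through provider_options to Ollama.
from ollama import Client

provider_options = {"keep_alive": "24h", "options": {"num_ctx": 8192}}  # assumed values
client = Client(host="http://localhost:11434")  # assumed host
result = client.generate(
    "llava",                  # assumed vision model name
    "Describe this scene.",
    images=None,
    **provider_options,
)
print(result["response"].strip())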
@@ -21,7 +21,9 @@ class OpenAIClient(GenAIClient):

     def _init_provider(self):
         """Initialize the client."""
-        return OpenAI(api_key=self.genai_config.api_key)
+        return OpenAI(
+            api_key=self.genai_config.api_key, **self.genai_config.provider_options
+        )

     def _send(self, prompt: str, images: list[bytes]) -> Optional[str]:
         """Submit a request to OpenAI."""
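The same generalization applies to the OpenAI client: extra constructor arguments such as timeout or max_retries, both real parameters of the OpenAI Python SDK, can now be routed through provider_options. A minimal sketch with illustrative values.

# Sketch: provider_options splatted into the OpenAI client constructor.
from openai import OpenAI

provider_options = {"timeout": 30.0, "max_retries": 2}  # assumed values
client = OpenAI(api_key="sk-example", **provider_options)  # placeholder key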