Mirror of https://github.com/blakeblackshear/frigate.git (synced 2024-11-21 19:07:46 +01:00)

Commit 1ed8642010 (parent 38ff46e45c)

Refactor onnx providers (#13804)

* Ensure dirs exist for model caches
* Formatting
* Don't use tensorrt for embeddings
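Both files touched by this commit build parallel providers and options lists that are then handed to ONNX Runtime. For orientation, here is a minimal sketch of how such parallel lists are typically consumed, assuming standard onnxruntime usage; the session-creation call and the model path are illustrative and not taken from the commit:

import os

import onnxruntime as ort

providers = []
options = []

# Build parallel lists: the i-th dict in provider_options configures
# the i-th entry in providers.
for provider in ort.get_available_providers():
    if provider == "OpenVINOExecutionProvider":
        # The commit ensures cache directories like this exist up front.
        os.makedirs("/config/model_cache/openvino/ort", exist_ok=True)
        providers.append(provider)
        options.append({"cache_dir": "/config/model_cache/openvino/ort"})
    else:
        providers.append(provider)
        options.append({})

# "model.onnx" is a placeholder path, not part of the commit.
session = ort.InferenceSession(
    "model.onnx", providers=providers, provider_options=options
)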
Changes to the ONNX detector plugin (class ONNXDetector):

@@ -1,4 +1,5 @@
 import logging
+import os
 
 import cv2
 import numpy as np
@@ -42,6 +43,9 @@ class ONNXDetector(DetectionApi):
 
        for provider in providers:
            if provider == "TensorrtExecutionProvider":
+               os.makedirs(
+                   "/config/model_cache/tensorrt/ort/trt-engines", exist_ok=True
+               )
                options.append(
                    {
                        "trt_timing_cache_enable": True,
@@ -51,7 +55,13 @@ class ONNXDetector(DetectionApi):
                    }
                )
            elif provider == "OpenVINOExecutionProvider":
-               options.append({"cache_dir": "/config/model_cache/openvino/ort"})
+               os.makedirs("/config/model_cache/openvino/ort", exist_ok=True)
+               options.append(
+                   {
+                       "cache_dir": "/config/model_cache/openvino/ort",
+                       "device_type": "GPU",
+                   }
+               )
            else:
                options.append({})
 
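The added os.makedirs calls follow the commit's stated goal of ensuring the model-cache directories exist before a session is created; presumably the TensorRT and OpenVINO execution providers do not create the cache paths themselves. A minimal standalone sketch of the TensorRT engine/timing cache setup, reusing the option keys and paths from the diff; the model path and the session call are illustrative only:

import os

import onnxruntime as ort

# Same cache locations as the diff; created up front so the TensorRT EP
# can write its engine and timing caches into them.
engine_cache = "/config/model_cache/tensorrt/ort/trt-engines"
os.makedirs(engine_cache, exist_ok=True)

trt_options = {
    "trt_timing_cache_enable": True,
    "trt_timing_cache_path": "/config/model_cache/tensorrt/ort",
    "trt_engine_cache_enable": True,
    "trt_engine_cache_path": engine_cache,
}

# "detector.onnx" is a placeholder model path.
session = ort.InferenceSession(
    "detector.onnx",
    providers=["TensorrtExecutionProvider"],
    provider_options=[trt_options],
)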
Changes to the CLIP embedding model loader (class Clip(OnnxClip)):

@@ -49,22 +49,24 @@ class Clip(OnnxClip):
 
    @staticmethod
    def _load_model(path: str, silent: bool):
-       providers = ort.get_available_providers()
+       providers = []
        options = []
 
-       for provider in providers:
+       for provider in ort.get_available_providers():
            if provider == "TensorrtExecutionProvider":
+               continue
+           elif provider == "OpenVINOExecutionProvider":
+               # TODO need to verify openvino works correctly
+               os.makedirs("/config/model_cache/openvino/ort", exist_ok=True)
+               providers.append(provider)
                options.append(
                    {
-                       "trt_timing_cache_enable": True,
-                       "trt_timing_cache_path": "/config/model_cache/tensorrt/ort",
-                       "trt_engine_cache_enable": True,
-                       "trt_engine_cache_path": "/config/model_cache/tensorrt/ort/trt-engines",
+                       "cache_dir": "/config/model_cache/openvino/ort",
+                       "device_type": "GPU",
                    }
                )
-           elif provider == "OpenVINOExecutionProvider":
-               options.append({"cache_dir": "/config/model_cache/openvino/ort"})
            else:
+               providers.append(provider)
                options.append({})
 
        try:
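After this change the embedding loader filters TensorRT out before building the provider list, so it can never be selected for the CLIP models even when it is available. A small check of the resulting behaviour, assuming a standard onnxruntime session; the model path is a placeholder and provider options are omitted for brevity:

import onnxruntime as ort

# Rebuild the filtered list the way _load_model now does: skip TensorRT,
# keep every other available provider.
providers = [
    p for p in ort.get_available_providers() if p != "TensorrtExecutionProvider"
]

# "clip_visual.onnx" is a placeholder path for the CLIP model file.
session = ort.InferenceSession("clip_visual.onnx", providers=providers)

# The session reports the providers it actually activated; TensorRT
# cannot be among them because it was never requested.
assert "TensorrtExecutionProvider" not in session.get_providers()
print("embeddings running on:", session.get_providers())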