diff --git a/docker/tensorrt/Dockerfile.amd64 b/docker/tensorrt/Dockerfile.amd64
index 61d3264c9..3dcb42658 100644
--- a/docker/tensorrt/Dockerfile.amd64
+++ b/docker/tensorrt/Dockerfile.amd64
@@ -3,8 +3,6 @@
 # https://askubuntu.com/questions/972516/debian-frontend-environment-variable
 ARG DEBIAN_FRONTEND=noninteractive
 
-ARG TRT_BASE=nvcr.io/nvidia/tensorrt:23.03-py3
-
 # Make this a separate target so it can be built/cached optionally
 FROM wheels as trt-wheels
 ARG DEBIAN_FRONTEND
@@ -15,7 +13,7 @@
 COPY docker/tensorrt/requirements-amd64.txt /requirements-tensorrt.txt
 RUN mkdir -p /trt-wheels && pip3 wheel --wheel-dir=/trt-wheels -r /requirements-tensorrt.txt
 
 # Build CuDNN
-FROM ${TRT_BASE} AS cudnn-deps
+FROM wget AS cudnn-deps
 
 ARG COMPUTE_LEVEL
diff --git a/frigate/detectors/plugins/onnx.py b/frigate/detectors/plugins/onnx.py
index a0f5552d9..f3e15422b 100644
--- a/frigate/detectors/plugins/onnx.py
+++ b/frigate/detectors/plugins/onnx.py
@@ -49,7 +49,9 @@ class ONNXDetector(DetectionApi):
                         "trt_timing_cache_enable": True,
                         "trt_timing_cache_path": "/config/model_cache/tensorrt/ort",
                         "trt_engine_cache_enable": True,
+                        "trt_dump_ep_context_model": True,
                         "trt_engine_cache_path": "/config/model_cache/tensorrt/ort/trt-engines",
+                        "trt_ep_context_file_path": "/config/model_cache/tensorrt/ort",
                     }
                 )
             elif provider == "OpenVINOExecutionProvider":