Initial support for Hailo-8L (#12431)

* Initial support for Hailo-8L

Added files for the Hailo-8L detector, including the Dockerfile, h8l.mk, h8l.hcl, hailo8l.py, ci.yml, and ssd_mobilenet_v1.hef as the inference network.

Added files to help with the installation of Hailo-8L dependencies, like generate_wheel_conf.py and requirements-wheels-h8l.txt, and modified setup.py to try to work with any hardware.

Updated docs to reflect initial Hailo-8L support, including object_detectors.md, hardware.md, and installation.md.

* Update .github/workflows/ci.yml

Typo: h8l, not arm64.

Co-authored-by: Nicolas Mowen <nickmowen213@gmail.com>

* Update docs/docs/configuration/object_detectors.md

Improve clarity for the end user and correct word usage.

Co-authored-by: Nicolas Mowen <nickmowen213@gmail.com>

* Update docs/docs/frigate/installation.md

typo

Co-authored-by: Nicolas Mowen <nickmowen213@gmail.com>

* Update installation.md to clarify the Hailo-8L installation process.

* Update docs/docs/frigate/hardware.md

Co-authored-by: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>

* Update hardware.md: add inference time.

* Oops, no newline at the end of the file.

* Update docs/docs/frigate/hardware.md typo

Co-authored-by: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>

* Update Dockerfile to download the ssd_mobilenet_v1 model instead of having it in the repo.

* Updated Dockerfile so it does not download the model file.

Added a function to download it at runtime.

Updated the model path.

* Fix formatting according to ruff and remove unnecessary functions.

---------

Co-authored-by: Nicolas Mowen <nickmowen213@gmail.com>
Co-authored-by: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
spanner3003 2024-07-14 18:17:02 +01:00 committed by Nicolas Mowen
parent e7fabce4e0
commit 4a35573210
12 changed files with 689 additions and 1 deletion

.github/workflows/ci.yml

@@ -78,6 +78,16 @@ jobs:
set: |
rk.tags=${{ steps.setup.outputs.image-name }}-rk
*.cache-from=type=gha
- name: Build and push Hailo-8l build
uses: docker/bake-action@v4
with:
push: true
targets: h8l
files: docker/hailo8l/h8l.hcl
set: |
h8l.tags=${{ steps.setup.outputs.image-name }}-h8l
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-h8l
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-h8l,mode=max
jetson_jp4_build:
runs-on: ubuntu-latest
name: Jetson Jetpack 4

CODEOWNERS

@@ -4,3 +4,4 @@
/docker/tensorrt/*jetson* @madsciencetist
/docker/rockchip/ @MarcA711
/docker/rocm/ @harakas
/docker/hailo8l/ @spanner3003

docker/hailo8l/Dockerfile (new file, 100 lines)

@@ -0,0 +1,100 @@
# syntax=docker/dockerfile:1.6
ARG DEBIAN_FRONTEND=noninteractive
# Build Python wheels
FROM wheels AS h8l-wheels
COPY docker/main/requirements-wheels.txt /requirements-wheels.txt
COPY docker/hailo8l/requirements-wheels-h8l.txt /requirements-wheels-h8l.txt
RUN sed -i "/https:\/\//d" /requirements-wheels.txt
# Create a directory to store the built wheels
RUN mkdir /h8l-wheels
# Build the wheels
RUN pip3 wheel --wheel-dir=/h8l-wheels -c /requirements-wheels.txt -r /requirements-wheels-h8l.txt
# Build HailoRT and create wheel
FROM deps AS build-hailort
# Install necessary APT packages
RUN apt-get update && apt-get install -y \
build-essential \
cmake \
git \
wget \
python3-dev \
gcc-9 \
g++-9 \
libzmq3-dev \
pciutils \
rsync \
&& rm -rf /var/lib/apt/lists/*
# Extract Python version and set environment variables
RUN PYTHON_VERSION=$(python3 --version 2>&1 | awk '{print $2}' | cut -d. -f1,2) && \
PYTHON_VERSION_NO_DOT=$(echo $PYTHON_VERSION | sed 's/\.//') && \
echo "PYTHON_VERSION=$PYTHON_VERSION" > /etc/environment && \
echo "PYTHON_VERSION_NO_DOT=$PYTHON_VERSION_NO_DOT" >> /etc/environment
#ENV PYTHON_VER=$PYTHON_VERSION
#ENV PYTHON_VER_NO_DOT=$PYTHON_VERSION_NO_DOT
# Clone and build HailoRT
RUN . /etc/environment && \
git clone https://github.com/hailo-ai/hailort.git /opt/hailort && \
cd /opt/hailort && \
git checkout v4.17.0 && \
cmake -H. -Bbuild -DCMAKE_BUILD_TYPE=Release -DHAILO_BUILD_PYBIND=1 -DPYBIND11_PYTHON_VERSION=${PYTHON_VERSION} && \
cmake --build build --config release --target libhailort && \
cmake --build build --config release --target _pyhailort && \
cp build/hailort/libhailort/bindings/python/src/_pyhailort.cpython-${PYTHON_VERSION_NO_DOT}-aarch64-linux-gnu.so hailort/libhailort/bindings/python/platform/hailo_platform/pyhailort/ && \
cp build/hailort/libhailort/src/libhailort.so hailort/libhailort/bindings/python/platform/hailo_platform/pyhailort/
RUN ls -ahl /opt/hailort/build/hailort/libhailort/src/
RUN ls -ahl /opt/hailort/hailort/libhailort/bindings/python/platform/hailo_platform/pyhailort/
# Remove the existing setup.py if it exists in the target directory
RUN rm -f /opt/hailort/hailort/libhailort/bindings/python/platform/setup.py
# Copy generate_wheel_conf.py and setup.py
COPY docker/hailo8l/pyhailort_build_scripts/generate_wheel_conf.py /opt/hailort/hailort/libhailort/bindings/python/platform/generate_wheel_conf.py
COPY docker/hailo8l/pyhailort_build_scripts/setup.py /opt/hailort/hailort/libhailort/bindings/python/platform/setup.py
# Run the generate_wheel_conf.py script
RUN python3 /opt/hailort/hailort/libhailort/bindings/python/platform/generate_wheel_conf.py
# Create a wheel file using pip3 wheel
RUN cd /opt/hailort/hailort/libhailort/bindings/python/platform && \
python3 setup.py bdist_wheel --dist-dir /hailo-wheels
# Use deps as the base image
FROM deps AS h8l-frigate
# Copy the wheels from the wheels stage
COPY --from=h8l-wheels /h8l-wheels /deps/h8l-wheels
COPY --from=build-hailort /hailo-wheels /deps/hailo-wheels
COPY --from=build-hailort /etc/environment /etc/environment
RUN CC=$(python3 -c "import sysconfig; import shlex; cc = sysconfig.get_config_var('CC'); cc_cmd = shlex.split(cc)[0]; print(cc_cmd[:-4] if cc_cmd.endswith('-gcc') else cc_cmd)") && \
echo "CC=$CC" >> /etc/environment
# Install the wheels
RUN pip3 install -U /deps/h8l-wheels/*.whl
RUN pip3 install -U /deps/hailo-wheels/*.whl
RUN . /etc/environment && \
mv /usr/local/lib/python${PYTHON_VERSION}/dist-packages/hailo_platform/pyhailort/libhailort.so /usr/lib/${CC} && \
cd /usr/lib/${CC}/ && \
ln -s libhailort.so libhailort.so.4.17.0
# Copy base files from the rootfs stage
COPY --from=rootfs / /
# Set environment variables for Hailo SDK
ENV PATH="/opt/hailort/bin:${PATH}"
ENV LD_LIBRARY_PATH="/usr/lib/aarch64-linux-gnu:${LD_LIBRARY_PATH}"
# Set workdir
WORKDIR /opt/frigate/

docker/hailo8l/h8l.hcl (new file, 27 lines)

@@ -0,0 +1,27 @@
target wheels {
dockerfile = "docker/main/Dockerfile"
platforms = ["linux/arm64"]
target = "wheels"
}
target deps {
dockerfile = "docker/main/Dockerfile"
platforms = ["linux/arm64"]
target = "deps"
}
target rootfs {
dockerfile = "docker/main/Dockerfile"
platforms = ["linux/arm64"]
target = "rootfs"
}
target h8l {
dockerfile = "docker/hailo8l/Dockerfile"
contexts = {
wheels = "target:wheels"
deps = "target:deps"
rootfs = "target:rootfs"
}
platforms = ["linux/arm64"]
}

docker/hailo8l/h8l.mk (new file, 10 lines)

@@ -0,0 +1,10 @@
BOARDS += h8l
local-h8l: version
docker buildx bake --load --file=docker/hailo8l/h8l.hcl --set h8l.tags=frigate:latest-h8l h8l
build-h8l: version
docker buildx bake --file=docker/hailo8l/h8l.hcl --set h8l.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-h8l h8l
push-h8l: build-h8l
docker buildx bake --push --file=docker/hailo8l/h8l.hcl --set h8l.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-h8l h8l
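These targets hook into Frigate's main Makefile via the `BOARDS` variable, so the usual board-specific targets apply. A minimal usage sketch (assuming a checkout of the Frigate repository with `docker buildx` available; the tag comes from the `--set h8l.tags` override above):

```bash
# Build the Hailo-8L image locally (runs the local-h8l target above).
# Assumes docker buildx can build linux/arm64, natively or via binfmt emulation.
make local-h8l

# The image is tagged frigate:latest-h8l by the --set h8l.tags override.
docker image ls frigate:latest-h8l
```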

docker/hailo8l/pyhailort_build_scripts/generate_wheel_conf.py (new file)

@@ -0,0 +1,67 @@
import json
import os
import platform
import sys
import sysconfig
def extract_toolchain_info(compiler):
# Remove the "-gcc" or "-g++" suffix if present
if compiler.endswith("-gcc") or compiler.endswith("-g++"):
compiler = compiler.rsplit("-", 1)[0]
# Extract the toolchain and ABI part (e.g., "gnu")
toolchain_parts = compiler.split("-")
abi_conventions = next(
(part for part in toolchain_parts if part in ["gnu", "musl", "eabi", "uclibc"]),
"",
)
return abi_conventions
def generate_wheel_conf():
conf_file_path = os.path.join(
os.path.abspath(os.path.dirname(__file__)), "wheel_conf.json"
)
# Extract current system and Python version information
py_version = f"cp{sys.version_info.major}{sys.version_info.minor}"
arch = platform.machine()
system = platform.system().lower()
libc_version = platform.libc_ver()[1]
# Get the compiler information
compiler = sysconfig.get_config_var("CC")
abi_conventions = extract_toolchain_info(compiler)
# Create the new configuration data
new_conf_data = {
"py_version": py_version,
"arch": arch,
"system": system,
"libc_version": libc_version,
"abi": abi_conventions,
"extension": {
"posix": "so",
"nt": "pyd", # Windows
}[os.name],
}
# If the file exists, load the existing data
if os.path.isfile(conf_file_path):
with open(conf_file_path, "r") as conf_file:
conf_data = json.load(conf_file)
# Update the existing data with the new data
conf_data.update(new_conf_data)
else:
# If the file does not exist, use the new data
conf_data = new_conf_data
# Write the updated data to the file
with open(conf_file_path, "w") as conf_file:
json.dump(conf_data, conf_file, indent=4)
if __name__ == "__main__":
generate_wheel_conf()
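For illustration, running the script inside the arm64 build image would leave a `wheel_conf.json` along these lines. The exact values depend on the interpreter and compiler triplet that `sysconfig` reports; the ones shown are assumptions for a Debian aarch64 image with Python 3.9:

```bash
python3 generate_wheel_conf.py && cat wheel_conf.json
# Illustrative output (values will vary by toolchain):
# {
#     "py_version": "cp39",
#     "arch": "aarch64",
#     "system": "linux",
#     "libc_version": "2.31",
#     "abi": "gnu",
#     "extension": "so"
# }
```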

docker/hailo8l/pyhailort_build_scripts/setup.py (new file)

@@ -0,0 +1,111 @@
import json
import os
from setuptools import find_packages, setup
from wheel.bdist_wheel import bdist_wheel as orig_bdist_wheel
class NonPurePythonBDistWheel(orig_bdist_wheel):
"""Makes the wheel platform-dependent so it can be based on the _pyhailort architecture"""
def finalize_options(self):
orig_bdist_wheel.finalize_options(self)
self.root_is_pure = False
def _get_hailort_lib_path():
lib_filename = "libhailort.so"
lib_path = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
f"hailo_platform/pyhailort/{lib_filename}",
)
if os.path.exists(lib_path):
print(f"Found libhailort shared library at: {lib_path}")
else:
print(f"Error: libhailort shared library not found at: {lib_path}")
raise FileNotFoundError(f"libhailort shared library not found at: {lib_path}")
return lib_path
def _get_pyhailort_lib_path():
conf_file_path = os.path.join(
os.path.abspath(os.path.dirname(__file__)), "wheel_conf.json"
)
if not os.path.isfile(conf_file_path):
raise FileNotFoundError(f"Configuration file not found: {conf_file_path}")
with open(conf_file_path, "r") as conf_file:
content = json.load(conf_file)
py_version = content["py_version"]
arch = content["arch"]
system = content["system"]
extension = content["extension"]
abi = content["abi"]
# Construct the filename directly
lib_filename = f"_pyhailort.cpython-{py_version.split('cp')[1]}-{arch}-{system}-{abi}.{extension}"
lib_path = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
f"hailo_platform/pyhailort/{lib_filename}",
)
if os.path.exists(lib_path):
print(f"Found _pyhailort shared library at: {lib_path}")
else:
print(f"Error: _pyhailort shared library not found at: {lib_path}")
raise FileNotFoundError(
f"_pyhailort shared library not found at: {lib_path}"
)
return lib_path
def _get_package_paths():
packages = []
pyhailort_lib = _get_pyhailort_lib_path()
hailort_lib = _get_hailort_lib_path()
if pyhailort_lib:
packages.append(pyhailort_lib)
if hailort_lib:
packages.append(hailort_lib)
packages.append(os.path.abspath("hailo_tutorials/notebooks/*"))
packages.append(os.path.abspath("hailo_tutorials/hefs/*"))
return packages
if __name__ == "__main__":
setup(
author="Hailo team",
author_email="contact@hailo.ai",
cmdclass={
"bdist_wheel": NonPurePythonBDistWheel,
},
description="HailoRT",
entry_points={
"console_scripts": [
"hailo=hailo_platform.tools.hailocli.main:main",
]
},
install_requires=[
"argcomplete",
"contextlib2",
"future",
"netaddr",
"netifaces",
"verboselogs",
"numpy==1.23.3",
],
name="hailort",
package_data={
"hailo_platform": _get_package_paths(),
},
packages=find_packages(),
platforms=[
"linux_x86_64",
"linux_aarch64",
"win_amd64",
],
url="https://hailo.ai/",
version="4.17.0",
zip_safe=False,
)

docker/hailo8l/requirements-wheels-h8l.txt (new file)

@@ -0,0 +1,12 @@
appdirs==1.4.4
argcomplete==2.0.0
contextlib2==0.6.0.post1
distlib==0.3.6
filelock==3.8.0
future==0.18.2
importlib-metadata==5.1.0
importlib-resources==5.1.2
netaddr==0.8.0
netifaces==0.10.9
verboselogs==1.7
virtualenv==20.17.0

docs/docs/configuration/object_detectors.md

@@ -5,7 +5,7 @@ title: Object Detectors
# Officially Supported Detectors
Frigate provides the following builtin detector types: `cpu`, `edgetpu`, `openvino`, `tensorrt`, `rknn`, and `hailo8l`. By default, Frigate will use a single CPU detector. Other detectors may require additional configuration as described below. When using multiple detectors they will run in dedicated processes, but pull from a common queue of detection requests from across all cameras.
## CPU Detector (not recommended)
@@ -386,3 +386,25 @@ $ cat /sys/kernel/debug/rknpu/load
- All models are automatically downloaded and stored in the folder `config/model_cache/rknn_cache`. After upgrading Frigate, you should remove older models to free up space.
- You can also provide your own `.rknn` model. You should not save your own models in the `rknn_cache` folder; store them directly in the `model_cache` folder or another subfolder. To convert a model to `.rknn` format, see `rknn-toolkit2` (requires an x86 machine). Note that there is only post-processing for the supported models.
## Hailo-8l
This detector is available if you are using a Raspberry Pi 5 with the Hailo-8L AI Kit. It has not been tested with the Hailo-8L on other hardware.
### Configuration
```yaml
detectors:
hailo8l:
type: hailo8l
device: PCIe
model:
path: /config/model_cache/h8l_cache/ssd_mobilenet_v1.hef
model:
width: 300
height: 300
input_tensor: nhwc
input_pixel_format: bgr
model_type: ssd
```

docs/docs/frigate/hardware.md

@@ -107,6 +107,12 @@ Frigate supports hardware video processing on all Rockchip boards. However, hard
The inference time of a rk3588 with all 3 cores enabled is typically 25-30 ms for yolo-nas s.
#### Hailo-8l PCIe
Frigate supports the Hailo-8L M.2 card on any hardware, but currently it has only been tested on the Raspberry Pi 5 PCIe hat from the AI Kit.
The inference time for the Hailo-8L chip at the time of writing is around 17-21 ms for the SSD MobileNet Version 1 model.
## What does Frigate use the CPU for and what does it use a detector for? (ELI5 Version)
This is taken from a [user question on reddit](https://www.reddit.com/r/homeassistant/comments/q8mgau/comment/hgqbxh5/?utm_source=share&utm_medium=web2x&context=3). Modified slightly for clarity.

docs/docs/frigate/installation.md

@@ -100,6 +100,38 @@ By default, the Raspberry Pi limits the amount of memory available to the GPU. I
Additionally, the USB Coral draws a considerable amount of power. If using any other USB devices such as an SSD, you will experience instability due to the Pi not providing enough power to USB devices. You will need to purchase an external USB hub with its own power supply. Some have reported success with <a href="https://amzn.to/3a2mH0P" target="_blank" rel="nofollow noopener sponsored">this</a> (affiliate link).
### Hailo-8L
The Hailo-8L is an M.2 card typically connected to a carrier board for PCIe, which then connects to the Raspberry Pi 5 as part of the AI Kit. However, it can also be used on other boards equipped with an M.2 M key edge connector.
#### Installation
For Raspberry Pi 5 users with the AI Kit, installation is straightforward. Simply follow this [guide](https://www.raspberrypi.com/documentation/accessories/ai-kit.html#ai-kit-installation) to install the driver and software.
For other boards, follow these steps for installation (a sketch of what the install script does follows this list):
1. Install the driver from the [Hailo GitHub repository](https://github.com/hailo-ai/hailort-drivers). A convenient script for Linux is available to clone the repository, build the driver, and install it.
2. Copy or download [this script](https://gist.github.com/spanner3003/4b85751d671d4ac55f926e564f1abc3e#file-install_hailo8l_driver-sh).
3. Ensure it has execution permissions with `sudo chmod +x install_hailo8l_driver.sh`.
4. Run the script with `./install_hailo8l_driver.sh`.
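For reference, here is a rough sketch of what such an install script does, assuming the hailort-drivers repository layout at the time of writing. Treat it as illustrative only; the linked gist is the authoritative version:

```bash
# Illustrative outline only -- use the linked gist for the real script.
set -e

# Clone the PCIe driver sources from Hailo
git clone --depth 1 https://github.com/hailo-ai/hailort-drivers.git
cd hailort-drivers/linux/pcie

# Build and install the hailo_pci kernel module, then load it
make all
sudo make install
sudo modprobe hailo_pci

# The device firmware must also be downloaded and installed (the upstream
# README documents this step) so the driver can load it at boot.
```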
#### Setup
To set up Frigate, follow the default installation instructions, but use a Docker image with the `-h8l` suffix, for example: `ghcr.io/blakeblackshear/frigate:stable-h8l`.
Next, grant Docker permissions to access your hardware by adding the following lines to your `docker-compose.yml` file:
```yaml
devices:
- /dev/hailo0
```
If you are using `docker run`, add `--device /dev/hailo0` to your command, for example:
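```bash
# Minimal sketch of a docker run invocation with the Hailo-8L passed through.
# The volume mount and other flags are illustrative; adapt them to your setup.
docker run -d \
  --name frigate \
  --device /dev/hailo0 \
  -v /path/to/your/config:/config \
  ghcr.io/blakeblackshear/frigate:stable-h8l
```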
#### Configuration
Finally, configure [hardware object detection](/configuration/object_detectors#hailo-8l) to complete the setup.
### Rockchip platform
Make sure that you use a linux distribution that comes with the rockchip BSP kernel 5.10 or 6.1 and necessary drivers (especially rkvdec2 and rknpu). To check, enter the following commands:
@ -222,6 +254,7 @@ The community supported docker image tags for the current stable version are:
- `stable-rocm-gfx900` - AMD gfx900 driver only
- `stable-rocm-gfx1030` - AMD gfx1030 driver only
- `stable-rocm-gfx1100` - AMD gfx1100 driver only
- `stable-h8l` - Frigate build for the Hailo-8L M.2 PCIe Raspberry Pi 5 hat
## Home Assistant Addon

frigate/detectors/plugins/hailo8l.py (new file)

@@ -0,0 +1,289 @@
import logging
import os
import urllib.request
import numpy as np
from hailo_platform import (
HEF,
ConfigureParams,
FormatType,
HailoRTException,
HailoStreamInterface,
InferVStreams,
InputVStreamParams,
OutputVStreamParams,
VDevice,
)
from pydantic import BaseModel, Field
from typing_extensions import Literal
from frigate.detectors.detection_api import DetectionApi
from frigate.detectors.detector_config import BaseDetectorConfig
from frigate.detectors.util import preprocess # Assuming this function is available
# Set up logging
logger = logging.getLogger(__name__)
# Define the detector key for Hailo
DETECTOR_KEY = "hailo8l"
# Configuration class for model settings
class ModelConfig(BaseModel):
path: str = Field(default=None, title="Model Path") # Path to the HEF file
# Configuration class for Hailo detector
class HailoDetectorConfig(BaseDetectorConfig):
type: Literal[DETECTOR_KEY] # Type of the detector
device: str = Field(default="PCIe", title="Device Type") # Device type (e.g., PCIe)
# Hailo detector class implementation
class HailoDetector(DetectionApi):
type_key = DETECTOR_KEY # Set the type key to the Hailo detector key
def __init__(self, detector_config: HailoDetectorConfig):
# Initialize device type and model path from the configuration
self.h8l_device_type = detector_config.device
self.h8l_model_path = detector_config.model.path
self.h8l_model_height = detector_config.model.height
self.h8l_model_width = detector_config.model.width
self.h8l_model_type = detector_config.model.model_type
self.h8l_tensor_format = detector_config.model.input_tensor
self.h8l_pixel_format = detector_config.model.input_pixel_format
self.model_url = "https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/ModelZoo/Compiled/v2.11.0/hailo8l/ssd_mobilenet_v1.hef"
self.cache_dir = "/config/model_cache/h8l_cache"
self.expected_model_filename = "ssd_mobilenet_v1.hef"
output_type = "FLOAT32"
logger.info(f"Initializing Hailo device as {self.h8l_device_type}")
self.check_and_prepare_model()
try:
# Validate device type
if self.h8l_device_type not in ["PCIe", "M.2"]:
raise ValueError(f"Unsupported device type: {self.h8l_device_type}")
# Initialize the Hailo device
self.target = VDevice()
# Load the HEF (Hailo's binary format for neural networks)
self.hef = HEF(self.h8l_model_path)
# Create configuration parameters from the HEF
self.configure_params = ConfigureParams.create_from_hef(
hef=self.hef, interface=HailoStreamInterface.PCIe
)
# Configure the device with the HEF
self.network_groups = self.target.configure(self.hef, self.configure_params)
self.network_group = self.network_groups[0]
self.network_group_params = self.network_group.create_params()
# Create input and output virtual stream parameters
self.input_vstreams_params = InputVStreamParams.make(
self.network_group,
format_type=self.hef.get_input_vstream_infos()[0].format.type,
)
self.output_vstreams_params = OutputVStreamParams.make(
self.network_group, format_type=getattr(FormatType, output_type)
)
# Get input and output stream information from the HEF
self.input_vstream_info = self.hef.get_input_vstream_infos()
self.output_vstream_info = self.hef.get_output_vstream_infos()
logger.info("Hailo device initialized successfully")
logger.debug(f"[__init__] Model Path: {self.h8l_model_path}")
logger.debug(f"[__init__] Input Tensor Format: {self.h8l_tensor_format}")
logger.debug(f"[__init__] Input Pixel Format: {self.h8l_pixel_format}")
logger.debug(f"[__init__] Input VStream Info: {self.input_vstream_info[0]}")
logger.debug(
f"[__init__] Output VStream Info: {self.output_vstream_info[0]}"
)
except HailoRTException as e:
logger.error(f"HailoRTException during initialization: {e}")
raise
except Exception as e:
logger.error(f"Failed to initialize Hailo device: {e}")
raise
def check_and_prepare_model(self):
# Ensure cache directory exists
if not os.path.exists(self.cache_dir):
os.makedirs(self.cache_dir)
# Check for the expected model file
model_file_path = os.path.join(self.cache_dir, self.expected_model_filename)
if not os.path.isfile(model_file_path):
logger.info(
f"A model file was not found at {model_file_path}, Downloading one from {self.model_url}."
)
urllib.request.urlretrieve(self.model_url, model_file_path)
logger.info(f"A model file was downloaded to {model_file_path}.")
else:
logger.info(
f"A model file already exists at {model_file_path} not downloading one."
)
def detect_raw(self, tensor_input):
logger.debug("[detect_raw] Entering function")
logger.debug(
f"[detect_raw] The `tensor_input` = {tensor_input} tensor_input shape = {tensor_input.shape}"
)
if tensor_input is None:
raise ValueError(
"[detect_raw] The 'tensor_input' argument must be provided"
)
# Ensure tensor_input is a numpy array
if isinstance(tensor_input, list):
tensor_input = np.array(tensor_input)
logger.debug(
f"[detect_raw] Converted tensor_input to numpy array: shape {tensor_input.shape}"
)
# Preprocess the tensor input using Frigate's preprocess function
processed_tensor = preprocess(
tensor_input, (1, self.h8l_model_height, self.h8l_model_width, 3), np.uint8
)
logger.debug(
f"[detect_raw] Tensor data and shape after preprocessing: {processed_tensor} {processed_tensor.shape}"
)
input_data = processed_tensor
logger.debug(
f"[detect_raw] Input data for inference shape: {processed_tensor.shape}, dtype: {processed_tensor.dtype}"
)
try:
with InferVStreams(
self.network_group,
self.input_vstreams_params,
self.output_vstreams_params,
) as infer_pipeline:
input_dict = {}
if isinstance(input_data, dict):
input_dict = input_data
logger.debug("[detect_raw] it a dictionary.")
elif isinstance(input_data, (list, tuple)):
for idx, layer_info in enumerate(self.input_vstream_info):
input_dict[layer_info.name] = input_data[idx]
logger.debug("[detect_raw] converted from list/tuple.")
else:
if len(input_data.shape) == 3:
input_data = np.expand_dims(input_data, axis=0)
logger.debug("[detect_raw] converted from an array.")
input_dict[self.input_vstream_info[0].name] = input_data
logger.debug(
f"[detect_raw] Input dictionary for inference keys: {input_dict.keys()}"
)
with self.network_group.activate(self.network_group_params):
raw_output = infer_pipeline.infer(input_dict)
logger.debug(f"[detect_raw] Raw inference output: {raw_output}")
if self.output_vstream_info[0].name not in raw_output:
logger.error(
f"[detect_raw] Missing output stream {self.output_vstream_info[0].name} in inference results"
)
return np.zeros((20, 6), np.float32)
raw_output = raw_output[self.output_vstream_info[0].name][0]
logger.debug(
f"[detect_raw] Raw output for stream {self.output_vstream_info[0].name}: {raw_output}"
)
# Process the raw output
detections = self.process_detections(raw_output)
if len(detections) == 0:
logger.debug(
"[detect_raw] No detections found after processing. Setting default values."
)
return np.zeros((20, 6), np.float32)
else:
formatted_detections = detections
if (
formatted_detections.shape[1] != 6
): # Ensure the formatted detections have 6 columns
logger.error(
f"[detect_raw] Unexpected shape for formatted detections: {formatted_detections.shape}. Expected (20, 6)."
)
return np.zeros((20, 6), np.float32)
return formatted_detections
except HailoRTException as e:
logger.error(f"[detect_raw] HailoRTException during inference: {e}")
return np.zeros((20, 6), np.float32)
except Exception as e:
logger.error(f"[detect_raw] Exception during inference: {e}")
return np.zeros((20, 6), np.float32)
finally:
logger.debug("[detect_raw] Exiting function")
def process_detections(self, raw_detections, threshold=0.5):
boxes, scores, classes = [], [], []
num_detections = 0
logger.debug(f"[process_detections] Raw detections: {raw_detections}")
for i, detection_set in enumerate(raw_detections):
if not isinstance(detection_set, np.ndarray) or detection_set.size == 0:
logger.debug(
f"[process_detections] Detection set {i} is empty or not an array, skipping."
)
continue
logger.debug(
f"[process_detections] Detection set {i} shape: {detection_set.shape}"
)
for detection in detection_set:
if detection.shape[0] == 0:
logger.debug(
f"[process_detections] Detection in set {i} is empty, skipping."
)
continue
ymin, xmin, ymax, xmax = detection[:4]
score = np.clip(detection[4], 0, 1) # Use np.clip for clarity
if score < threshold:
logger.debug(
f"[process_detections] Detection in set {i} has a score {score} below threshold {threshold}. Skipping."
)
continue
logger.debug(
f"[process_detections] Adding detection with coordinates: ({xmin}, {ymin}), ({xmax}, {ymax}) and score: {score}"
)
boxes.append([ymin, xmin, ymax, xmax])
scores.append(score)
classes.append(i)
num_detections += 1
logger.debug(
f"[process_detections] Boxes: {boxes}, Scores: {scores}, Classes: {classes}, Num detections: {num_detections}"
)
if num_detections == 0:
logger.debug("[process_detections] No valid detections found.")
return np.zeros((20, 6), np.float32)
combined = np.hstack(
(
np.array(classes)[:, np.newaxis],
np.array(scores)[:, np.newaxis],
np.array(boxes),
)
)
if combined.shape[0] < 20:
padding = np.zeros(
(20 - combined.shape[0], combined.shape[1]), dtype=combined.dtype
)
combined = np.vstack((combined, padding))
logger.debug(
f"[process_detections] Combined detections (padded to 20 if necessary): {np.array_str(combined, precision=4, suppress_small=True)}"
)
return combined[:20, :6]