Mirror of https://github.com/blakeblackshear/frigate.git (synced 2025-08-18 13:47:20 +02:00)
Commit 01e2d209d0
@@ -37,42 +37,57 @@
         "onAutoForward": "silent"
       }
     },
-    "extensions": [
-      "ms-python.vscode-pylance",
-      "ms-python.python",
-      "visualstudioexptteam.vscodeintellicode",
-      "mhutchie.git-graph",
-      "ms-azuretools.vscode-docker",
-      "streetsidesoftware.code-spell-checker",
-      "esbenp.prettier-vscode",
-      "dbaeumer.vscode-eslint",
-      "mikestead.dotenv",
-      "csstools.postcss",
-      "blanu.vscode-styled-jsx",
-      "bradlc.vscode-tailwindcss"
-    ],
-    "settings": {
-      "remote.autoForwardPorts": false,
-      "python.linting.pylintEnabled": true,
-      "python.linting.enabled": true,
-      "python.formatting.provider": "black",
-      "python.languageServer": "Pylance",
-      "editor.formatOnPaste": false,
-      "editor.formatOnSave": true,
-      "editor.formatOnType": true,
-      "python.testing.pytestEnabled": false,
-      "python.testing.unittestEnabled": true,
-      "python.testing.unittestArgs": ["-v", "-s", "./frigate/test"],
-      "files.trimTrailingWhitespace": true,
-      "eslint.workingDirectories": ["./web"],
-      "[json][jsonc]": {
-        "editor.defaultFormatter": "esbenp.prettier-vscode"
-      },
-      "[jsx][js][tsx][ts]": {
-        "editor.codeActionsOnSave": ["source.addMissingImports", "source.fixAll"],
-        "editor.tabSize": 2
-      },
-      "cSpell.ignoreWords": ["rtmp"],
-      "cSpell.words": ["preact"]
+    "customizations": {
+      "vscode": {
+        "extensions": [
+          "ms-python.python",
+          "ms-python.vscode-pylance",
+          "visualstudioexptteam.vscodeintellicode",
+          "mhutchie.git-graph",
+          "ms-azuretools.vscode-docker",
+          "streetsidesoftware.code-spell-checker",
+          "esbenp.prettier-vscode",
+          "dbaeumer.vscode-eslint",
+          "mikestead.dotenv",
+          "csstools.postcss",
+          "blanu.vscode-styled-jsx",
+          "bradlc.vscode-tailwindcss",
+          "charliermarsh.ruff"
+        ],
+        "settings": {
+          "remote.autoForwardPorts": false,
+          "python.formatting.provider": "none",
+          "python.languageServer": "Pylance",
+          "editor.formatOnPaste": false,
+          "editor.formatOnSave": true,
+          "editor.formatOnType": true,
+          "python.testing.pytestEnabled": false,
+          "python.testing.unittestEnabled": true,
+          "python.testing.unittestArgs": ["-v", "-s", "./frigate/test"],
+          "files.trimTrailingWhitespace": true,
+          "eslint.workingDirectories": ["./web"],
+          "isort.args": ["--settings-path=./pyproject.toml"],
+          "[python]": {
+            "editor.defaultFormatter": "charliermarsh.ruff",
+            "editor.formatOnSave": true,
+            "editor.codeActionsOnSave": {
+              "source.fixAll": true,
+              "source.organizeImports": true
+            }
+          },
+          "[json][jsonc]": {
+            "editor.defaultFormatter": "esbenp.prettier-vscode"
+          },
+          "[jsx][js][tsx][ts]": {
+            "editor.codeActionsOnSave": [
+              "source.addMissingImports",
+              "source.fixAll"
+            ],
+            "editor.tabSize": 2
+          },
+          "cSpell.ignoreWords": ["rtmp"],
+          "cSpell.words": ["preact", "astype", "hwaccel", "mqtt"]
+        }
+      }
     }
   }
 }
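The new devcontainer settings hand Python formatting, linting, and import sorting to Ruff and isort instead of Black and pylint. A rough local equivalent of those editor actions, assuming ruff and isort are installed from the dev requirements (a sketch, not part of the diff):

    # Approximate what the editor does on save, per the settings above.
    ruff format frigate migrations docker            # formatting (replaces black)
    ruff check --fix frigate migrations docker       # lint + "source.fixAll"
    isort --settings-path=./pyproject.toml frigate   # "source.organizeImports"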
@@ -2,12 +2,23 @@
 set -euxo pipefail
 
+# Cleanup the old github host key
+sed -i -e '/AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31\/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi\/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==/d' ~/.ssh/known_hosts
+# Add new github host key
+curl -L https://api.github.com/meta | jq -r '.ssh_keys | .[]' | \
+    sed -e 's/^/github.com /' >> ~/.ssh/known_hosts
+
 # Frigate normal container runs as root, so it have permission to create
 # the folders. But the devcontainer runs as the host user, so we need to
 # create the folders and give the host user permission to write to them.
 sudo mkdir -p /media/frigate
 sudo chown -R "$(id -u):$(id -g)" /media/frigate
 
+# When started as a service, LIBAVFORMAT_VERSION_MAJOR is defined in the
+# s6 service file. For dev, where frigate is started from an interactive
+# shell, we define it in .bashrc instead.
+echo 'export LIBAVFORMAT_VERSION_MAJOR=$(ffmpeg -version | grep -Po "libavformat\W+\K\d+")' >> $HOME/.bashrc
+
 make version
 
 cd web
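The grep expression added above extracts the libavformat major version from the ffmpeg banner. An illustration of how it behaves (the sample output is an assumption based on a typical Debian 11 ffmpeg build, not taken from the diff):

    # `ffmpeg -version` prints a line like "libavformat    58. 76.100 / 58. 76.100";
    # \W+ skips the whitespace and \K discards everything matched so far,
    # so only the leading major-version digits are returned.
    ffmpeg -version | grep -Po "libavformat\W+\K\d+"
    # -> 58   (for example)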
.github/actions/setup/action.yml (vendored, new file, 39 lines)
@@ -0,0 +1,39 @@
+name: 'Setup'
+description: 'Set up QEMU and Buildx'
+inputs:
+  GITHUB_TOKEN:
+    required: true
+outputs:
+  image-name:
+    value: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ github.ref_name }}-${{ steps.create-short-sha.outputs.SHORT_SHA }}
+  cache-name:
+    value: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:cache
+runs:
+  using: "composite"
+  steps:
+    - name: Remove unnecessary files
+      run: |
+        sudo rm -rf /usr/share/dotnet
+        sudo rm -rf /usr/local/lib/android
+        sudo rm -rf /opt/ghc
+      shell: bash
+    - id: lowercaseRepo
+      uses: ASzc/change-string-case-action@v5
+      with:
+        string: ${{ github.repository }}
+    - name: Set up QEMU
+      uses: docker/setup-qemu-action@v2
+    - name: Set up Docker Buildx
+      uses: docker/setup-buildx-action@v2
+    - name: Log in to the Container registry
+      uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc
+      with:
+        registry: ghcr.io
+        username: ${{ github.actor }}
+        password: ${{ inputs.GITHUB_TOKEN }}
+    - name: Create version file
+      run: make version
+      shell: bash
+    - id: create-short-sha
+      run: echo "SHORT_SHA=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
+      shell: bash
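The composite action's image-name output combines the lowercased repository, the branch name, and a 7-character short SHA. A shell sketch of the same naming scheme, assuming it runs inside GitHub Actions where these variables exist (the example values are illustrative):

    # Sketch of the tag naming produced by the action's outputs.
    IMAGE_NAME="ghcr.io/${GITHUB_REPOSITORY,,}:${GITHUB_REF_NAME}-${GITHUB_SHA::7}"
    CACHE_NAME="ghcr.io/${GITHUB_REPOSITORY,,}:cache"
    echo "$IMAGE_NAME"   # e.g. ghcr.io/blakeblackshear/frigate:dev-01e2d20
    echo "$CACHE_NAME"   # e.g. ghcr.io/blakeblackshear/frigate:cache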
.github/dependabot.yml (vendored, 8 lines changed)
@@ -13,7 +13,13 @@ updates:
     open-pull-requests-limit: 10
     target-branch: dev
   - package-ecosystem: "pip"
-    directory: "/"
+    directory: "/docker/main"
+    schedule:
+      interval: daily
+    open-pull-requests-limit: 10
+    target-branch: dev
+  - package-ecosystem: "pip"
+    directory: "/docker/tensorrt"
     schedule:
       interval: daily
     open-pull-requests-limit: 10
.github/workflows/ci.yml (vendored, 176 lines changed)
@@ -1,6 +1,7 @@
 name: CI
 
 on:
+  workflow_dispatch:
   push:
     branches:
       - dev
@ -15,53 +16,154 @@ env:
|
|||||||
PYTHON_VERSION: 3.9
|
PYTHON_VERSION: 3.9
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
multi_arch_build:
|
amd64_build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
name: Image Build
|
name: AMD64 Build
|
||||||
|
steps:
|
||||||
|
- name: Check out code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- name: Set up QEMU and Buildx
|
||||||
|
id: setup
|
||||||
|
uses: ./.github/actions/setup
|
||||||
|
with:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
- name: Build and push amd64 standard build
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: docker/main/Dockerfile
|
||||||
|
push: true
|
||||||
|
platforms: linux/amd64
|
||||||
|
target: frigate
|
||||||
|
tags: ${{ steps.setup.outputs.image-name }}-amd64
|
||||||
|
cache-from: type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64
|
||||||
|
- name: Build and push TensorRT (x86 GPU)
|
||||||
|
uses: docker/bake-action@v4
|
||||||
|
with:
|
||||||
|
push: true
|
||||||
|
targets: tensorrt
|
||||||
|
files: docker/tensorrt/trt.hcl
|
||||||
|
set: |
|
||||||
|
tensorrt.tags=${{ steps.setup.outputs.image-name }}-tensorrt
|
||||||
|
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64
|
||||||
|
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64,mode=max
|
||||||
|
arm64_build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: ARM Build
|
||||||
|
steps:
|
||||||
|
- name: Check out code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- name: Set up QEMU and Buildx
|
||||||
|
id: setup
|
||||||
|
uses: ./.github/actions/setup
|
||||||
|
with:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
- name: Build and push arm64 standard build
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: docker/main/Dockerfile
|
||||||
|
push: true
|
||||||
|
platforms: linux/arm64
|
||||||
|
target: frigate
|
||||||
|
tags: |
|
||||||
|
${{ steps.setup.outputs.image-name }}-standard-arm64
|
||||||
|
cache-from: type=registry,ref=${{ steps.setup.outputs.cache-name }}-arm64
|
||||||
|
- name: Build and push RPi build
|
||||||
|
uses: docker/bake-action@v4
|
||||||
|
with:
|
||||||
|
push: true
|
||||||
|
targets: rpi
|
||||||
|
files: docker/rpi/rpi.hcl
|
||||||
|
set: |
|
||||||
|
rpi.tags=${{ steps.setup.outputs.image-name }}-rpi
|
||||||
|
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-arm64
|
||||||
|
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-arm64,mode=max
|
||||||
|
- name: Build and push RockChip build
|
||||||
|
uses: docker/bake-action@v3
|
||||||
|
with:
|
||||||
|
push: true
|
||||||
|
targets: rk
|
||||||
|
files: docker/rockchip/rk.hcl
|
||||||
|
set: |
|
||||||
|
rk.tags=${{ steps.setup.outputs.image-name }}-rk
|
||||||
|
*.cache-from=type=gha
|
||||||
|
jetson_jp4_build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: Jetson Jetpack 4
|
||||||
|
steps:
|
||||||
|
- name: Check out code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- name: Set up QEMU and Buildx
|
||||||
|
id: setup
|
||||||
|
uses: ./.github/actions/setup
|
||||||
|
with:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
- name: Build and push TensorRT (Jetson, Jetpack 4)
|
||||||
|
env:
|
||||||
|
ARCH: arm64
|
||||||
|
BASE_IMAGE: timongentzsch/l4t-ubuntu20-opencv:latest
|
||||||
|
SLIM_BASE: timongentzsch/l4t-ubuntu20-opencv:latest
|
||||||
|
TRT_BASE: timongentzsch/l4t-ubuntu20-opencv:latest
|
||||||
|
uses: docker/bake-action@v4
|
||||||
|
with:
|
||||||
|
push: true
|
||||||
|
targets: tensorrt
|
||||||
|
files: docker/tensorrt/trt.hcl
|
||||||
|
set: |
|
||||||
|
tensorrt.tags=${{ steps.setup.outputs.image-name }}-tensorrt-jp4
|
||||||
|
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-jp4
|
||||||
|
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-jp4,mode=max
|
||||||
|
jetson_jp5_build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: Jetson Jetpack 5
|
||||||
|
steps:
|
||||||
|
- name: Check out code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- name: Set up QEMU and Buildx
|
||||||
|
id: setup
|
||||||
|
uses: ./.github/actions/setup
|
||||||
|
with:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
- name: Build and push TensorRT (Jetson, Jetpack 5)
|
||||||
|
env:
|
||||||
|
ARCH: arm64
|
||||||
|
BASE_IMAGE: nvcr.io/nvidia/l4t-tensorrt:r8.5.2-runtime
|
||||||
|
SLIM_BASE: nvcr.io/nvidia/l4t-tensorrt:r8.5.2-runtime
|
||||||
|
TRT_BASE: nvcr.io/nvidia/l4t-tensorrt:r8.5.2-runtime
|
||||||
|
uses: docker/bake-action@v4
|
||||||
|
with:
|
||||||
|
push: true
|
||||||
|
targets: tensorrt
|
||||||
|
files: docker/tensorrt/trt.hcl
|
||||||
|
set: |
|
||||||
|
tensorrt.tags=${{ steps.setup.outputs.image-name }}-tensorrt-jp5
|
||||||
|
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-jp5
|
||||||
|
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-jp5,mode=max
|
||||||
|
# The majority of users running arm64 are rpi users, so the rpi
|
||||||
|
# build should be the primary arm64 image
|
||||||
|
assemble_default_build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: Assemble and push default build
|
||||||
|
needs:
|
||||||
|
- amd64_build
|
||||||
|
- arm64_build
|
||||||
steps:
|
steps:
|
||||||
- name: Remove unnecessary files
|
|
||||||
run: |
|
|
||||||
sudo rm -rf /usr/share/dotnet
|
|
||||||
sudo rm -rf /usr/local/lib/android
|
|
||||||
sudo rm -rf /opt/ghc
|
|
||||||
- id: lowercaseRepo
|
- id: lowercaseRepo
|
||||||
uses: ASzc/change-string-case-action@v5
|
uses: ASzc/change-string-case-action@v6
|
||||||
with:
|
with:
|
||||||
string: ${{ github.repository }}
|
string: ${{ github.repository }}
|
||||||
- name: Check out code
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
- name: Set up QEMU
|
|
||||||
uses: docker/setup-qemu-action@v2
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v2
|
|
||||||
- name: Log in to the Container registry
|
- name: Log in to the Container registry
|
||||||
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a
|
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
- name: Create version file
|
|
||||||
run: make version
|
|
||||||
- name: Create short sha
|
- name: Create short sha
|
||||||
run: echo "SHORT_SHA=${GITHUB_SHA::7}" >> $GITHUB_ENV
|
run: echo "SHORT_SHA=${GITHUB_SHA::7}" >> $GITHUB_ENV
|
||||||
- name: Build and push
|
- uses: int128/docker-manifest-create-action@v1
|
||||||
uses: docker/build-push-action@v3
|
|
||||||
with:
|
with:
|
||||||
context: .
|
tags: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ github.ref_name }}-${{ env.SHORT_SHA }}
|
||||||
push: true
|
suffixes: |
|
||||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
-amd64
|
||||||
target: frigate
|
-rpi
|
||||||
tags: |
|
|
||||||
ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ github.ref_name }}-${{ env.SHORT_SHA }}
|
|
||||||
cache-from: type=gha
|
|
||||||
cache-to: type=gha,mode=max
|
|
||||||
- name: Build and push TensorRT
|
|
||||||
uses: docker/build-push-action@v3
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
push: true
|
|
||||||
platforms: linux/amd64
|
|
||||||
target: frigate-tensorrt
|
|
||||||
tags: |
|
|
||||||
ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ github.ref_name }}-${{ env.SHORT_SHA }}-tensorrt
|
|
||||||
cache-from: type=gha
|
|
||||||
|
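Each board-specific step above drives docker buildx bake against an HCL file and pushes per-architecture registry caches. A rough local equivalent of one such step, assuming a logged-in registry session (tag and cache refs here are illustrative, not taken from the workflow outputs):

    # Approximate the "Build and push TensorRT (x86 GPU)" bake step locally.
    docker buildx bake tensorrt \
        --file docker/tensorrt/trt.hcl \
        --set tensorrt.tags=ghcr.io/blakeblackshear/frigate:dev-01e2d20-tensorrt \
        --set "*.cache-from=type=registry,ref=ghcr.io/blakeblackshear/frigate:cache-amd64" \
        --push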
.github/workflows/dependabot-auto-merge.yaml (vendored, 4 lines changed)
@@ -16,7 +16,9 @@ jobs:
           github-token: ${{ secrets.GITHUB_TOKEN }}
       - name: Enable auto-merge for Dependabot PRs
         if: steps.metadata.outputs.dependency-type == 'direct:development' && (steps.metadata.outputs.update-type == 'version-update:semver-minor' || steps.metadata.outputs.update-type == 'version-update:semver-patch')
-        run: gh pr merge --auto --squash "$PR_URL"
+        run: |
+          gh pr review --approve "$PR_URL"
+          gh pr merge --auto --squash "$PR_URL"
         env:
           PR_URL: ${{ github.event.pull_request.html_url }}
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/pull_request.yml (vendored, 27 lines changed)
@@ -15,7 +15,7 @@ jobs:
     env:
       DOCKER_BUILDKIT: "1"
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: actions/setup-node@master
         with:
           node-version: 16.x
@@ -34,7 +34,7 @@ jobs:
     name: Web - Lint
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: actions/setup-node@master
         with:
           node-version: 16.x
@@ -48,7 +48,7 @@ jobs:
     name: Web - Test
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: actions/setup-node@master
         with:
           node-version: 16.x
@@ -63,25 +63,28 @@ jobs:
     name: Python Checks
     steps:
       - name: Check out the repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v4.5.0
+        uses: actions/setup-python@v5.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Install requirements
         run: |
-          pip install pip
-          pip install -r requirements-dev.txt
-      - name: Lint
+          python3 -m pip install -U pip
+          python3 -m pip install -r docker/main/requirements-dev.txt
+      - name: Check formatting
         run: |
-          python3 -m black frigate --check
+          ruff format --check --diff frigate migrations docker *.py
+      - name: Check lint
+        run: |
+          ruff check frigate migrations docker *.py
 
   python_tests:
     runs-on: ubuntu-latest
     name: Python Tests
     steps:
       - name: Check out code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - uses: actions/setup-node@master
         with:
           node-version: 16.x
@@ -91,9 +94,9 @@ jobs:
         run: npm run build
         working-directory: ./web
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
       - name: Build
         run: make
       - name: Run mypy
.github/workflows/release.yml (vendored, new file, 37 lines)
@@ -0,0 +1,37 @@
+name: On release
+
+on:
+  workflow_dispatch:
+  release:
+    types: [published]
+
+jobs:
+  release:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - id: lowercaseRepo
+        uses: ASzc/change-string-case-action@v6
+        with:
+          string: ${{ github.repository }}
+      - name: Log in to the Container registry
+        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Create tag variables
+        run: |
+          BRANCH=$([[ "${{ github.ref_name }}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && echo "master" || echo "dev")
+          echo "BASE=ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}" >> $GITHUB_ENV
+          echo "BUILD_TAG=${BRANCH}-${GITHUB_SHA::7}" >> $GITHUB_ENV
+          echo "CLEAN_VERSION=$(echo ${GITHUB_REF##*/} | tr '[:upper:]' '[:lower:]' | sed 's/^[v]//')" >> $GITHUB_ENV
+      - name: Tag and push the main image
+        run: |
+          VERSION_TAG=${BASE}:${CLEAN_VERSION}
+          PULL_TAG=${BASE}:${BUILD_TAG}
+          docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG} docker://${VERSION_TAG}
+          for variant in standard-arm64 tensorrt tensorrt-jp4 tensorrt-jp5 rk; do
+            docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG}-${variant} docker://${VERSION_TAG}-${variant}
+          done
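The release job never rebuilds images: it retags the already-pushed CI images with skopeo's multi-arch copy. A sketch of the tag math in the "Create tag variables" step, with illustrative values rather than real release tags:

    # How the branch and clean version are derived from the release tag.
    GITHUB_REF_NAME="v0.13.0"
    BRANCH=$([[ "$GITHUB_REF_NAME" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && echo "master" || echo "dev")
    echo "$BRANCH"          # -> master (a pre-release such as v0.13.0-beta3 would give dev)
    CLEAN_VERSION=$(echo "$GITHUB_REF_NAME" | tr '[:upper:]' '[:lower:]' | sed 's/^[v]//')
    echo "$CLEAN_VERSION"   # -> 0.13.0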
.gitignore (vendored, 3 lines changed)
@@ -4,7 +4,8 @@
 debug
 .vscode/*
 !.vscode/launch.json
-config/config.yml
+config/*
+!config/*.example
 models
 *.mp4
 *.ts
CODEOWNERS (new file, 6 lines)
@@ -0,0 +1,6 @@
+# Community-supported boards
+/docker/tensorrt/ @madsciencetist @NateMeyer
+/docker/tensorrt/*arm64* @madsciencetist
+/docker/tensorrt/*jetson* @madsciencetist
+
+/docker/rockchip/ @MarcA711
Makefile (34 lines changed)
@@ -1,39 +1,39 @@
 default_target: local
 
 COMMIT_HASH := $(shell git log -1 --pretty=format:"%h"|tail -1)
-VERSION = 0.12.1
+VERSION = 0.13.0
 IMAGE_REPO ?= ghcr.io/blakeblackshear/frigate
+GITHUB_REF_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
 CURRENT_UID := $(shell id -u)
 CURRENT_GID := $(shell id -g)
+BOARDS= #Initialized empty
+
+include docker/*/*.mk
+
+build-boards: $(BOARDS:%=build-%)
+
+push-boards: $(BOARDS:%=push-%)
+
 version:
 	echo 'VERSION = "$(VERSION)-$(COMMIT_HASH)"' > frigate/version.py
 
 local: version
-	docker buildx build --target=frigate --tag frigate:latest --load .
+	docker buildx build --target=frigate --tag frigate:latest --load --file docker/main/Dockerfile .
 
-local-trt: version
-	docker buildx build --target=frigate-tensorrt --tag frigate:latest-tensorrt --load .
-
 amd64:
-	docker buildx build --platform linux/amd64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) .
-	docker buildx build --platform linux/amd64 --target=frigate-tensorrt --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH)-tensorrt .
+	docker buildx build --platform linux/amd64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) --file docker/main/Dockerfile .
 
 arm64:
-	docker buildx build --platform linux/arm64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) .
+	docker buildx build --platform linux/arm64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) --file docker/main/Dockerfile .
 
-armv7:
-	docker buildx build --platform linux/arm/v7 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) .
+build: version amd64 arm64
+	docker buildx build --platform linux/arm64/v8,linux/amd64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) --file docker/main/Dockerfile .
 
-build: version amd64 arm64 armv7
-	docker buildx build --platform linux/arm/v7,linux/arm64/v8,linux/amd64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) .
+push: push-boards
+	docker buildx build --push --platform linux/arm64/v8,linux/amd64 --target=frigate --tag $(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH) --file docker/main/Dockerfile .
 
-push: build
-	docker buildx build --push --platform linux/arm/v7,linux/arm64/v8,linux/amd64 --target=frigate --tag $(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH) .
-	docker buildx build --push --platform linux/amd64 --target=frigate-tensorrt --tag $(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-tensorrt .
 
 run: local
-	docker run --rm --publish=5000:5000 --volume=${PWD}/config/config.yml:/config/config.yml frigate:latest
+	docker run --rm --publish=5000:5000 --volume=${PWD}/config:/config frigate:latest
 
 run_tests: local
 	docker run --rm --workdir=/opt/frigate --entrypoint= frigate:latest python3 -u -m unittest
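Typical local usage of the Makefile targets above, as a sketch; the board-specific targets are populated by the docker/*/*.mk files, which are not shown in this diff:

    make local        # build frigate:latest from docker/main/Dockerfile
    make run          # run it with ./config mounted at /config
    make run_tests    # run the unittest suite inside the built image
    make build-boards # build every board image registered via BOARDS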
audio-labelmap.txt (new file, 521 lines)
@@ -0,0 +1,521 @@
|
speech
|
||||||
|
speech
|
||||||
|
speech
|
||||||
|
speech
|
||||||
|
babbling
|
||||||
|
speech
|
||||||
|
yell
|
||||||
|
bellow
|
||||||
|
whoop
|
||||||
|
yell
|
||||||
|
yell
|
||||||
|
yell
|
||||||
|
whispering
|
||||||
|
laughter
|
||||||
|
laughter
|
||||||
|
laughter
|
||||||
|
snicker
|
||||||
|
laughter
|
||||||
|
laughter
|
||||||
|
crying
|
||||||
|
crying
|
||||||
|
crying
|
||||||
|
yell
|
||||||
|
sigh
|
||||||
|
singing
|
||||||
|
choir
|
||||||
|
sodeling
|
||||||
|
chant
|
||||||
|
mantra
|
||||||
|
child_singing
|
||||||
|
synthetic_singing
|
||||||
|
rapping
|
||||||
|
humming
|
||||||
|
groan
|
||||||
|
grunt
|
||||||
|
whistling
|
||||||
|
breathing
|
||||||
|
wheeze
|
||||||
|
snoring
|
||||||
|
gasp
|
||||||
|
pant
|
||||||
|
snort
|
||||||
|
cough
|
||||||
|
throat_clearing
|
||||||
|
sneeze
|
||||||
|
sniff
|
||||||
|
run
|
||||||
|
shuffle
|
||||||
|
footsteps
|
||||||
|
chewing
|
||||||
|
biting
|
||||||
|
gargling
|
||||||
|
stomach_rumble
|
||||||
|
burping
|
||||||
|
hiccup
|
||||||
|
fart
|
||||||
|
hands
|
||||||
|
finger_snapping
|
||||||
|
clapping
|
||||||
|
heartbeat
|
||||||
|
heart_murmur
|
||||||
|
cheering
|
||||||
|
applause
|
||||||
|
chatter
|
||||||
|
crowd
|
||||||
|
speech
|
||||||
|
children_playing
|
||||||
|
animal
|
||||||
|
pets
|
||||||
|
dog
|
||||||
|
bark
|
||||||
|
yip
|
||||||
|
howl
|
||||||
|
bow-wow
|
||||||
|
growling
|
||||||
|
whimper_dog
|
||||||
|
cat
|
||||||
|
purr
|
||||||
|
meow
|
||||||
|
hiss
|
||||||
|
caterwaul
|
||||||
|
livestock
|
||||||
|
horse
|
||||||
|
clip-clop
|
||||||
|
neigh
|
||||||
|
cattle
|
||||||
|
moo
|
||||||
|
cowbell
|
||||||
|
pig
|
||||||
|
oink
|
||||||
|
goat
|
||||||
|
bleat
|
||||||
|
sheep
|
||||||
|
fowl
|
||||||
|
chicken
|
||||||
|
cluck
|
||||||
|
cock-a-doodle-doo
|
||||||
|
turkey
|
||||||
|
gobble
|
||||||
|
duck
|
||||||
|
quack
|
||||||
|
goose
|
||||||
|
honk
|
||||||
|
wild_animals
|
||||||
|
roaring_cats
|
||||||
|
roar
|
||||||
|
bird
|
||||||
|
chird
|
||||||
|
chirp
|
||||||
|
squawk
|
||||||
|
pigeon
|
||||||
|
coo
|
||||||
|
crow
|
||||||
|
caw
|
||||||
|
owl
|
||||||
|
hoot
|
||||||
|
flapping_wings
|
||||||
|
dogs
|
||||||
|
rats
|
||||||
|
mouse
|
||||||
|
patter
|
||||||
|
insect
|
||||||
|
cricket
|
||||||
|
mosquito
|
||||||
|
fly
|
||||||
|
buzz
|
||||||
|
buzz
|
||||||
|
frog
|
||||||
|
croak
|
||||||
|
snake
|
||||||
|
rattle
|
||||||
|
whale_vocalization
|
||||||
|
music
|
||||||
|
musical_instrument
|
||||||
|
plucked_string_instrument
|
||||||
|
guitar
|
||||||
|
electric_guitar
|
||||||
|
bass_guitar
|
||||||
|
acoustic_guitar
|
||||||
|
steel_guitar
|
||||||
|
tapping
|
||||||
|
strum
|
||||||
|
banjo
|
||||||
|
sitar
|
||||||
|
mandolin
|
||||||
|
zither
|
||||||
|
ukulele
|
||||||
|
keyboard
|
||||||
|
piano
|
||||||
|
electric_piano
|
||||||
|
organ
|
||||||
|
electronic_organ
|
||||||
|
hammond_organ
|
||||||
|
synthesizer
|
||||||
|
sampler
|
||||||
|
harpsichord
|
||||||
|
percussion
|
||||||
|
drum_kit
|
||||||
|
drum_machine
|
||||||
|
drum
|
||||||
|
snare_drum
|
||||||
|
rimshot
|
||||||
|
drum_roll
|
||||||
|
bass_drum
|
||||||
|
timpani
|
||||||
|
tabla
|
||||||
|
cymbal
|
||||||
|
hi-hat
|
||||||
|
wood_block
|
||||||
|
tambourine
|
||||||
|
rattle
|
||||||
|
maraca
|
||||||
|
gong
|
||||||
|
tubular_bells
|
||||||
|
mallet_percussion
|
||||||
|
marimba
|
||||||
|
glockenspiel
|
||||||
|
vibraphone
|
||||||
|
steelpan
|
||||||
|
orchestra
|
||||||
|
brass_instrument
|
||||||
|
french_horn
|
||||||
|
trumpet
|
||||||
|
trombone
|
||||||
|
bowed_string_instrument
|
||||||
|
string_section
|
||||||
|
violin
|
||||||
|
pizzicato
|
||||||
|
cello
|
||||||
|
double_bass
|
||||||
|
wind_instrument
|
||||||
|
flute
|
||||||
|
saxophone
|
||||||
|
clarinet
|
||||||
|
harp
|
||||||
|
bell
|
||||||
|
church_bell
|
||||||
|
jingle_bell
|
||||||
|
bicycle_bell
|
||||||
|
tuning_fork
|
||||||
|
chime
|
||||||
|
wind_chime
|
||||||
|
change_ringing
|
||||||
|
harmonica
|
||||||
|
accordion
|
||||||
|
bagpipes
|
||||||
|
didgeridoo
|
||||||
|
shofar
|
||||||
|
theremin
|
||||||
|
singing_bowl
|
||||||
|
scratching
|
||||||
|
pop_music
|
||||||
|
hip_hop_music
|
||||||
|
beatboxing
|
||||||
|
rock_music
|
||||||
|
heavy_metal
|
||||||
|
punk_rock
|
||||||
|
grunge
|
||||||
|
progressive_rock
|
||||||
|
rock_and_roll
|
||||||
|
psychedelic_rock
|
||||||
|
rhythm_and_blues
|
||||||
|
soul_music
|
||||||
|
reggae
|
||||||
|
country
|
||||||
|
swing_music
|
||||||
|
bluegrass
|
||||||
|
funk
|
||||||
|
folk_music
|
||||||
|
middle_eastern_music
|
||||||
|
jazz
|
||||||
|
disco
|
||||||
|
classical_music
|
||||||
|
opera
|
||||||
|
electronic_music
|
||||||
|
house_music
|
||||||
|
techno
|
||||||
|
dubstep
|
||||||
|
drum_and_bass
|
||||||
|
electronica
|
||||||
|
electronic_dance_music
|
||||||
|
ambient_music
|
||||||
|
trance_music
|
||||||
|
music_of_latin_america
|
||||||
|
salsa_music
|
||||||
|
flamenco
|
||||||
|
blues
|
||||||
|
music_for_children
|
||||||
|
new-age_music
|
||||||
|
vocal_music
|
||||||
|
a_capella
|
||||||
|
music_of_africa
|
||||||
|
afrobeat
|
||||||
|
christian_music
|
||||||
|
gospel_music
|
||||||
|
music_of_asia
|
||||||
|
carnatic_music
|
||||||
|
music_of_bollywood
|
||||||
|
ska
|
||||||
|
traditional_music
|
||||||
|
independent_music
|
||||||
|
song
|
||||||
|
background_music
|
||||||
|
theme_music
|
||||||
|
jingle
|
||||||
|
soundtrack_music
|
||||||
|
lullaby
|
||||||
|
video_game_music
|
||||||
|
christmas_music
|
||||||
|
dance_music
|
||||||
|
wedding_music
|
||||||
|
happy_music
|
||||||
|
sad_music
|
||||||
|
tender_music
|
||||||
|
exciting_music
|
||||||
|
angry_music
|
||||||
|
scary_music
|
||||||
|
wind
|
||||||
|
rustling_leaves
|
||||||
|
wind_noise
|
||||||
|
thunderstorm
|
||||||
|
thunder
|
||||||
|
water
|
||||||
|
rain
|
||||||
|
raindrop
|
||||||
|
rain_on_surface
|
||||||
|
stream
|
||||||
|
waterfall
|
||||||
|
ocean
|
||||||
|
waves
|
||||||
|
steam
|
||||||
|
gurgling
|
||||||
|
fire
|
||||||
|
crackle
|
||||||
|
vehicle
|
||||||
|
boat
|
||||||
|
sailboat
|
||||||
|
rowboat
|
||||||
|
motorboat
|
||||||
|
ship
|
||||||
|
motor_vehicle
|
||||||
|
car
|
||||||
|
honk
|
||||||
|
toot
|
||||||
|
car_alarm
|
||||||
|
power_windows
|
||||||
|
skidding
|
||||||
|
tire_squeal
|
||||||
|
car_passing_by
|
||||||
|
race_car
|
||||||
|
truck
|
||||||
|
air_brake
|
||||||
|
air_horn
|
||||||
|
reversing_beeps
|
||||||
|
ice_cream_truck
|
||||||
|
bus
|
||||||
|
emergency_vehicle
|
||||||
|
police_car
|
||||||
|
ambulance
|
||||||
|
fire_engine
|
||||||
|
motorcycle
|
||||||
|
traffic_noise
|
||||||
|
rail_transport
|
||||||
|
train
|
||||||
|
train_whistle
|
||||||
|
train_horn
|
||||||
|
railroad_car
|
||||||
|
train_wheels_squealing
|
||||||
|
subway
|
||||||
|
aircraft
|
||||||
|
aircraft_engine
|
||||||
|
jet_engine
|
||||||
|
propeller
|
||||||
|
helicopter
|
||||||
|
fixed-wing_aircraft
|
||||||
|
bicycle
|
||||||
|
skateboard
|
||||||
|
engine
|
||||||
|
light_engine
|
||||||
|
dental_drill's_drill
|
||||||
|
lawn_mower
|
||||||
|
chainsaw
|
||||||
|
medium_engine
|
||||||
|
heavy_engine
|
||||||
|
engine_knocking
|
||||||
|
engine_starting
|
||||||
|
idling
|
||||||
|
accelerating
|
||||||
|
door
|
||||||
|
doorbell
|
||||||
|
ding-dong
|
||||||
|
sliding_door
|
||||||
|
slam
|
||||||
|
knock
|
||||||
|
tap
|
||||||
|
squeak
|
||||||
|
cupboard_open_or_close
|
||||||
|
drawer_open_or_close
|
||||||
|
dishes
|
||||||
|
cutlery
|
||||||
|
chopping
|
||||||
|
frying
|
||||||
|
microwave_oven
|
||||||
|
blender
|
||||||
|
water_tap
|
||||||
|
sink
|
||||||
|
bathtub
|
||||||
|
hair_dryer
|
||||||
|
toilet_flush
|
||||||
|
toothbrush
|
||||||
|
electric_toothbrush
|
||||||
|
vacuum_cleaner
|
||||||
|
zipper
|
||||||
|
keys_jangling
|
||||||
|
coin
|
||||||
|
scissors
|
||||||
|
electric_shaver
|
||||||
|
shuffling_cards
|
||||||
|
typing
|
||||||
|
typewriter
|
||||||
|
computer_keyboard
|
||||||
|
writing
|
||||||
|
alarm
|
||||||
|
telephone
|
||||||
|
telephone_bell_ringing
|
||||||
|
ringtone
|
||||||
|
telephone_dialing
|
||||||
|
dial_tone
|
||||||
|
busy_signal
|
||||||
|
alarm_clock
|
||||||
|
siren
|
||||||
|
civil_defense_siren
|
||||||
|
buzzer
|
||||||
|
smoke_detector
|
||||||
|
fire_alarm
|
||||||
|
foghorn
|
||||||
|
whistle
|
||||||
|
steam_whistle
|
||||||
|
mechanisms
|
||||||
|
ratchet
|
||||||
|
clock
|
||||||
|
tick
|
||||||
|
tick-tock
|
||||||
|
gears
|
||||||
|
pulleys
|
||||||
|
sewing_machine
|
||||||
|
mechanical_fan
|
||||||
|
air_conditioning
|
||||||
|
cash_register
|
||||||
|
printer
|
||||||
|
camera
|
||||||
|
single-lens_reflex_camera
|
||||||
|
tools
|
||||||
|
hammer
|
||||||
|
jackhammer
|
||||||
|
sawing
|
||||||
|
filing
|
||||||
|
sanding
|
||||||
|
power_tool
|
||||||
|
drill
|
||||||
|
explosion
|
||||||
|
gunshot
|
||||||
|
machine_gun
|
||||||
|
fusillade
|
||||||
|
artillery_fire
|
||||||
|
cap_gun
|
||||||
|
fireworks
|
||||||
|
firecracker
|
||||||
|
burst
|
||||||
|
eruption
|
||||||
|
boom
|
||||||
|
wood
|
||||||
|
chop
|
||||||
|
splinter
|
||||||
|
crack
|
||||||
|
glass
|
||||||
|
chink
|
||||||
|
shatter
|
||||||
|
liquid
|
||||||
|
splash
|
||||||
|
slosh
|
||||||
|
squish
|
||||||
|
drip
|
||||||
|
pour
|
||||||
|
trickle
|
||||||
|
gush
|
||||||
|
fill
|
||||||
|
spray
|
||||||
|
pump
|
||||||
|
stir
|
||||||
|
boiling
|
||||||
|
sonar
|
||||||
|
arrow
|
||||||
|
whoosh
|
||||||
|
thump
|
||||||
|
thunk
|
||||||
|
electronic_tuner
|
||||||
|
effects_unit
|
||||||
|
chorus_effect
|
||||||
|
basketball_bounce
|
||||||
|
bang
|
||||||
|
slap
|
||||||
|
whack
|
||||||
|
smash
|
||||||
|
breaking
|
||||||
|
bouncing
|
||||||
|
whip
|
||||||
|
flap
|
||||||
|
scratch
|
||||||
|
scrape
|
||||||
|
rub
|
||||||
|
roll
|
||||||
|
crushing
|
||||||
|
crumpling
|
||||||
|
tearing
|
||||||
|
beep
|
||||||
|
ping
|
||||||
|
ding
|
||||||
|
clang
|
||||||
|
squeal
|
||||||
|
creak
|
||||||
|
rustle
|
||||||
|
whir
|
||||||
|
clatter
|
||||||
|
sizzle
|
||||||
|
clicking
|
||||||
|
clickety-clack
|
||||||
|
rumble
|
||||||
|
plop
|
||||||
|
jingle
|
||||||
|
hum
|
||||||
|
zing
|
||||||
|
boing
|
||||||
|
crunch
|
||||||
|
silence
|
||||||
|
sine_wave
|
||||||
|
harmonic
|
||||||
|
chirp_tone
|
||||||
|
sound_effect
|
||||||
|
pulse
|
||||||
|
inside
|
||||||
|
inside
|
||||||
|
inside
|
||||||
|
outside
|
||||||
|
outside
|
||||||
|
reverberation
|
||||||
|
echo
|
||||||
|
noise
|
||||||
|
environmental_noise
|
||||||
|
static
|
||||||
|
mains_hum
|
||||||
|
distortion
|
||||||
|
sidetone
|
||||||
|
cacophony
|
||||||
|
white_noise
|
||||||
|
pink_noise
|
||||||
|
throbbing
|
||||||
|
vibration
|
||||||
|
television
|
||||||
|
radio
|
||||||
|
field_recording
|
benchmark.py (12 lines changed)
@@ -1,11 +1,11 @@
-import os
-from statistics import mean
-import multiprocessing as mp
-import numpy as np
 import datetime
+import multiprocessing as mp
+from statistics import mean
+
+import numpy as np
+
 from frigate.config import DetectorTypeEnum
 from frigate.object_detection import (
-    LocalObjectDetector,
     ObjectDetectProcess,
     RemoteObjectDetector,
     load_labels,
@@ -53,7 +53,7 @@ def start(id, num_detections, detection_queue, event):
     frame_times = []
     for x in range(0, num_detections):
         start_frame = datetime.datetime.now().timestamp()
-        detections = object_detector.detect(my_frame)
+        object_detector.detect(my_frame)
         frame_times.append(datetime.datetime.now().timestamp() - start_frame)
 
     duration = datetime.datetime.now().timestamp() - start
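Since the benchmark only measures detection latency, the return value of detect() is now discarded. One way to run it against a local build, mirroring the run_tests recipe in the Makefile (a sketch; it assumes a detector is reachable from inside the image):

    docker run --rm --workdir=/opt/frigate --entrypoint= frigate:latest \
        python3 -u benchmark.py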
benchmark_motion.py (new file, 118 lines)
@@ -0,0 +1,118 @@
|
import datetime
|
||||||
|
import multiprocessing as mp
|
||||||
|
import os
|
||||||
|
|
||||||
|
import cv2
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
from frigate.config import MotionConfig
|
||||||
|
from frigate.motion.improved_motion import ImprovedMotionDetector
|
||||||
|
from frigate.util import create_mask
|
||||||
|
|
||||||
|
# get info on the video
|
||||||
|
# cap = cv2.VideoCapture("debug/front_cam_2023_05_23_08_41__2023_05_23_08_43.mp4")
|
||||||
|
# cap = cv2.VideoCapture("debug/motion_test_clips/rain_1.mp4")
|
||||||
|
cap = cv2.VideoCapture("debug/motion_test_clips/lawn_mower_night_1.mp4")
|
||||||
|
# cap = cv2.VideoCapture("airport.mp4")
|
||||||
|
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
|
||||||
|
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
|
||||||
|
fps = cap.get(cv2.CAP_PROP_FPS)
|
||||||
|
frame_shape = (height, width, 3)
|
||||||
|
# Nick back:
|
||||||
|
# "1280,0,1280,316,1170,216,1146,126,1016,127,979,82,839,0",
|
||||||
|
# "310,350,300,402,224,405,241,354",
|
||||||
|
# "378,0,375,26,0,23,0,0",
|
||||||
|
# Front door:
|
||||||
|
# "1080,0,1080,339,1010,280,1020,169,777,163,452,170,318,299,191,365,186,417,139,470,108,516,40,530,0,514,0,0",
|
||||||
|
# "336,833,438,1024,346,1093,103,1052,24,814",
|
||||||
|
# Back
|
||||||
|
# "1855,0,1851,100,1289,96,1105,161,1045,119,890,121,890,0",
|
||||||
|
# "505,95,506,138,388,153,384,114",
|
||||||
|
# "689,72,689,122,549,134,547,89",
|
||||||
|
# "261,134,264,176,169,195,167,158",
|
||||||
|
# "145,159,146,202,70,220,65,183",
|
||||||
|
|
||||||
|
mask = create_mask(
|
||||||
|
(height, width),
|
||||||
|
[
|
||||||
|
"1080,0,1080,339,1010,280,1020,169,777,163,452,170,318,299,191,365,186,417,139,470,108,516,40,530,0,514,0,0",
|
||||||
|
"336,833,438,1024,346,1093,103,1052,24,814",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
# create the motion config
|
||||||
|
motion_config_1 = MotionConfig()
|
||||||
|
motion_config_1.mask = np.zeros((height, width), np.uint8)
|
||||||
|
motion_config_1.mask[:] = mask
|
||||||
|
# motion_config_1.improve_contrast = 1
|
||||||
|
motion_config_1.frame_height = 150
|
||||||
|
# motion_config_1.frame_alpha = 0.02
|
||||||
|
# motion_config_1.threshold = 30
|
||||||
|
# motion_config_1.contour_area = 10
|
||||||
|
|
||||||
|
motion_config_2 = MotionConfig()
|
||||||
|
motion_config_2.mask = np.zeros((height, width), np.uint8)
|
||||||
|
motion_config_2.mask[:] = mask
|
||||||
|
# motion_config_2.improve_contrast = 1
|
||||||
|
motion_config_2.frame_height = 150
|
||||||
|
# motion_config_2.frame_alpha = 0.01
|
||||||
|
motion_config_2.threshold = 20
|
||||||
|
# motion_config.contour_area = 10
|
||||||
|
|
||||||
|
save_images = True
|
||||||
|
|
||||||
|
improved_motion_detector_1 = ImprovedMotionDetector(
|
||||||
|
frame_shape=frame_shape,
|
||||||
|
config=motion_config_1,
|
||||||
|
fps=fps,
|
||||||
|
improve_contrast=mp.Value("i", motion_config_1.improve_contrast),
|
||||||
|
threshold=mp.Value("i", motion_config_1.threshold),
|
||||||
|
contour_area=mp.Value("i", motion_config_1.contour_area),
|
||||||
|
name="default",
|
||||||
|
)
|
||||||
|
improved_motion_detector_1.save_images = save_images
|
||||||
|
|
||||||
|
improved_motion_detector_2 = ImprovedMotionDetector(
|
||||||
|
frame_shape=frame_shape,
|
||||||
|
config=motion_config_2,
|
||||||
|
fps=fps,
|
||||||
|
improve_contrast=mp.Value("i", motion_config_2.improve_contrast),
|
||||||
|
threshold=mp.Value("i", motion_config_2.threshold),
|
||||||
|
contour_area=mp.Value("i", motion_config_2.contour_area),
|
||||||
|
name="compare",
|
||||||
|
)
|
||||||
|
improved_motion_detector_2.save_images = save_images
|
||||||
|
|
||||||
|
# read and process frames
|
||||||
|
ret, frame = cap.read()
|
||||||
|
frame_counter = 1
|
||||||
|
while ret:
|
||||||
|
yuv_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2YUV_I420)
|
||||||
|
|
||||||
|
start_frame = datetime.datetime.now().timestamp()
|
||||||
|
improved_motion_detector_1.detect(yuv_frame)
|
||||||
|
|
||||||
|
start_frame = datetime.datetime.now().timestamp()
|
||||||
|
improved_motion_detector_2.detect(yuv_frame)
|
||||||
|
|
||||||
|
default_frame = f"debug/frames/default-{frame_counter}.jpg"
|
||||||
|
compare_frame = f"debug/frames/compare-{frame_counter}.jpg"
|
||||||
|
if os.path.exists(default_frame) and os.path.exists(compare_frame):
|
||||||
|
images = [
|
||||||
|
cv2.imread(default_frame),
|
||||||
|
cv2.imread(compare_frame),
|
||||||
|
]
|
||||||
|
|
||||||
|
cv2.imwrite(
|
||||||
|
f"debug/frames/all-{frame_counter}.jpg",
|
||||||
|
cv2.vconcat(images)
|
||||||
|
if frame_shape[0] > frame_shape[1]
|
||||||
|
else cv2.hconcat(images),
|
||||||
|
)
|
||||||
|
os.unlink(default_frame)
|
||||||
|
os.unlink(compare_frame)
|
||||||
|
frame_counter += 1
|
||||||
|
|
||||||
|
ret, frame = cap.read()
|
||||||
|
|
||||||
|
cap.release()
|
@@ -11,15 +11,19 @@ services:
     shm_size: "256mb"
     build:
       context: .
+      dockerfile: docker/main/Dockerfile
       # Use target devcontainer-trt for TensorRT dev
       target: devcontainer
-    deploy:
-      resources:
-        reservations:
-          devices:
-            - driver: nvidia
-              count: 1
-              capabilities: [gpu]
+    ## Uncomment this block for nvidia gpu support
+    # deploy:
+    #   resources:
+    #     reservations:
+    #       devices:
+    #         - driver: nvidia
+    #           count: 1
+    #           capabilities: [gpu]
+    environment:
+      YOLO_MODELS: yolov7-320
     devices:
       - /dev/bus/usb:/dev/bus/usb
       # - /dev/dri:/dev/dri # for intel hwaccel, needs to be updated for your hardware
@@ -27,10 +31,8 @@ services:
       - .:/workspace/frigate:cached
       - ./web/dist:/opt/frigate/web:cached
       - /etc/localtime:/etc/localtime:ro
-      - ./config/config.yml:/config/config.yml:ro
+      - ./config:/config
       - ./debug:/media/frigate
-      # Create the trt-models folder using the documented method of generating TRT models
-      # - ./debug/trt-models:/trt-models
       - /dev/bus/usb:/dev/bus/usb
   mqtt:
     container_name: mqtt
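With the bind mount now covering the whole ./config directory and the NVIDIA block commented out by default, bringing up the dev stack looks roughly like the following (the devcontainer service name is assumed, only mqtt is visible in this hunk):

    docker compose up -d devcontainer mqtt
    # For NVIDIA GPUs, uncomment the deploy/resources block above first, then verify:
    docker compose exec devcontainer nvidia-smi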
@ -1,13 +1,16 @@
|
|||||||
# syntax=docker/dockerfile:1.2
|
# syntax=docker/dockerfile:1.6
|
||||||
|
|
||||||
# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
|
# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
|
||||||
ARG DEBIAN_FRONTEND=noninteractive
|
ARG DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
FROM debian:11 AS base
|
ARG BASE_IMAGE=debian:11
|
||||||
|
ARG SLIM_BASE=debian:11-slim
|
||||||
|
|
||||||
FROM --platform=linux/amd64 debian:11 AS base_amd64
|
FROM ${BASE_IMAGE} AS base
|
||||||
|
|
||||||
FROM debian:11-slim AS slim-base
|
FROM --platform=${BUILDPLATFORM} debian:11 AS base_host
|
||||||
|
|
||||||
|
FROM ${SLIM_BASE} AS slim-base
|
||||||
|
|
||||||
FROM slim-base AS wget
|
FROM slim-base AS wget
|
||||||
ARG DEBIAN_FRONTEND
|
ARG DEBIAN_FRONTEND
|
||||||
@ -18,17 +21,19 @@ WORKDIR /rootfs
|
|||||||
|
|
||||||
FROM base AS nginx
|
FROM base AS nginx
|
||||||
ARG DEBIAN_FRONTEND
|
ARG DEBIAN_FRONTEND
|
||||||
|
ENV CCACHE_DIR /root/.ccache
|
||||||
|
ENV CCACHE_MAXSIZE 2G
|
||||||
|
|
||||||
# bind /var/cache/apt to tmpfs to speed up nginx build
|
# bind /var/cache/apt to tmpfs to speed up nginx build
|
||||||
RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
|
RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
|
||||||
--mount=type=bind,source=docker/build_nginx.sh,target=/deps/build_nginx.sh \
|
--mount=type=bind,source=docker/main/build_nginx.sh,target=/deps/build_nginx.sh \
|
||||||
|
--mount=type=cache,target=/root/.ccache \
|
||||||
/deps/build_nginx.sh
|
/deps/build_nginx.sh
|
||||||
|
|
||||||
FROM wget AS go2rtc
|
FROM scratch AS go2rtc
|
||||||
ARG TARGETARCH
|
ARG TARGETARCH
|
||||||
WORKDIR /rootfs/usr/local/go2rtc/bin
|
WORKDIR /rootfs/usr/local/go2rtc/bin
|
||||||
RUN wget -qO go2rtc "https://github.com/AlexxIT/go2rtc/releases/download/v1.2.0/go2rtc_linux_${TARGETARCH}" \
|
ADD --link --chmod=755 "https://github.com/AlexxIT/go2rtc/releases/download/v1.8.4/go2rtc_linux_${TARGETARCH}" go2rtc
|
||||||
&& chmod +x go2rtc
|
|
||||||
|
|
||||||
|
|
||||||
####
|
####
|
||||||
@ -40,11 +45,11 @@ RUN wget -qO go2rtc "https://github.com/AlexxIT/go2rtc/releases/download/v1.2.0/
|
|||||||
#
|
#
|
||||||
####
|
####
|
||||||
# Download and Convert OpenVino model
|
# Download and Convert OpenVino model
|
||||||
FROM base_amd64 AS ov-converter
|
FROM base_host AS ov-converter
|
||||||
ARG DEBIAN_FRONTEND
|
ARG DEBIAN_FRONTEND
|
||||||
|
|
||||||
# Install OpenVino Runtime and Dev library
|
# Install OpenVino Runtime and Dev library
|
||||||
COPY requirements-ov.txt /requirements-ov.txt
|
COPY docker/main/requirements-ov.txt /requirements-ov.txt
|
||||||
RUN apt-get -qq update \
|
RUN apt-get -qq update \
|
||||||
&& apt-get -qq install -y wget python3 python3-distutils \
|
&& apt-get -qq install -y wget python3 python3-distutils \
|
||||||
&& wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \
|
&& wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \
|
||||||
@ -61,25 +66,27 @@ RUN mkdir /models \
|
|||||||
FROM wget as libusb-build
|
FROM wget as libusb-build
|
||||||
ARG TARGETARCH
|
ARG TARGETARCH
|
||||||
ARG DEBIAN_FRONTEND
|
ARG DEBIAN_FRONTEND
|
||||||
|
ENV CCACHE_DIR /root/.ccache
|
||||||
|
ENV CCACHE_MAXSIZE 2G
|
||||||
|
|
||||||
# Build libUSB without udev. Needed for Openvino NCS2 support
|
# Build libUSB without udev. Needed for Openvino NCS2 support
|
||||||
WORKDIR /opt
|
WORKDIR /opt
|
||||||
RUN apt-get update && apt-get install -y unzip build-essential automake libtool
|
RUN apt-get update && apt-get install -y unzip build-essential automake libtool ccache pkg-config
|
||||||
RUN wget -q https://github.com/libusb/libusb/archive/v1.0.25.zip -O v1.0.25.zip && \
|
RUN --mount=type=cache,target=/root/.ccache wget -q https://github.com/libusb/libusb/archive/v1.0.26.zip -O v1.0.26.zip && \
|
||||||
unzip v1.0.25.zip && cd libusb-1.0.25 && \
|
unzip v1.0.26.zip && cd libusb-1.0.26 && \
|
||||||
./bootstrap.sh && \
|
./bootstrap.sh && \
|
||||||
./configure --disable-udev --enable-shared && \
|
./configure CC='ccache gcc' CCX='ccache g++' --disable-udev --enable-shared && \
|
||||||
make -j $(nproc --all)
|
make -j $(nproc --all)
|
||||||
RUN apt-get update && \
|
RUN apt-get update && \
|
||||||
apt-get install -y --no-install-recommends libusb-1.0-0-dev && \
|
apt-get install -y --no-install-recommends libusb-1.0-0-dev && \
|
||||||
rm -rf /var/lib/apt/lists/*
|
rm -rf /var/lib/apt/lists/*
|
||||||
WORKDIR /opt/libusb-1.0.25/libusb
|
WORKDIR /opt/libusb-1.0.26/libusb
|
||||||
RUN /bin/mkdir -p '/usr/local/lib' && \
|
RUN /bin/mkdir -p '/usr/local/lib' && \
|
||||||
/bin/bash ../libtool --mode=install /usr/bin/install -c libusb-1.0.la '/usr/local/lib' && \
|
/bin/bash ../libtool --mode=install /usr/bin/install -c libusb-1.0.la '/usr/local/lib' && \
|
||||||
/bin/mkdir -p '/usr/local/include/libusb-1.0' && \
|
/bin/mkdir -p '/usr/local/include/libusb-1.0' && \
|
||||||
/usr/bin/install -c -m 644 libusb.h '/usr/local/include/libusb-1.0' && \
|
/usr/bin/install -c -m 644 libusb.h '/usr/local/include/libusb-1.0' && \
|
||||||
/bin/mkdir -p '/usr/local/lib/pkgconfig' && \
|
/bin/mkdir -p '/usr/local/lib/pkgconfig' && \
|
||||||
cd /opt/libusb-1.0.25/ && \
|
cd /opt/libusb-1.0.26/ && \
|
||||||
/usr/bin/install -c -m 644 libusb-1.0.pc '/usr/local/lib/pkgconfig' && \
|
/usr/bin/install -c -m 644 libusb-1.0.pc '/usr/local/lib/pkgconfig' && \
|
||||||
ldconfig
|
ldconfig
|
||||||
|
|
||||||
@ -93,12 +100,14 @@ COPY labelmap.txt .
|
|||||||
COPY --from=ov-converter /models/public/ssdlite_mobilenet_v2/FP16 openvino-model
|
COPY --from=ov-converter /models/public/ssdlite_mobilenet_v2/FP16 openvino-model
|
||||||
RUN wget -q https://github.com/openvinotoolkit/open_model_zoo/raw/master/data/dataset_classes/coco_91cl_bkgr.txt -O openvino-model/coco_91cl_bkgr.txt && \
|
RUN wget -q https://github.com/openvinotoolkit/open_model_zoo/raw/master/data/dataset_classes/coco_91cl_bkgr.txt -O openvino-model/coco_91cl_bkgr.txt && \
|
||||||
sed -i 's/truck/car/g' openvino-model/coco_91cl_bkgr.txt
|
sed -i 's/truck/car/g' openvino-model/coco_91cl_bkgr.txt
|
||||||
|
# Get Audio Model and labels
|
||||||
|
RUN wget -qO cpu_audio_model.tflite https://tfhub.dev/google/lite-model/yamnet/classification/tflite/1?lite-format=tflite
|
||||||
|
COPY audio-labelmap.txt .
|
||||||
|
|
||||||
|
|
||||||
FROM wget AS s6-overlay
|
FROM wget AS s6-overlay
|
||||||
ARG TARGETARCH
|
ARG TARGETARCH
|
||||||
RUN --mount=type=bind,source=docker/install_s6_overlay.sh,target=/deps/install_s6_overlay.sh \
|
RUN --mount=type=bind,source=docker/main/install_s6_overlay.sh,target=/deps/install_s6_overlay.sh \
|
||||||
/deps/install_s6_overlay.sh
|
/deps/install_s6_overlay.sh
|
||||||
|
|
||||||
|
|
||||||
@ -112,13 +121,15 @@ RUN apt-get -qq update \
|
|||||||
apt-transport-https \
|
apt-transport-https \
|
||||||
gnupg \
|
gnupg \
|
||||||
wget \
|
wget \
|
||||||
&& apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 9165938D90FDDD2E \
|
# the key fingerprint can be obtained from https://ftp-master.debian.org/keys.html
|
||||||
&& echo "deb http://raspbian.raspberrypi.org/raspbian/ bullseye main contrib non-free rpi" | tee /etc/apt/sources.list.d/raspi.list \
|
&& wget -qO- "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xA4285295FC7B1A81600062A9605C66F00D6C9793" | \
|
||||||
|
gpg --dearmor > /usr/share/keyrings/debian-archive-bullseye-stable.gpg \
|
||||||
|
&& echo "deb [signed-by=/usr/share/keyrings/debian-archive-bullseye-stable.gpg] http://deb.debian.org/debian bullseye main contrib non-free" | \
|
||||||
|
tee /etc/apt/sources.list.d/debian-bullseye-nonfree.list \
|
||||||
&& apt-get -qq update \
|
&& apt-get -qq update \
|
||||||
&& apt-get -qq install -y \
|
&& apt-get -qq install -y \
|
||||||
python3 \
|
python3.9 \
|
||||||
python3-dev \
|
python3.9-dev \
|
||||||
wget \
|
|
||||||
# opencv dependencies
|
# opencv dependencies
|
||||||
build-essential cmake git pkg-config libgtk-3-dev \
|
build-essential cmake git pkg-config libgtk-3-dev \
|
||||||
libavcodec-dev libavformat-dev libswscale-dev libv4l-dev \
|
libavcodec-dev libavformat-dev libswscale-dev libv4l-dev \
|
||||||
@ -130,28 +141,17 @@ RUN apt-get -qq update \
|
|||||||
gcc gfortran libopenblas-dev liblapack-dev && \
|
gcc gfortran libopenblas-dev liblapack-dev && \
|
||||||
rm -rf /var/lib/apt/lists/*
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Ensure python3 defaults to python3.9
|
||||||
|
RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.9 1
|
||||||
|
|
||||||
RUN wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \
|
RUN wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \
|
||||||
&& python3 get-pip.py "pip"
|
&& python3 get-pip.py "pip"
|
||||||
|
|
||||||
RUN if [ "${TARGETARCH}" = "arm" ]; \
|
COPY docker/main/requirements.txt /requirements.txt
|
||||||
then echo "[global]" > /etc/pip.conf \
|
RUN pip3 install -r /requirements.txt
|
||||||
&& echo "extra-index-url=https://www.piwheels.org/simple" >> /etc/pip.conf; \
|
|
||||||
fi
|
|
||||||
|
|
||||||
COPY requirements.txt /requirements.txt
|
COPY docker/main/requirements-wheels.txt /requirements-wheels.txt
|
||||||
RUN pip3 install -r requirements.txt
|
RUN pip3 wheel --wheel-dir=/wheels -r /requirements-wheels.txt
|
||||||
|
|
||||||
COPY requirements-wheels.txt /requirements-wheels.txt
|
|
||||||
RUN pip3 wheel --wheel-dir=/wheels -r requirements-wheels.txt
|
|
||||||
|
|
||||||
# Make this a separate target so it can be built/cached optionally
|
|
||||||
FROM wheels as trt-wheels
|
|
||||||
ARG DEBIAN_FRONTEND
|
|
||||||
ARG TARGETARCH
|
|
||||||
|
|
||||||
# Add TensorRT wheels to another folder
|
|
||||||
COPY requirements-tensorrt.txt /requirements-tensorrt.txt
|
|
||||||
RUN mkdir -p /trt-wheels && pip3 wheel --wheel-dir=/trt-wheels -r requirements-tensorrt.txt
|
|
||||||
|
|
||||||
|
|
||||||
# Collect deps in a single layer
|
# Collect deps in a single layer
|
||||||
@ -161,7 +161,7 @@ COPY --from=go2rtc /rootfs/ /
|
|||||||
COPY --from=libusb-build /usr/local/lib /usr/local/lib
|
COPY --from=libusb-build /usr/local/lib /usr/local/lib
|
||||||
COPY --from=s6-overlay /rootfs/ /
|
COPY --from=s6-overlay /rootfs/ /
|
||||||
COPY --from=models /rootfs/ /
|
COPY --from=models /rootfs/ /
|
||||||
COPY docker/rootfs/ /
|
COPY docker/main/rootfs/ /
|
||||||
|
|
||||||
|
|
||||||
# Frigate deps (ffmpeg, python, nginx, go2rtc, s6-overlay, etc)
|
# Frigate deps (ffmpeg, python, nginx, go2rtc, s6-overlay, etc)
|
||||||
@ -179,10 +179,11 @@ ENV NVIDIA_DRIVER_CAPABILITIES="compute,video,utility"
|
|||||||
ENV PATH="/usr/lib/btbn-ffmpeg/bin:/usr/local/go2rtc/bin:/usr/local/nginx/sbin:${PATH}"
|
ENV PATH="/usr/lib/btbn-ffmpeg/bin:/usr/local/go2rtc/bin:/usr/local/nginx/sbin:${PATH}"
|
||||||
|
|
||||||
# Install dependencies
|
# Install dependencies
|
||||||
RUN --mount=type=bind,source=docker/install_deps.sh,target=/deps/install_deps.sh \
|
RUN --mount=type=bind,source=docker/main/install_deps.sh,target=/deps/install_deps.sh \
|
||||||
/deps/install_deps.sh
|
/deps/install_deps.sh
|
||||||
|
|
||||||
RUN --mount=type=bind,from=wheels,source=/wheels,target=/deps/wheels \
|
RUN --mount=type=bind,from=wheels,source=/wheels,target=/deps/wheels \
|
||||||
|
python3 -m pip install --upgrade pip && \
|
||||||
pip3 install -U /deps/wheels/*.whl
|
pip3 install -U /deps/wheels/*.whl
|
||||||
|
|
||||||
COPY --from=deps-rootfs / /
|
COPY --from=deps-rootfs / /
|
||||||
@ -200,24 +201,27 @@ ENV S6_LOGGING_SCRIPT="T 1 n0 s10000000 T"
 ENTRYPOINT ["/init"]
 CMD []

+HEALTHCHECK --start-period=120s --start-interval=5s --interval=15s --timeout=5s --retries=3 \
+    CMD curl --fail --silent --show-error http://127.0.0.1:5000/api/version || exit 1

 # Frigate deps with Node.js and NPM for devcontainer
 FROM deps AS devcontainer

 # Do not start the actual Frigate service on devcontainer as it will be started by VSCode
 # But start a fake service for simulating the logs
-COPY docker/fake_frigate_run /etc/s6-overlay/s6-rc.d/frigate/run
+COPY docker/main/fake_frigate_run /etc/s6-overlay/s6-rc.d/frigate/run

 # Create symbolic link to the frigate source code, as go2rtc's create_config.sh uses it
 RUN mkdir -p /opt/frigate \
     && ln -svf /workspace/frigate/frigate /opt/frigate/frigate

-# Install Node 16
-RUN apt-get update \
-    && apt-get install wget -y \
-    && wget -qO- https://deb.nodesource.com/setup_16.x | bash - \
-    && apt-get install -y nodejs \
+# Install Node 20
+RUN curl -SLO https://deb.nodesource.com/nsolid_setup_deb.sh && \
+    chmod 500 nsolid_setup_deb.sh && \
+    ./nsolid_setup_deb.sh 20 && \
+    apt-get install nodejs -y \
     && rm -rf /var/lib/apt/lists/* \
-    && npm install -g npm@9
+    && npm install -g npm@10

 WORKDIR /workspace/frigate
@ -225,15 +229,15 @@ RUN apt-get update \
     && apt-get install make -y \
     && rm -rf /var/lib/apt/lists/*

-RUN --mount=type=bind,source=./requirements-dev.txt,target=/workspace/frigate/requirements-dev.txt \
+RUN --mount=type=bind,source=./docker/main/requirements-dev.txt,target=/workspace/frigate/requirements-dev.txt \
     pip3 install -r requirements-dev.txt

 CMD ["sleep", "infinity"]


 # Frigate web build
-# force this to run on amd64 because QEMU is painfully slow
-FROM --platform=linux/amd64 node:16 AS web-build
+# This should be architecture agnostic, so speed up the build on multiarch by not using QEMU.
+FROM --platform=$BUILDPLATFORM node:16 AS web-build

 WORKDIR /work
 COPY web/package.json web/package-lock.json ./
@ -257,16 +261,3 @@ FROM deps AS frigate

 WORKDIR /opt/frigate/
 COPY --from=rootfs / /
-
-# Frigate w/ TensorRT Support as separate image
-FROM frigate AS frigate-tensorrt
-RUN --mount=type=bind,from=trt-wheels,source=/trt-wheels,target=/deps/trt-wheels \
-    pip3 install -U /deps/trt-wheels/*.whl && \
-    ln -s libnvrtc.so.11.2 /usr/local/lib/python3.9/dist-packages/nvidia/cuda_nvrtc/lib/libnvrtc.so && \
-    ldconfig
-
-# Dev Container w/ TRT
-FROM devcontainer AS devcontainer-trt
-
-RUN --mount=type=bind,from=trt-wheels,source=/trt-wheels,target=/deps/trt-wheels \
-    pip3 install -U /deps/trt-wheels/*.whl
@ -2,10 +2,10 @@

 set -euxo pipefail

-NGINX_VERSION="1.22.1"
-VOD_MODULE_VERSION="1.30"
-SECURE_TOKEN_MODULE_VERSION="1.4"
-RTMP_MODULE_VERSION="1.2.1"
+NGINX_VERSION="1.25.3"
+VOD_MODULE_VERSION="1.31"
+SECURE_TOKEN_MODULE_VERSION="1.5"
+RTMP_MODULE_VERSION="1.2.2"

 cp /etc/apt/sources.list /etc/apt/sources.list.d/sources-src.list
 sed -i 's|deb http|deb-src http|g' /etc/apt/sources.list.d/sources-src.list
@ -15,6 +15,10 @@ apt-get -yqq build-dep nginx

 apt-get -yqq install --no-install-recommends ca-certificates wget
 update-ca-certificates -f
+apt install -y ccache
+
+export PATH="/usr/lib/ccache:$PATH"

 mkdir /tmp/nginx
 wget -nv https://nginx.org/download/nginx-${NGINX_VERSION}.tar.gz
 tar -zxf nginx-${NGINX_VERSION}.tar.gz -C /tmp/nginx --strip-components=1
@ -62,5 +66,5 @@ cd /tmp/nginx
     --add-module=../nginx-rtmp-module \
     --with-cc-opt="-O3 -Wno-error=implicit-fallthrough"

-make -j$(nproc) && make install
+make CC="ccache gcc" -j$(nproc) && make install
 rm -rf /usr/local/nginx/html /usr/local/nginx/conf/*.default
@ -10,9 +10,14 @@ apt-get -qq install --no-install-recommends -y \
     wget \
     procps vainfo \
     unzip locales tzdata libxml2 xz-utils \
+    python3.9 \
     python3-pip \
     curl \
-    jq
+    jq \
+    nethogs
+
+# ensure python3 defaults to python3.9
+update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.9 1

 mkdir -p -m 600 /root/.gnupg
@ -22,8 +27,10 @@ curl -fsSLo - https://packages.cloud.google.com/apt/doc/apt-key.gpg | \
 echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
 echo "libedgetpu1-max libedgetpu/accepted-eula select true" | debconf-set-selections

-# enable non-free repo
-sed -i -e's/ main/ main contrib non-free/g' /etc/apt/sources.list
+# enable non-free repo in Debian
+if grep -q "Debian" /etc/issue; then
+    sed -i -e's/ main/ main contrib non-free/g' /etc/apt/sources.list
+fi

 # coral drivers
 apt-get -qq update
@ -38,37 +45,26 @@ if [[ "${TARGETARCH}" == "amd64" ]]; then
     rm -rf btbn-ffmpeg.tar.xz /usr/lib/btbn-ffmpeg/doc /usr/lib/btbn-ffmpeg/bin/ffplay
 fi

-# ffmpeg -> arm32
-if [[ "${TARGETARCH}" == "arm" ]]; then
-    # add raspberry pi repo
-    gpg --no-default-keyring --keyring /usr/share/keyrings/raspbian.gpg --keyserver keyserver.ubuntu.com --recv-keys 9165938D90FDDD2E
-    echo "deb [signed-by=/usr/share/keyrings/raspbian.gpg] http://raspbian.raspberrypi.org/raspbian/ bullseye main contrib non-free rpi" | tee /etc/apt/sources.list.d/raspi.list
-    apt-get -qq update
-    apt-get -qq install --no-install-recommends --no-install-suggests -y ffmpeg
-fi
-
 # ffmpeg -> arm64
 if [[ "${TARGETARCH}" == "arm64" ]]; then
-    # add raspberry pi repo
-    gpg --no-default-keyring --keyring /usr/share/keyrings/raspbian.gpg --keyserver keyserver.ubuntu.com --recv-keys 82B129927FA3303E
-    echo "deb [signed-by=/usr/share/keyrings/raspbian.gpg] https://archive.raspberrypi.org/debian/ bullseye main" | tee /etc/apt/sources.list.d/raspi.list
-    apt-get -qq update
-    apt-get -qq install --no-install-recommends --no-install-suggests -y ffmpeg
+    mkdir -p /usr/lib/btbn-ffmpeg
+    wget -qO btbn-ffmpeg.tar.xz "https://github.com/BtbN/FFmpeg-Builds/releases/download/autobuild-2022-07-31-12-37/ffmpeg-n5.1-2-g915ef932a3-linuxarm64-gpl-5.1.tar.xz"
+    tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/btbn-ffmpeg --strip-components 1
+    rm -rf btbn-ffmpeg.tar.xz /usr/lib/btbn-ffmpeg/doc /usr/lib/btbn-ffmpeg/bin/ffplay
 fi

 # arch specific packages
 if [[ "${TARGETARCH}" == "amd64" ]]; then
-    # Use debian testing repo only for hwaccel packages
-    echo 'deb http://deb.debian.org/debian testing main non-free' >/etc/apt/sources.list.d/debian-testing.list
+    # use debian bookworm for hwaccel packages
+    echo 'deb https://deb.debian.org/debian bookworm main contrib non-free' >/etc/apt/sources.list.d/debian-bookworm.list
     apt-get -qq update
-    # intel-opencl-icd specifically for GPU support in OpenVino
     apt-get -qq install --no-install-recommends --no-install-suggests -y \
         intel-opencl-icd \
-        mesa-va-drivers libva-drm2 intel-media-va-driver-non-free i965-va-driver libmfx1 radeontop intel-gpu-tools
+        mesa-va-drivers radeontop libva-drm2 intel-media-va-driver-non-free i965-va-driver libmfx1 intel-gpu-tools
     # something about this dependency requires it to be installed in a separate call rather than in the line above
     apt-get -qq install --no-install-recommends --no-install-suggests -y \
         i965-va-driver-shaders
-    rm -f /etc/apt/sources.list.d/debian-testing.list
+    rm -f /etc/apt/sources.list.d/debian-bookworm.list
 fi

 if [[ "${TARGETARCH}" == "arm64" ]]; then
@ -76,17 +72,13 @@ if [[ "${TARGETARCH}" == "arm64" ]]; then
         libva-drm2 mesa-va-drivers
 fi

-# not sure why 32bit arm requires all these
-if [[ "${TARGETARCH}" == "arm" ]]; then
-    apt-get -qq install --no-install-recommends --no-install-suggests -y \
-        libgtk-3-dev \
-        libavcodec-dev libavformat-dev libswscale-dev libv4l-dev \
-        libxvidcore-dev libx264-dev libjpeg-dev libpng-dev libtiff-dev \
-        gfortran openexr libatlas-base-dev libtbb-dev libdc1394-22-dev libopenexr-dev \
-        libgstreamer-plugins-base1.0-dev libgstreamer1.0-dev
-fi
-
-apt-get purge gnupg apt-transport-https wget xz-utils -y
+apt-get purge gnupg apt-transport-https xz-utils -y
 apt-get clean autoclean -y
 apt-get autoremove --purge -y
 rm -rf /var/lib/apt/lists/*
+
+# Install yq, for frigate-prepare and go2rtc echo source
+curl -fsSL \
+    "https://github.com/mikefarah/yq/releases/download/v4.33.3/yq_linux_$(dpkg --print-architecture)" \
+    --output /usr/local/bin/yq
+chmod +x /usr/local/bin/yq
@ -2,12 +2,10 @@

 set -euxo pipefail

-s6_version="3.1.4.1"
+s6_version="3.1.5.0"

 if [[ "${TARGETARCH}" == "amd64" ]]; then
     s6_arch="x86_64"
-elif [[ "${TARGETARCH}" == "arm" ]]; then
-    s6_arch="armhf"
 elif [[ "${TARGETARCH}" == "arm64" ]]; then
     s6_arch="aarch64"
 fi
1  docker/main/requirements-dev.txt  Normal file
@ -0,0 +1 @@
ruff
5  docker/main/requirements-ov.txt  Normal file
@ -0,0 +1,5 @@
numpy
# Openvino Library - Custom built with MYRIAD support
openvino @ https://github.com/NateMeyer/openvino-wheels/releases/download/multi-arch_2022.3.1/openvino-2022.3.1-1-cp39-cp39-manylinux_2_31_x86_64.whl; platform_machine == 'x86_64'
openvino @ https://github.com/NateMeyer/openvino-wheels/releases/download/multi-arch_2022.3.1/openvino-2022.3.1-1-cp39-cp39-linux_aarch64.whl; platform_machine == 'aarch64'
openvino-dev[tensorflow2] @ https://github.com/NateMeyer/openvino-wheels/releases/download/multi-arch_2022.3.1/openvino_dev-2022.3.1-1-py3-none-any.whl
29  docker/main/requirements-wheels.txt  Normal file
@ -0,0 +1,29 @@
click == 8.1.*
Flask == 2.3.*
imutils == 0.5.*
matplotlib == 3.7.*
mypy == 1.6.1
numpy == 1.23.*
onvif_zeep == 0.2.12
opencv-python-headless == 4.7.0.*
paho-mqtt == 1.6.*
peewee == 3.17.*
peewee_migrate == 1.12.*
psutil == 5.9.*
pydantic == 1.10.*
git+https://github.com/fbcotter/py3nvml#egg=py3nvml
PyYAML == 6.0.*
pytz == 2023.3.post1
ruamel.yaml == 0.18.*
tzlocal == 5.2
types-PyYAML == 6.0.*
requests == 2.31.*
types-requests == 2.31.*
scipy == 1.11.*
norfair == 2.2.*
setproctitle == 1.3.*
ws4py == 0.5.*
unidecode == 1.3.*
# Openvino Library - Custom built with MYRIAD support
openvino @ https://github.com/NateMeyer/openvino-wheels/releases/download/multi-arch_2022.3.1/openvino-2022.3.1-1-cp39-cp39-manylinux_2_31_x86_64.whl; platform_machine == 'x86_64'
openvino @ https://github.com/NateMeyer/openvino-wheels/releases/download/multi-arch_2022.3.1/openvino-2022.3.1-1-cp39-cp39-linux_aarch64.whl; platform_machine == 'aarch64'
2  docker/main/requirements.txt  Normal file
@ -0,0 +1,2 @@
scikit-build == 0.17.*
nvidia-pyindex
55  docker/main/rootfs/etc/s6-overlay/s6-rc.d/frigate/run  Executable file
@ -0,0 +1,55 @@
#!/command/with-contenv bash
# shellcheck shell=bash
# Start the Frigate service

set -o errexit -o nounset -o pipefail

# Logs should be sent to stdout so that s6 can collect them

# Tell S6-Overlay not to restart this service
s6-svc -O .

function migrate_db_path() {
    # Find config file in yaml or yml, but prefer yaml
    local config_file="${CONFIG_FILE:-"/config/config.yml"}"
    local config_file_yaml="${config_file//.yml/.yaml}"
    if [[ -f "${config_file_yaml}" ]]; then
        config_file="${config_file_yaml}"
    elif [[ ! -f "${config_file}" ]]; then
        echo "[ERROR] Frigate config file not found"
        return 1
    fi
    unset config_file_yaml

    # Use yq to check if database.path is set
    local user_db_path
    user_db_path=$(yq eval '.database.path' "${config_file}")

    if [[ "${user_db_path}" == "null" ]]; then
        local previous_db_path="/media/frigate/frigate.db"
        local new_db_dir="/config"
        if [[ -f "${previous_db_path}" ]]; then
            if mountpoint --quiet "${new_db_dir}"; then
                # /config is a mount point, move the db
                echo "[INFO] Moving db from '${previous_db_path}' to the '${new_db_dir}' dir..."
                # Move all files that starts with frigate.db to the new directory
                mv -vf "${previous_db_path}"* "${new_db_dir}"
            else
                echo "[ERROR] Trying to migrate the db path from '${previous_db_path}' to the '${new_db_dir}' dir, but '${new_db_dir}' is not a mountpoint, please mount the '${new_db_dir}' dir"
                return 1
            fi
        fi
    fi
}

echo "[INFO] Preparing Frigate..."
migrate_db_path
export LIBAVFORMAT_VERSION_MAJOR=$(ffmpeg -version | grep -Po 'libavformat\W+\K\d+')

echo "[INFO] Starting Frigate..."

cd /opt/frigate || echo "[ERROR] Failed to change working directory to /opt/frigate"

# Replace the bash process with the Frigate process, redirecting stderr to stdout
exec 2>&1
exec python3 -u -m frigate
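A minimal sketch (not part of the commit) of the same database.path check the run script performs with `yq eval '.database.path'`, expressed in Python; the file paths mirror the script, and using PyYAML for the lookup is purely an illustrative assumption.

import os
import yaml

config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")
yaml_variant = config_file.replace(".yml", ".yaml")
# prefer the .yaml variant if it exists, as the run script does
if os.path.isfile(yaml_variant):
    config_file = yaml_variant

with open(config_file) as f:
    config = yaml.safe_load(f) or {}

# None here corresponds to the "null" string that yq prints for a missing key
user_db_path = (config.get("database") or {}).get("path")
if user_db_path is None and os.path.isfile("/media/frigate/frigate.db"):
    print("db would be migrated from /media/frigate to /config")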
@ -43,8 +43,15 @@ function get_ip_and_port_from_supervisor() {
     export FRIGATE_GO2RTC_WEBRTC_CANDIDATE_INTERNAL="${ip_address}:${webrtc_port}"
 }

+export LIBAVFORMAT_VERSION_MAJOR=$(ffmpeg -version | grep -Po 'libavformat\W+\K\d+')
+
+if [[ -f "/dev/shm/go2rtc.yaml" ]]; then
+    echo "[INFO] Removing stale config from last run..."
+    rm /dev/shm/go2rtc.yaml
+fi
+
 if [[ ! -f "/dev/shm/go2rtc.yaml" ]]; then
-    echo "[INFO] Preparing go2rtc config..."
+    echo "[INFO] Preparing new go2rtc config..."

     if [[ -n "${SUPERVISOR_TOKEN:-}" ]]; then
         # Running as a Home Assistant add-on, infer the IP address and port
@ -52,6 +59,8 @@ if [[ ! -f "/dev/shm/go2rtc.yaml" ]]; then
     fi

     python3 /usr/local/go2rtc/create_config.py
+else
+    echo "[WARNING] Unable to remove existing go2rtc config. Changes made to your frigate config file may not be recognized. Please remove the /dev/shm/go2rtc.yaml from your docker host manually."
 fi

 readonly config_path="/config"
@ -3,16 +3,28 @@
 import json
 import os
 import sys
+from pathlib import Path

 import yaml

 sys.path.insert(0, "/opt/frigate")
-from frigate.const import BIRDSEYE_PIPE, BTBN_PATH
-from frigate.ffmpeg_presets import parse_preset_hardware_acceleration_encode
+from frigate.const import BIRDSEYE_PIPE  # noqa: E402
+from frigate.ffmpeg_presets import (  # noqa: E402
+    parse_preset_hardware_acceleration_encode,
+)

 sys.path.remove("/opt/frigate")


 FRIGATE_ENV_VARS = {k: v for k, v in os.environ.items() if k.startswith("FRIGATE_")}
+# read docker secret files as env vars too
+if os.path.isdir("/run/secrets"):
+    for secret_file in os.listdir("/run/secrets"):
+        if secret_file.startswith("FRIGATE_"):
+            FRIGATE_ENV_VARS[secret_file] = Path(
+                os.path.join("/run/secrets", secret_file)
+            ).read_text()

 config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

 # Check if we can use .yaml instead of .yml
@ -36,13 +48,25 @@ if go2rtc_config.get("api") is None:
 elif go2rtc_config["api"].get("origin") is None:
     go2rtc_config["api"]["origin"] = "*"

+# Need to set default location for HA config
+if go2rtc_config.get("hass") is None:
+    go2rtc_config["hass"] = {"config": "/config"}
+
 # we want to ensure that logs are easy to read
 if go2rtc_config.get("log") is None:
     go2rtc_config["log"] = {"format": "text"}
 elif go2rtc_config["log"].get("format") is None:
     go2rtc_config["log"]["format"] = "text"

-if not go2rtc_config.get("webrtc", {}).get("candidates", []):
+# ensure there is a default webrtc config
+if not go2rtc_config.get("webrtc"):
+    go2rtc_config["webrtc"] = {}
+
+# go2rtc should listen on 8555 tcp & udp by default
+if not go2rtc_config["webrtc"].get("listen"):
+    go2rtc_config["webrtc"]["listen"] = ":8555"
+
+if not go2rtc_config["webrtc"].get("candidates", []):
     default_candidates = []
     # use internal candidate if it was discovered when running through the add-on
     internal_candidate = os.environ.get(
@ -64,11 +88,22 @@ else:
 # as source for frigate and the integration supports HLS playback
 if go2rtc_config.get("rtsp") is None:
     go2rtc_config["rtsp"] = {"default_query": "mp4"}
-elif go2rtc_config["rtsp"].get("default_query") is None:
-    go2rtc_config["rtsp"]["default_query"] = "mp4"
+else:
+    if go2rtc_config["rtsp"].get("default_query") is None:
+        go2rtc_config["rtsp"]["default_query"] = "mp4"
+
+    if go2rtc_config["rtsp"].get("username") is not None:
+        go2rtc_config["rtsp"]["username"] = go2rtc_config["rtsp"]["username"].format(
+            **FRIGATE_ENV_VARS
+        )
+
+    if go2rtc_config["rtsp"].get("password") is not None:
+        go2rtc_config["rtsp"]["password"] = go2rtc_config["rtsp"]["password"].format(
+            **FRIGATE_ENV_VARS
+        )

 # need to replace ffmpeg command when using ffmpeg4
-if not os.path.exists(BTBN_PATH):
+if int(os.environ["LIBAVFORMAT_VERSION_MAJOR"]) < 59:
     if go2rtc_config.get("ffmpeg") is None:
         go2rtc_config["ffmpeg"] = {
             "rtsp": "-fflags nobuffer -flags low_delay -stimeout 5000000 -user_agent go2rtc/ffmpeg -rtsp_transport tcp -i {input}"
@ -78,16 +113,43 @@ if not os.path.exists(BTBN_PATH):
             "rtsp"
         ] = "-fflags nobuffer -flags low_delay -stimeout 5000000 -user_agent go2rtc/ffmpeg -rtsp_transport tcp -i {input}"

+# add hardware acceleration presets for rockchip devices
+# may be removed if frigate uses a go2rtc version that includes these presets
+if go2rtc_config.get("ffmpeg") is None:
+    go2rtc_config["ffmpeg"] = {
+        "h264/rk": "-c:v h264_rkmpp_encoder -g 50 -bf 0",
+        "h265/rk": "-c:v hevc_rkmpp_encoder -g 50 -bf 0",
+    }
+else:
+    if go2rtc_config["ffmpeg"].get("h264/rk") is None:
+        go2rtc_config["ffmpeg"]["h264/rk"] = "-c:v h264_rkmpp_encoder -g 50 -bf 0"
+
+    if go2rtc_config["ffmpeg"].get("h265/rk") is None:
+        go2rtc_config["ffmpeg"]["h265/rk"] = "-c:v hevc_rkmpp_encoder -g 50 -bf 0"
+
 for name in go2rtc_config.get("streams", {}):
     stream = go2rtc_config["streams"][name]
+
     if isinstance(stream, str):
-        go2rtc_config["streams"][name] = go2rtc_config["streams"][name].format(
-            **FRIGATE_ENV_VARS
-        )
+        try:
+            go2rtc_config["streams"][name] = go2rtc_config["streams"][name].format(
+                **FRIGATE_ENV_VARS
+            )
+        except KeyError as e:
+            print(
+                "[ERROR] Invalid substitution found, see https://docs.frigate.video/configuration/restream#advanced-restream-configurations for more info."
+            )
+            sys.exit(e)
+
     elif isinstance(stream, list):
         for i, stream in enumerate(stream):
-            go2rtc_config["streams"][name][i] = stream.format(**FRIGATE_ENV_VARS)
+            try:
+                go2rtc_config["streams"][name][i] = stream.format(**FRIGATE_ENV_VARS)
+            except KeyError as e:
+                print(
+                    "[ERROR] Invalid substitution found, see https://docs.frigate.video/configuration/restream#advanced-restream-configurations for more info."
+                )
+                sys.exit(e)

 # add birdseye restream stream if enabled
 if config.get("birdseye", {}).get("restream", False):
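A minimal sketch (illustration only) of how the FRIGATE_* substitution in create_config.py behaves for a single stream string, including the new KeyError path; the variable name FRIGATE_RTSP_PASSWORD and the example URL are hypothetical.

import sys

FRIGATE_ENV_VARS = {"FRIGATE_RTSP_PASSWORD": "secret"}  # hypothetical env var
stream = "rtsp://admin:{FRIGATE_RTSP_PASSWORD}@192.168.1.10:554/cam"

try:
    resolved = stream.format(**FRIGATE_ENV_VARS)
except KeyError as e:
    # same failure mode the script now reports as an invalid substitution
    print(f"[ERROR] Invalid substitution found: {e}")
    sys.exit(1)

print(resolved)  # rtsp://admin:secret@192.168.1.10:554/cam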
@ -32,6 +32,13 @@ http {
     gzip_proxied no-cache no-store private expired auth;
     gzip_vary on;

+    proxy_cache_path /dev/shm/nginx_cache levels=1:2 keys_zone=api_cache:10m max_size=10m inactive=1m use_temp_path=off;
+
+    map $sent_http_content_type $should_not_cache {
+        'application/json' 0;
+        default 1;
+    }
+
     upstream frigate_api {
         server 127.0.0.1:5001;
         keepalive 1024;
@ -93,10 +100,6 @@ http {
             secure_token $args;
             secure_token_types application/vnd.apple.mpegurl;

-            add_header Access-Control-Allow-Headers '*';
-            add_header Access-Control-Expose-Headers 'Server,range,Content-Length,Content-Range';
-            add_header Access-Control-Allow-Methods 'GET, HEAD, OPTIONS';
-            add_header Access-Control-Allow-Origin '*';
             add_header Cache-Control "no-store";
             expires off;
         }
@ -104,16 +107,6 @@ http {
         location /stream/ {
             add_header Cache-Control "no-store";
             expires off;
-            add_header 'Access-Control-Allow-Origin' "$http_origin" always;
-            add_header 'Access-Control-Allow-Credentials' 'true';
-            add_header 'Access-Control-Expose-Headers' 'Content-Length';
-            if ($request_method = 'OPTIONS') {
-                add_header 'Access-Control-Allow-Origin' "$http_origin";
-                add_header 'Access-Control-Max-Age' 1728000;
-                add_header 'Content-Type' 'text/plain charset=UTF-8';
-                add_header 'Content-Length' 0;
-                return 204;
-            }

             types {
                 application/dash+xml mpd;
@ -126,16 +119,6 @@ http {
         }

         location /clips/ {
-            add_header 'Access-Control-Allow-Origin' "$http_origin" always;
-            add_header 'Access-Control-Allow-Credentials' 'true';
-            add_header 'Access-Control-Expose-Headers' 'Content-Length';
-            if ($request_method = 'OPTIONS') {
-                add_header 'Access-Control-Allow-Origin' "$http_origin";
-                add_header 'Access-Control-Max-Age' 1728000;
-                add_header 'Content-Type' 'text/plain charset=UTF-8';
-                add_header 'Content-Length' 0;
-                return 204;
-            }

             types {
                 video/mp4 mp4;
@ -152,17 +135,16 @@ http {
         }

         location /recordings/ {
-            add_header 'Access-Control-Allow-Origin' "$http_origin" always;
-            add_header 'Access-Control-Allow-Credentials' 'true';
-            add_header 'Access-Control-Expose-Headers' 'Content-Length';
-            if ($request_method = 'OPTIONS') {
-                add_header 'Access-Control-Allow-Origin' "$http_origin";
-                add_header 'Access-Control-Max-Age' 1728000;
-                add_header 'Content-Type' 'text/plain charset=UTF-8';
-                add_header 'Content-Length' 0;
-                return 204;
-            }
+            types {
+                video/mp4 mp4;
+            }
+
+            autoindex on;
+            autoindex_format json;
+            root /media/frigate;
+        }

+        location /exports/ {
             types {
                 video/mp4 mp4;
             }
@ -174,58 +156,97 @@ http {

         location /ws {
             proxy_pass http://mqtt_ws/;
-            proxy_http_version 1.1;
-            proxy_set_header Upgrade $http_upgrade;
-            proxy_set_header Connection "Upgrade";
-            proxy_set_header Host $host;
+            include proxy.conf;
         }

         location /live/jsmpeg/ {
             proxy_pass http://jsmpeg/;
-            proxy_http_version 1.1;
-            proxy_set_header Upgrade $http_upgrade;
-            proxy_set_header Connection "Upgrade";
-            proxy_set_header Host $host;
+            include proxy.conf;
         }

-        location /live/mse/ {
-            proxy_pass http://go2rtc/;
-            proxy_http_version 1.1;
-            proxy_set_header Upgrade $http_upgrade;
-            proxy_set_header Connection "Upgrade";
-            proxy_set_header Host $host;
+        # frigate lovelace card uses this path
+        location /live/mse/api/ws {
+            limit_except GET {
+                deny all;
+            }
+            proxy_pass http://go2rtc/api/ws;
+            include proxy.conf;
         }

-        location /live/webrtc/ {
-            proxy_pass http://go2rtc/;
-            proxy_http_version 1.1;
-            proxy_set_header Upgrade $http_upgrade;
-            proxy_set_header Connection "Upgrade";
-            proxy_set_header Host $host;
+        location /live/webrtc/api/ws {
+            limit_except GET {
+                deny all;
+            }
+            proxy_pass http://go2rtc/api/ws;
+            include proxy.conf;
+        }
+
+        # pass through go2rtc player
+        location /live/webrtc/webrtc.html {
+            limit_except GET {
+                deny all;
+            }
+            proxy_pass http://go2rtc/webrtc.html;
+            include proxy.conf;
+        }
+
+        # frontend uses this to fetch the version
+        location /api/go2rtc/api {
+            limit_except GET {
+                deny all;
+            }
+            proxy_pass http://go2rtc/api;
+            include proxy.conf;
+        }
+
+        # integration uses this to add webrtc candidate
+        location /api/go2rtc/webrtc {
+            limit_except POST {
+                deny all;
+            }
+            proxy_pass http://go2rtc/api/webrtc;
+            include proxy.conf;
         }

         location ~* /api/.*\.(jpg|jpeg|png)$ {
-            add_header 'Access-Control-Allow-Origin' '*';
-            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS';
             rewrite ^/api/(.*)$ $1 break;
             proxy_pass http://frigate_api;
-            proxy_pass_request_headers on;
-            proxy_set_header Host $host;
-            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-            proxy_set_header X-Forwarded-Proto $scheme;
+            include proxy.conf;
         }

         location /api/ {
             add_header Cache-Control "no-store";
             expires off;

-            add_header 'Access-Control-Allow-Origin' '*';
-            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS';
             proxy_pass http://frigate_api/;
-            proxy_pass_request_headers on;
-            proxy_set_header Host $host;
-            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-            proxy_set_header X-Forwarded-Proto $scheme;
+            include proxy.conf;
+
+            proxy_cache api_cache;
+            proxy_cache_lock on;
+            proxy_cache_use_stale updating;
+            proxy_cache_valid 200 5s;
+            proxy_cache_bypass $http_x_cache_bypass;
+            proxy_no_cache $should_not_cache;
+            add_header X-Cache-Status $upstream_cache_status;
+
+            location /api/vod/ {
+                proxy_pass http://frigate_api/vod/;
+                include proxy.conf;
+                proxy_cache off;
+            }
+
+            location /api/stats {
+                access_log off;
+                rewrite ^/api/(.*)$ $1 break;
+                proxy_pass http://frigate_api;
+                include proxy.conf;
+            }
+
+            location /api/version {
+                access_log off;
+                rewrite ^/api/(.*)$ $1 break;
+                proxy_pass http://frigate_api;
+                include proxy.conf;
+            }
         }

         location / {
@ -268,4 +289,4 @@ rtmp {
             meta copy;
         }
     }
 }
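A minimal sketch (illustration only) of how a client could observe the new /api/ proxy cache added above; the host/port, the /api/config endpoint, and the use of the requests library are assumptions, not part of the commit.

import requests

base = "http://127.0.0.1:5000"  # hypothetical Frigate address

first = requests.get(f"{base}/api/config")
second = requests.get(f"{base}/api/config")
# X-Cache-Status is added by the new nginx config; a MISS then HIT is expected
print(first.headers.get("X-Cache-Status"), second.headers.get("X-Cache-Status"))

# proxy_cache_bypass is keyed on $http_x_cache_bypass, so this header skips the cache
fresh = requests.get(f"{base}/api/config", headers={"X-Cache-Bypass": "1"})
print(fresh.headers.get("X-Cache-Status"))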
4  docker/main/rootfs/usr/local/nginx/conf/proxy.conf  Normal file
@ -0,0 +1,4 @@
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";
proxy_set_header Host $host;
32  docker/rockchip/Dockerfile  Normal file
@ -0,0 +1,32 @@
# syntax=docker/dockerfile:1.6

# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND=noninteractive

FROM wheels as rk-wheels
COPY docker/main/requirements-wheels.txt /requirements-wheels.txt
COPY docker/rockchip/requirements-wheels-rk.txt /requirements-wheels-rk.txt
RUN sed -i "/https:\/\//d" /requirements-wheels.txt
RUN pip3 wheel --wheel-dir=/rk-wheels -c /requirements-wheels.txt -r /requirements-wheels-rk.txt

FROM deps AS rk-deps
ARG TARGETARCH

RUN --mount=type=bind,from=rk-wheels,source=/rk-wheels,target=/deps/rk-wheels \
    pip3 install -U /deps/rk-wheels/*.whl

WORKDIR /opt/frigate/
COPY --from=rootfs / /

ADD https://github.com/MarcA711/rknpu2/releases/download/v1.5.2/librknnrt_rk356x.so /usr/lib/
ADD https://github.com/MarcA711/rknpu2/releases/download/v1.5.2/librknnrt_rk3588.so /usr/lib/

ADD https://github.com/MarcA711/rknn-models/releases/download/v1.5.2-rk3562/yolov8n-320x320-rk3562.rknn /models/rknn/
ADD https://github.com/MarcA711/rknn-models/releases/download/v1.5.2-rk3566/yolov8n-320x320-rk3566.rknn /models/rknn/
ADD https://github.com/MarcA711/rknn-models/releases/download/v1.5.2-rk3568/yolov8n-320x320-rk3568.rknn /models/rknn/
ADD https://github.com/MarcA711/rknn-models/releases/download/v1.5.2-rk3588/yolov8n-320x320-rk3588.rknn /models/rknn/

RUN rm -rf /usr/lib/btbn-ffmpeg/bin/ffmpeg
RUN rm -rf /usr/lib/btbn-ffmpeg/bin/ffprobe
ADD --chmod=111 https://github.com/MarcA711/Rockchip-FFmpeg-Builds/releases/download/6.0-1/ffmpeg /usr/lib/btbn-ffmpeg/bin/
ADD --chmod=111 https://github.com/MarcA711/Rockchip-FFmpeg-Builds/releases/download/6.0-1/ffprobe /usr/lib/btbn-ffmpeg/bin/
2  docker/rockchip/requirements-wheels-rk.txt  Normal file
@ -0,0 +1,2 @@
hide-warnings == 0.17
rknn-toolkit-lite2 @ https://github.com/MarcA711/rknn-toolkit2/releases/download/v1.5.2/rknn_toolkit_lite2-1.5.2-cp39-cp39-linux_aarch64.whl
34  docker/rockchip/rk.hcl  Normal file
@ -0,0 +1,34 @@
target wget {
  dockerfile = "docker/main/Dockerfile"
  platforms = ["linux/arm64"]
  target = "wget"
}

target wheels {
  dockerfile = "docker/main/Dockerfile"
  platforms = ["linux/arm64"]
  target = "wheels"
}

target deps {
  dockerfile = "docker/main/Dockerfile"
  platforms = ["linux/arm64"]
  target = "deps"
}

target rootfs {
  dockerfile = "docker/main/Dockerfile"
  platforms = ["linux/arm64"]
  target = "rootfs"
}

target rk {
  dockerfile = "docker/rockchip/Dockerfile"
  contexts = {
    wget = "target:wget",
    wheels = "target:wheels",
    deps = "target:deps",
    rootfs = "target:rootfs"
  }
  platforms = ["linux/arm64"]
}
10  docker/rockchip/rk.mk  Normal file
@ -0,0 +1,10 @@
BOARDS += rk

local-rk: version
	docker buildx bake --load --file=docker/rockchip/rk.hcl --set rk.tags=frigate:latest-rk rk

build-rk: version
	docker buildx bake --file=docker/rockchip/rk.hcl --set rk.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rk rk

push-rk: build-rk
	docker buildx bake --push --file=docker/rockchip/rk.hcl --set rk.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rk rk
@ -1,18 +0,0 @@
-#!/command/with-contenv bash
-# shellcheck shell=bash
-# Start the Frigate service
-
-set -o errexit -o nounset -o pipefail
-
-# Logs should be sent to stdout so that s6 can collect them
-
-# Tell S6-Overlay not to restart this service
-s6-svc -O .
-
-echo "[INFO] Starting Frigate..."
-
-cd /opt/frigate || echo "[ERROR] Failed to change working directory to /opt/frigate"
-
-# Replace the bash process with the Frigate process, redirecting stderr to stdout
-exec 2>&1
-exec python3 -u -m frigate
16  docker/rpi/Dockerfile  Normal file
@ -0,0 +1,16 @@
# syntax=docker/dockerfile:1.4

# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND=noninteractive

FROM deps AS rpi-deps
ARG TARGETARCH

RUN rm -rf /usr/lib/btbn-ffmpeg/

# Install dependencies
RUN --mount=type=bind,source=docker/rpi/install_deps.sh,target=/deps/install_deps.sh \
    /deps/install_deps.sh

WORKDIR /opt/frigate/
COPY --from=rootfs / /
30  docker/rpi/install_deps.sh  Executable file
@ -0,0 +1,30 @@
#!/bin/bash

set -euxo pipefail

apt-get -qq update

apt-get -qq install --no-install-recommends -y \
    apt-transport-https \
    gnupg \
    wget \
    procps vainfo \
    unzip locales tzdata libxml2 xz-utils \
    python3-pip \
    curl \
    jq \
    nethogs

mkdir -p -m 600 /root/.gnupg

# enable non-free repo
sed -i -e's/ main/ main contrib non-free/g' /etc/apt/sources.list

# ffmpeg -> arm64
if [[ "${TARGETARCH}" == "arm64" ]]; then
    # add raspberry pi repo
    gpg --no-default-keyring --keyring /usr/share/keyrings/raspbian.gpg --keyserver keyserver.ubuntu.com --recv-keys 82B129927FA3303E
    echo "deb [signed-by=/usr/share/keyrings/raspbian.gpg] https://archive.raspberrypi.org/debian/ bullseye main" | tee /etc/apt/sources.list.d/raspi.list
    apt-get -qq update
    apt-get -qq install --no-install-recommends --no-install-suggests -y ffmpeg
fi
20  docker/rpi/rpi.hcl  Normal file
@ -0,0 +1,20 @@
target deps {
  dockerfile = "docker/main/Dockerfile"
  platforms = ["linux/arm64"]
  target = "deps"
}

target rootfs {
  dockerfile = "docker/main/Dockerfile"
  platforms = ["linux/arm64"]
  target = "rootfs"
}

target rpi {
  dockerfile = "docker/rpi/Dockerfile"
  contexts = {
    deps = "target:deps",
    rootfs = "target:rootfs"
  }
  platforms = ["linux/arm64"]
}
10  docker/rpi/rpi.mk  Normal file
@ -0,0 +1,10 @@
BOARDS += rpi

local-rpi: version
	docker buildx bake --load --file=docker/rpi/rpi.hcl --set rpi.tags=frigate:latest-rpi rpi

build-rpi: version
	docker buildx bake --file=docker/rpi/rpi.hcl --set rpi.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rpi rpi

push-rpi: build-rpi
	docker buildx bake --push --file=docker/rpi/rpi.hcl --set rpi.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rpi rpi
32  docker/tensorrt/Dockerfile.amd64  Normal file
@ -0,0 +1,32 @@
# syntax=docker/dockerfile:1.4

# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND=noninteractive

# Make this a separate target so it can be built/cached optionally
FROM wheels as trt-wheels
ARG DEBIAN_FRONTEND
ARG TARGETARCH

# Add TensorRT wheels to another folder
COPY docker/tensorrt/requirements-amd64.txt /requirements-tensorrt.txt
RUN mkdir -p /trt-wheels && pip3 wheel --wheel-dir=/trt-wheels -r /requirements-tensorrt.txt

FROM tensorrt-base AS frigate-tensorrt
ENV TRT_VER=8.5.3
RUN --mount=type=bind,from=trt-wheels,source=/trt-wheels,target=/deps/trt-wheels \
    pip3 install -U /deps/trt-wheels/*.whl && \
    ldconfig

WORKDIR /opt/frigate/
COPY --from=rootfs / /

# Dev Container w/ TRT
FROM devcontainer AS devcontainer-trt

COPY --from=trt-deps /usr/local/lib/libyolo_layer.so /usr/local/lib/libyolo_layer.so
COPY --from=trt-deps /usr/local/src/tensorrt_demos /usr/local/src/tensorrt_demos
COPY docker/tensorrt/detector/rootfs/ /
COPY --from=trt-deps /usr/local/lib/libyolo_layer.so /usr/local/lib/libyolo_layer.so
RUN --mount=type=bind,from=trt-wheels,source=/trt-wheels,target=/deps/trt-wheels \
    pip3 install -U /deps/trt-wheels/*.whl
79  docker/tensorrt/Dockerfile.arm64  Normal file
@ -0,0 +1,79 @@
# syntax=docker/dockerfile:1.4

# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND=noninteractive

ARG BASE_IMAGE
FROM ${BASE_IMAGE} AS build-wheels
ARG DEBIAN_FRONTEND

# Use a separate container to build wheels to prevent build dependencies in final image
RUN apt-get -qq update \
    && apt-get -qq install -y --no-install-recommends \
    python3.9 python3.9-dev \
    wget build-essential cmake git \
    && rm -rf /var/lib/apt/lists/*

# Ensure python3 defaults to python3.9
RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.9 1

RUN wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \
    && python3 get-pip.py "pip"


FROM build-wheels AS trt-wheels
ARG DEBIAN_FRONTEND
ARG TARGETARCH

# python-tensorrt build deps are 3.4 GB!
RUN apt-get update \
    && apt-get install -y ccache cuda-cudart-dev-* cuda-nvcc-* libnvonnxparsers-dev libnvparsers-dev libnvinfer-plugin-dev \
    && ([ -e /usr/local/cuda ] || ln -s /usr/local/cuda-* /usr/local/cuda) \
    && rm -rf /var/lib/apt/lists/*;

# Determine version of tensorrt already installed in base image, e.g. "Version: 8.4.1-1+cuda11.4"
RUN NVINFER_VER=$(dpkg -s libnvinfer8 | grep -Po "Version: \K.*") \
    && echo $NVINFER_VER | grep -Po "^\d+\.\d+\.\d+" > /etc/TENSORRT_VER

RUN --mount=type=bind,source=docker/tensorrt/detector/build_python_tensorrt.sh,target=/deps/build_python_tensorrt.sh \
    --mount=type=cache,target=/root/.ccache \
    export PATH="/usr/lib/ccache:$PATH" CCACHE_DIR=/root/.ccache CCACHE_MAXSIZE=2G \
    && TENSORRT_VER=$(cat /etc/TENSORRT_VER) /deps/build_python_tensorrt.sh

COPY docker/tensorrt/requirements-arm64.txt /requirements-tensorrt.txt
RUN pip3 wheel --wheel-dir=/trt-wheels -r /requirements-tensorrt.txt

FROM build-wheels AS trt-model-wheels
ARG DEBIAN_FRONTEND

RUN apt-get update \
    && apt-get install -y protobuf-compiler libprotobuf-dev \
    && rm -rf /var/lib/apt/lists/*
RUN --mount=type=bind,source=docker/tensorrt/requirements-models-arm64.txt,target=/requirements-tensorrt-models.txt \
    pip3 wheel --wheel-dir=/trt-model-wheels -r /requirements-tensorrt-models.txt

FROM wget AS jetson-ffmpeg
ARG DEBIAN_FRONTEND
ENV CCACHE_DIR /root/.ccache
ENV CCACHE_MAXSIZE 2G
RUN --mount=type=bind,source=docker/tensorrt/build_jetson_ffmpeg.sh,target=/deps/build_jetson_ffmpeg.sh \
    --mount=type=cache,target=/root/.ccache \
    /deps/build_jetson_ffmpeg.sh

# Frigate w/ TensorRT for NVIDIA Jetson platforms
FROM tensorrt-base AS frigate-tensorrt
RUN apt-get update \
    && apt-get install -y python-is-python3 libprotobuf17 \
    && rm -rf /var/lib/apt/lists/*

RUN rm -rf /usr/lib/btbn-ffmpeg/
COPY --from=jetson-ffmpeg /rootfs /

COPY --from=trt-wheels /etc/TENSORRT_VER /etc/TENSORRT_VER
RUN --mount=type=bind,from=trt-wheels,source=/trt-wheels,target=/deps/trt-wheels \
    --mount=type=bind,from=trt-model-wheels,source=/trt-model-wheels,target=/deps/trt-model-wheels \
    pip3 install -U /deps/trt-wheels/*.whl /deps/trt-model-wheels/*.whl \
    && ldconfig

WORKDIR /opt/frigate/
COPY --from=rootfs / /
29  docker/tensorrt/Dockerfile.base  Normal file
@ -0,0 +1,29 @@
# syntax=docker/dockerfile:1.6

# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND=noninteractive

ARG TRT_BASE=nvcr.io/nvidia/tensorrt:23.03-py3

# Build TensorRT-specific library
FROM ${TRT_BASE} AS trt-deps

RUN apt-get update \
    && apt-get install -y git build-essential cuda-nvcc-* cuda-nvtx-* libnvinfer-dev libnvinfer-plugin-dev libnvparsers-dev libnvonnxparsers-dev \
    && rm -rf /var/lib/apt/lists/*
RUN --mount=type=bind,source=docker/tensorrt/detector/tensorrt_libyolo.sh,target=/tensorrt_libyolo.sh \
    /tensorrt_libyolo.sh

# Frigate w/ TensorRT Support as separate image
FROM deps AS tensorrt-base

#Disable S6 Global timeout
ENV S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0

COPY --from=trt-deps /usr/local/lib/libyolo_layer.so /usr/local/lib/libyolo_layer.so
COPY --from=trt-deps /usr/local/src/tensorrt_demos /usr/local/src/tensorrt_demos
COPY docker/tensorrt/detector/rootfs/ /
ENV YOLO_MODELS="yolov7-320"

HEALTHCHECK --start-period=600s --start-interval=5s --interval=15s --timeout=5s --retries=3 \
    CMD curl --fail --silent --show-error http://127.0.0.1:5000/api/version || exit 1
59  docker/tensorrt/build_jetson_ffmpeg.sh  Executable file
@ -0,0 +1,59 @@
#!/bin/bash

# For jetson platforms, build ffmpeg with custom patches. NVIDIA supplies a deb
# with accelerated decoding, but it doesn't have accelerated scaling or encoding

set -euxo pipefail

INSTALL_PREFIX=/rootfs/usr/local

apt-get -qq update
apt-get -qq install -y --no-install-recommends build-essential ccache clang cmake pkg-config
apt-get -qq install -y --no-install-recommends libx264-dev libx265-dev

pushd /tmp

# Install libnvmpi to enable nvmpi decoders (h264_nvmpi, hevc_nvmpi)
if [ -e /usr/local/cuda-10.2 ]; then
    # assume Jetpack 4.X
    wget -q https://developer.nvidia.com/embedded/L4T/r32_Release_v5.0/T186/Jetson_Multimedia_API_R32.5.0_aarch64.tbz2 -O jetson_multimedia_api.tbz2
else
    # assume Jetpack 5.X
    wget -q https://developer.nvidia.com/downloads/embedded/l4t/r35_release_v3.1/release/jetson_multimedia_api_r35.3.1_aarch64.tbz2 -O jetson_multimedia_api.tbz2
fi
tar xaf jetson_multimedia_api.tbz2 -C / && rm jetson_multimedia_api.tbz2

wget -q https://github.com/AndBobsYourUncle/jetson-ffmpeg/archive/9c17b09.zip -O jetson-ffmpeg.zip
unzip jetson-ffmpeg.zip && rm jetson-ffmpeg.zip && mv jetson-ffmpeg-* jetson-ffmpeg && cd jetson-ffmpeg
LD_LIBRARY_PATH=$(pwd)/stubs:$LD_LIBRARY_PATH # tegra multimedia libs aren't available in image, so use stubs for ffmpeg build
mkdir build
cd build
cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX
make -j$(nproc)
make install
cd ../../

# Install nv-codec-headers to enable ffnvcodec filters (scale_cuda)
wget -q https://github.com/FFmpeg/nv-codec-headers/archive/refs/heads/master.zip
unzip master.zip && rm master.zip && cd nv-codec-headers-master
make PREFIX=$INSTALL_PREFIX install
cd ../ && rm -rf nv-codec-headers-master

# Build ffmpeg with nvmpi patch
wget -q https://ffmpeg.org/releases/ffmpeg-6.0.tar.xz
tar xaf ffmpeg-*.tar.xz && rm ffmpeg-*.tar.xz && cd ffmpeg-*
patch -p1 < ../jetson-ffmpeg/ffmpeg_patches/ffmpeg6.0_nvmpi.patch
export PKG_CONFIG_PATH=$INSTALL_PREFIX/lib/pkgconfig
# enable Jetson codecs but disable dGPU codecs
./configure --cc='ccache gcc' --cxx='ccache g++' \
    --enable-shared --disable-static --prefix=$INSTALL_PREFIX \
    --enable-gpl --enable-libx264 --enable-libx265 \
    --enable-nvmpi --enable-ffnvcodec --enable-cuda-llvm \
    --disable-cuvid --disable-nvenc --disable-nvdec \
    || { cat ffbuild/config.log && false; }
make -j$(nproc)
make install
cd ../

rm -rf /var/lib/apt/lists/*
popd
28  docker/tensorrt/detector/build_python_tensorrt.sh  Executable file
@ -0,0 +1,28 @@
#!/bin/bash

set -euxo pipefail

mkdir -p /trt-wheels

if [[ "${TARGETARCH}" == "arm64" ]]; then

    # NVIDIA supplies python-tensorrt for python3.8, but frigate uses python3.9,
    # so we must build python-tensorrt ourselves.

    # Get python-tensorrt source
    mkdir /workspace
    cd /workspace
    git clone -b ${TENSORRT_VER} https://github.com/NVIDIA/TensorRT.git --depth=1

    # Collect dependencies
    EXT_PATH=/workspace/external && mkdir -p $EXT_PATH
    pip3 install pybind11 && ln -s /usr/local/lib/python3.9/dist-packages/pybind11 $EXT_PATH/pybind11
    ln -s /usr/include/python3.9 $EXT_PATH/python3.9
    ln -s /usr/include/aarch64-linux-gnu/NvOnnxParser.h /workspace/TensorRT/parsers/onnx/

    # Build wheel
    cd /workspace/TensorRT/python
    EXT_PATH=$EXT_PATH PYTHON_MAJOR_VERSION=3 PYTHON_MINOR_VERSION=9 TARGET_ARCHITECTURE=aarch64 /bin/bash ./build.sh
    mv build/dist/*.whl /trt-wheels/

fi
@ -1,3 +1,4 @@
+/usr/local/lib
 /usr/local/lib/python3.9/dist-packages/nvidia/cudnn/lib
 /usr/local/lib/python3.9/dist-packages/nvidia/cuda_runtime/lib
 /usr/local/lib/python3.9/dist-packages/nvidia/cublas/lib
109
docker/tensorrt/detector/rootfs/etc/s6-overlay/s6-rc.d/trt-model-prepare/run
Executable file
109
docker/tensorrt/detector/rootfs/etc/s6-overlay/s6-rc.d/trt-model-prepare/run
Executable file
@ -0,0 +1,109 @@
#!/command/with-contenv bash
# shellcheck shell=bash
# Generate models for the TensorRT detector

# One or more comma-separated models may be specified via the YOLO_MODELS env.
# Append "-dla" to the model name to generate a DLA model with GPU fallback;
# otherwise a GPU-only model will be generated.

set -o errexit -o nounset -o pipefail

MODEL_CACHE_DIR=${MODEL_CACHE_DIR:-"/config/model_cache/tensorrt"}
TRT_VER=${TRT_VER:-$(cat /etc/TENSORRT_VER)}
OUTPUT_FOLDER="${MODEL_CACHE_DIR}/${TRT_VER}"

# Create output folder
mkdir -p ${OUTPUT_FOLDER}

FIRST_MODEL=true
MODEL_DOWNLOAD=""
MODEL_CONVERT=""

for model in ${YOLO_MODELS//,/ }
do
    # Remove old link in case path/version changed
    rm -f ${MODEL_CACHE_DIR}/${model}.trt

    if [[ ! -f ${OUTPUT_FOLDER}/${model}.trt ]]; then
        if [[ ${FIRST_MODEL} = true ]]; then
            MODEL_DOWNLOAD="${model%-dla}";
            MODEL_CONVERT="${model}"
            FIRST_MODEL=false;
        else
            MODEL_DOWNLOAD+=",${model%-dla}";
            MODEL_CONVERT+=",${model}";
        fi
    else
        ln -s ${OUTPUT_FOLDER}/${model}.trt ${MODEL_CACHE_DIR}/${model}.trt
    fi
done

if [[ -z ${MODEL_CONVERT} ]]; then
    echo "No models to convert."
    exit 0
fi

# Setup ENV to select GPU for conversion
if [ ! -z ${TRT_MODEL_PREP_DEVICE+x} ]; then
    if [ ! -z ${CUDA_VISIBLE_DEVICES+x} ]; then
        PREVIOUS_CVD="$CUDA_VISIBLE_DEVICES"
        unset CUDA_VISIBLE_DEVICES
    fi
    export CUDA_VISIBLE_DEVICES="$TRT_MODEL_PREP_DEVICE"
fi

# On Jetpack 4.6, the nvidia container runtime will mount several host nvidia libraries into the
# container which should not be present in the image - if they are, TRT model generation will
# fail or produce invalid models. Thus we must request the user to install them on the host in
# order to run libyolo here.
# On Jetpack 5.0, these libraries are not mounted by the runtime and are supplied by the image.
if [[ "$(arch)" == "aarch64" ]]; then
    if [[ ! -e /usr/lib/aarch64-linux-gnu/tegra ]]; then
        echo "ERROR: Container must be launched with nvidia runtime"
        exit 1
    elif [[ ! -e /usr/lib/aarch64-linux-gnu/libnvinfer.so.8 ||
            ! -e /usr/lib/aarch64-linux-gnu/libnvinfer_plugin.so.8 ||
            ! -e /usr/lib/aarch64-linux-gnu/libnvparsers.so.8 ||
            ! -e /usr/lib/aarch64-linux-gnu/libnvonnxparser.so.8 ]]; then
        echo "ERROR: Please run the following on the HOST:"
        echo "  sudo apt install libnvinfer8 libnvinfer-plugin8 libnvparsers8 libnvonnxparsers8 nvidia-container"
        exit 1
    fi
fi

echo "Generating the following TRT Models: ${MODEL_CONVERT}"

# Build trt engine
cd /usr/local/src/tensorrt_demos/yolo

echo "Downloading yolo weights"
./download_yolo.sh $MODEL_DOWNLOAD 2> /dev/null

for model in ${MODEL_CONVERT//,/ }
do
    python3 yolo_to_onnx.py -m ${model%-dla} > /dev/null

    echo -e "\nGenerating ${model}.trt. This may take a few minutes.\n"; start=$(date +%s)
    if [[ $model == *-dla ]]; then
        cmd="python3 onnx_to_tensorrt.py -m ${model%-dla} --dla_core 0"
    else
        cmd="python3 onnx_to_tensorrt.py -m ${model}"
    fi
    $cmd > /tmp/onnx_to_tensorrt.log || { cat /tmp/onnx_to_tensorrt.log && continue; }

    mv ${model%-dla}.trt ${OUTPUT_FOLDER}/${model}.trt;
    ln -s ${OUTPUT_FOLDER}/${model}.trt ${MODEL_CACHE_DIR}/${model}.trt
    echo "Generated ${model}.trt in $(($(date +%s)-start)) seconds"
done

# Restore ENV after conversion
if [ ! -z ${TRT_MODEL_PREP_DEVICE+x} ]; then
    unset CUDA_VISIBLE_DEVICES
    if [ ! -z ${PREVIOUS_CVD+x} ]; then
        export CUDA_VISIBLE_DEVICES="$PREVIOUS_CVD"
    fi
fi

# Print which models exist in output folder
echo "Available tensorrt models:"
cd ${OUTPUT_FOLDER} && ls *.trt;
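As a usage sketch, the environment this service reads could be provided like this (model names are illustrative; any model supported by download_yolo.sh should work):

# hypothetical values for the variables consumed by the script above
export YOLO_MODELS="yolov7-320,yolov4-tiny-416-dla"   # "-dla" suffix requests a DLA engine with GPU fallback
export TRT_MODEL_PREP_DEVICE=0                        # optional: pin model conversion to a single GPU
export MODEL_CACHE_DIR=/config/model_cache/tensorrt   # default shown above; engines are cached per TensorRT version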
@ -0,0 +1 @@
oneshot
@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/trt-model-prepare/run
21
docker/tensorrt/detector/tensorrt_libyolo.sh
Executable file
@ -0,0 +1,21 @@
#!/bin/bash

set -euxo pipefail

SCRIPT_DIR="/usr/local/src/tensorrt_demos"

# Clone tensorrt_demos repo
git clone --depth 1 https://github.com/NateMeyer/tensorrt_demos.git -b conditional_download

# Build libyolo
if [ ! -e /usr/local/cuda ]; then
    ln -s /usr/local/cuda-* /usr/local/cuda
fi
cd ./tensorrt_demos/plugins && make all -j$(nproc)
cp libyolo_layer.so /usr/local/lib/libyolo_layer.so

# Store yolo scripts for later conversion
cd ../
mkdir -p ${SCRIPT_DIR}/plugins
cp plugins/libyolo_layer.so ${SCRIPT_DIR}/plugins/libyolo_layer.so
cp -a yolo ${SCRIPT_DIR}/
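For reference, a sketch of the layout this script leaves behind (paths taken from the commands above), which the trt-model-prepare service later relies on:

ls /usr/local/lib/libyolo_layer.so                          # the YOLO layer TensorRT plugin built above
ls /usr/local/src/tensorrt_demos/plugins/libyolo_layer.so   # copy kept alongside the conversion scripts
ls -d /usr/local/src/tensorrt_demos/yolo                    # yolo_to_onnx.py / onnx_to_tensorrt.py used by trt-model-prepare/run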
12
docker/tensorrt/requirements-amd64.txt
Normal file
@ -0,0 +1,12 @@
# NVidia TensorRT Support (amd64 only)
--extra-index-url 'https://pypi.nvidia.com'
numpy < 1.24; platform_machine == 'x86_64'
tensorrt == 8.5.3.*; platform_machine == 'x86_64'
cuda-python == 11.8; platform_machine == 'x86_64'
cython == 0.29.*; platform_machine == 'x86_64'
nvidia-cuda-runtime-cu12 == 12.1.*; platform_machine == 'x86_64'
nvidia-cuda-runtime-cu11 == 11.8.*; platform_machine == 'x86_64'
nvidia-cublas-cu11 == 11.11.3.6; platform_machine == 'x86_64'
nvidia-cudnn-cu11 == 8.6.0.*; platform_machine == 'x86_64'
onnx==1.14.0; platform_machine == 'x86_64'
protobuf==3.20.3; platform_machine == 'x86_64'
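A minimal sketch of consuming this file (pip reads the embedded --extra-index-url itself; on non-x86_64 machines the environment markers make every entry a no-op):

pip3 install -r docker/tensorrt/requirements-amd64.txt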
1
docker/tensorrt/requirements-arm64.txt
Normal file
@ -0,0 +1 @@
cuda-python == 11.7; platform_machine == 'aarch64'
3
docker/tensorrt/requirements-models-arm64.txt
Normal file
@ -0,0 +1,3 @@
onnx == 1.14.0; platform_machine == 'aarch64'
protobuf == 3.20.3; platform_machine == 'aarch64'
numpy == 1.23.*; platform_machine == 'aarch64' # required by python-tensorrt 8.2.1 (Jetpack 4.6)
94
docker/tensorrt/trt.hcl
Normal file
@ -0,0 +1,94 @@
variable "ARCH" {
  default = "amd64"
}
variable "BASE_IMAGE" {
  default = null
}
variable "SLIM_BASE" {
  default = null
}
variable "TRT_BASE" {
  default = null
}

target "_build_args" {
  args = {
    BASE_IMAGE = BASE_IMAGE,
    SLIM_BASE = SLIM_BASE,
    TRT_BASE = TRT_BASE
  }
  platforms = ["linux/${ARCH}"]
}

target wget {
  dockerfile = "docker/main/Dockerfile"
  target = "wget"
  inherits = ["_build_args"]
}

target deps {
  dockerfile = "docker/main/Dockerfile"
  target = "deps"
  inherits = ["_build_args"]
}

target rootfs {
  dockerfile = "docker/main/Dockerfile"
  target = "rootfs"
  inherits = ["_build_args"]
}

target wheels {
  dockerfile = "docker/main/Dockerfile"
  target = "wheels"
  inherits = ["_build_args"]
}

target devcontainer {
  dockerfile = "docker/main/Dockerfile"
  platforms = ["linux/amd64"]
  target = "devcontainer"
}

target "trt-deps" {
  dockerfile = "docker/tensorrt/Dockerfile.base"
  context = "."
  contexts = {
    deps = "target:deps",
  }
  inherits = ["_build_args"]
}

target "tensorrt-base" {
  dockerfile = "docker/tensorrt/Dockerfile.base"
  context = "."
  contexts = {
    deps = "target:deps",
  }
  inherits = ["_build_args"]
}

target "tensorrt" {
  dockerfile = "docker/tensorrt/Dockerfile.${ARCH}"
  context = "."
  contexts = {
    wget = "target:wget",
    tensorrt-base = "target:tensorrt-base",
    rootfs = "target:rootfs"
    wheels = "target:wheels"
  }
  target = "frigate-tensorrt"
  inherits = ["_build_args"]
}

target "devcontainer-trt" {
  dockerfile = "docker/tensorrt/Dockerfile.amd64"
  context = "."
  contexts = {
    wheels = "target:wheels",
    trt-deps = "target:trt-deps",
    devcontainer = "target:devcontainer"
  }
  platforms = ["linux/amd64"]
  target = "devcontainer-trt"
}
26
docker/tensorrt/trt.mk
Normal file
@ -0,0 +1,26 @@
BOARDS += trt

JETPACK4_BASE ?= timongentzsch/l4t-ubuntu20-opencv:latest # L4T 32.7.1 JetPack 4.6.1
JETPACK5_BASE ?= nvcr.io/nvidia/l4t-tensorrt:r8.5.2-runtime # L4T 35.3.1 JetPack 5.1.1
X86_DGPU_ARGS := ARCH=amd64
JETPACK4_ARGS := ARCH=arm64 BASE_IMAGE=$(JETPACK4_BASE) SLIM_BASE=$(JETPACK4_BASE) TRT_BASE=$(JETPACK4_BASE)
JETPACK5_ARGS := ARCH=arm64 BASE_IMAGE=$(JETPACK5_BASE) SLIM_BASE=$(JETPACK5_BASE) TRT_BASE=$(JETPACK5_BASE)

local-trt: version
	$(X86_DGPU_ARGS) docker buildx bake --load --file=docker/tensorrt/trt.hcl --set tensorrt.tags=frigate:latest-tensorrt tensorrt

local-trt-jp4: version
	$(JETPACK4_ARGS) docker buildx bake --load --file=docker/tensorrt/trt.hcl --set tensorrt.tags=frigate:latest-tensorrt-jp4 tensorrt

local-trt-jp5: version
	$(JETPACK5_ARGS) docker buildx bake --load --file=docker/tensorrt/trt.hcl --set tensorrt.tags=frigate:latest-tensorrt-jp5 tensorrt

build-trt:
	$(X86_DGPU_ARGS) docker buildx bake --file=docker/tensorrt/trt.hcl --set tensorrt.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-tensorrt tensorrt
	$(JETPACK4_ARGS) docker buildx bake --file=docker/tensorrt/trt.hcl --set tensorrt.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-tensorrt-jp4 tensorrt
	$(JETPACK5_ARGS) docker buildx bake --file=docker/tensorrt/trt.hcl --set tensorrt.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-tensorrt-jp5 tensorrt

push-trt: build-trt
	$(X86_DGPU_ARGS) docker buildx bake --push --file=docker/tensorrt/trt.hcl --set tensorrt.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-tensorrt tensorrt
	$(JETPACK4_ARGS) docker buildx bake --push --file=docker/tensorrt/trt.hcl --set tensorrt.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-tensorrt-jp4 tensorrt
	$(JETPACK5_ARGS) docker buildx bake --push --file=docker/tensorrt/trt.hcl --set tensorrt.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-tensorrt-jp5 tensorrt
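A usage sketch of the make targets defined above, run from the repository root (the build-trt/push-trt targets additionally assume IMAGE_REPO, GITHUB_REF_NAME and COMMIT_HASH are set by the surrounding Makefile or CI):

make local-trt        # x86/dGPU image, tagged frigate:latest-tensorrt
make local-trt-jp4    # Jetson JetPack 4.6 variant
make local-trt-jp5    # Jetson JetPack 5 variant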
@ -1,34 +0,0 @@
#!/bin/bash

set -euxo pipefail

CUDA_HOME=/usr/local/cuda
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/cuda/lib64:/usr/local/cuda/extras/CUPTI/lib64
OUTPUT_FOLDER=/tensorrt_models
echo "Generating the following TRT Models: ${YOLO_MODELS:="yolov4-tiny-288,yolov4-tiny-416,yolov7-tiny-416"}"

# Create output folder
mkdir -p ${OUTPUT_FOLDER}

# Install packages
pip install --upgrade pip && pip install onnx==1.9.0 protobuf==3.20.3

# Clone tensorrt_demos repo
git clone --depth 1 https://github.com/yeahme49/tensorrt_demos.git /tensorrt_demos

# Build libyolo
cd /tensorrt_demos/plugins && make all
cp libyolo_layer.so ${OUTPUT_FOLDER}/libyolo_layer.so

# Download yolo weights
cd /tensorrt_demos/yolo && ./download_yolo.sh

# Build trt engine
cd /tensorrt_demos/yolo

for model in ${YOLO_MODELS//,/ }
do
    python3 yolo_to_onnx.py -m ${model}
    python3 onnx_to_tensorrt.py -m ${model}
    cp /tensorrt_demos/yolo/${model}.trt ${OUTPUT_FOLDER}/${model}.trt;
done
Some files were not shown because too many files have changed in this diff