Mirror of https://github.com/blakeblackshear/frigate.git (synced 2025-09-10 17:51:45 +02:00)

Commit 3193651977: Merge remote-tracking branch 'upstream/dev' into dev
.github/workflows/ci.yml | 16 (vendored)

@@ -7,7 +7,7 @@ on:
 - dev
 - master
 paths-ignore:
-- 'docs/**'
+- "docs/**"

 # only run the latest commit to avoid cache overwrites
 concurrency:

@@ -24,6 +24,8 @@ jobs:
 steps:
 - name: Check out code
 uses: actions/checkout@v4
+with:
+persist-credentials: false
 - name: Set up QEMU and Buildx
 id: setup
 uses: ./.github/actions/setup

@@ -45,6 +47,8 @@ jobs:
 steps:
 - name: Check out code
 uses: actions/checkout@v4
+with:
+persist-credentials: false
 - name: Set up QEMU and Buildx
 id: setup
 uses: ./.github/actions/setup

@@ -86,6 +90,8 @@ jobs:
 steps:
 - name: Check out code
 uses: actions/checkout@v4
+with:
+persist-credentials: false
 - name: Set up QEMU and Buildx
 id: setup
 uses: ./.github/actions/setup

@@ -112,6 +118,8 @@ jobs:
 steps:
 - name: Check out code
 uses: actions/checkout@v4
+with:
+persist-credentials: false
 - name: Set up QEMU and Buildx
 id: setup
 uses: ./.github/actions/setup

@@ -140,6 +148,8 @@ jobs:
 steps:
 - name: Check out code
 uses: actions/checkout@v4
+with:
+persist-credentials: false
 - name: Set up QEMU and Buildx
 id: setup
 uses: ./.github/actions/setup

@@ -165,6 +175,8 @@ jobs:
 steps:
 - name: Check out code
 uses: actions/checkout@v4
+with:
+persist-credentials: false
 - name: Set up QEMU and Buildx
 id: setup
 uses: ./.github/actions/setup

@@ -188,6 +200,8 @@ jobs:
 steps:
 - name: Check out code
 uses: actions/checkout@v4
+with:
+persist-credentials: false
 - name: Set up QEMU and Buildx
 id: setup
 uses: ./.github/actions/setup
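Note: every checkout step above gains `persist-credentials: false`, the standard actions/checkout hardening that keeps the workflow token from being written into the checked-out workspace's local git config. A minimal sketch of the resulting step (job context omitted, step name illustrative):

```yaml
steps:
  - name: Check out code
    uses: actions/checkout@v4
    with:
      # do not leave the workflow token in .git/config after checkout
      persist-credentials: false
```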
.github/workflows/dependabot-auto-merge.yaml | 24 (vendored, deleted)

@@ -1,24 +0,0 @@
-name: dependabot-auto-merge
-on: pull_request
-
-permissions:
-contents: write
-
-jobs:
-dependabot-auto-merge:
-runs-on: ubuntu-latest
-if: github.actor == 'dependabot[bot]'
-steps:
-- name: Get Dependabot metadata
-id: metadata
-uses: dependabot/fetch-metadata@v2
-with:
-github-token: ${{ secrets.GITHUB_TOKEN }}
-- name: Enable auto-merge for Dependabot PRs
-if: steps.metadata.outputs.dependency-type == 'direct:development' && (steps.metadata.outputs.update-type == 'version-update:semver-minor' || steps.metadata.outputs.update-type == 'version-update:semver-patch')
-run: |
-gh pr review --approve "$PR_URL"
-gh pr merge --auto --squash "$PR_URL"
-env:
-PR_URL: ${{ github.event.pull_request.html_url }}
-GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/pull_request.yml | 12 (vendored)

@@ -3,7 +3,7 @@ name: On pull request
 on:
 pull_request:
 paths-ignore:
-- 'docs/**'
+- "docs/**"

 env:
 DEFAULT_PYTHON: 3.9

@@ -19,6 +19,8 @@ jobs:
 DOCKER_BUILDKIT: "1"
 steps:
 - uses: actions/checkout@v4
+with:
+persist-credentials: false
 - uses: actions/setup-node@master
 with:
 node-version: 16.x

@@ -38,6 +40,8 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v4
+with:
+persist-credentials: false
 - uses: actions/setup-node@master
 with:
 node-version: 16.x

@@ -52,6 +56,8 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v4
+with:
+persist-credentials: false
 - uses: actions/setup-node@master
 with:
 node-version: 20.x

@@ -67,6 +73,8 @@ jobs:
 steps:
 - name: Check out the repository
 uses: actions/checkout@v4
+with:
+persist-credentials: false
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 uses: actions/setup-python@v5.1.0
 with:

@@ -88,6 +96,8 @@ jobs:
 steps:
 - name: Check out code
 uses: actions/checkout@v4
+with:
+persist-credentials: false
 - uses: actions/setup-node@master
 with:
 node-version: 16.x
.github/workflows/release.yml | 9 (vendored)

@@ -11,6 +11,8 @@ jobs:

 steps:
 - uses: actions/checkout@v4
+with:
+persist-credentials: false
 - id: lowercaseRepo
 uses: ASzc/change-string-case-action@v6
 with:

@@ -22,10 +24,13 @@ jobs:
 username: ${{ github.actor }}
 password: ${{ secrets.GITHUB_TOKEN }}
 - name: Create tag variables
+env:
+TAG: ${{ github.ref_name }}
+LOWERCASE_REPO: ${{ steps.lowercaseRepo.outputs.lowercase }}
 run: |
-BUILD_TYPE=$([[ "${{ github.ref_name }}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && echo "stable" || echo "beta")
+BUILD_TYPE=$([[ "${TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && echo "stable" || echo "beta")
 echo "BUILD_TYPE=${BUILD_TYPE}" >> $GITHUB_ENV
-echo "BASE=ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}" >> $GITHUB_ENV
+echo "BASE=ghcr.io/${LOWERCASE_REPO}" >> $GITHUB_ENV
 echo "BUILD_TAG=${GITHUB_SHA::7}" >> $GITHUB_ENV
 echo "CLEAN_VERSION=$(echo ${GITHUB_REF##*/} | tr '[:upper:]' '[:lower:]' | sed 's/^[v]//')" >> $GITHUB_ENV
 - name: Tag and push the main image
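Note: besides the `persist-credentials: false` hardening, this hunk moves the `${{ }}` expressions out of the shell script and into `env:`, so tag and repository names reach the script only as environment variables instead of being interpolated into the script text (the usual mitigation for script injection in workflows). Condensed from the hunk above:

```yaml
- name: Create tag variables
  env:
    TAG: ${{ github.ref_name }}
    LOWERCASE_REPO: ${{ steps.lowercaseRepo.outputs.lowercase }}
  run: |
    # expressions are expanded into env vars first, never pasted into the script body
    BUILD_TYPE=$([[ "${TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && echo "stable" || echo "beta")
    echo "BUILD_TYPE=${BUILD_TYPE}" >> "$GITHUB_ENV"
    echo "BASE=ghcr.io/${LOWERCASE_REPO}" >> "$GITHUB_ENV"
```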
.github/workflows/stale.yml | 5 (vendored)

@@ -23,7 +23,9 @@ jobs:
 exempt-pr-labels: "pinned,security,dependencies"
 operations-per-run: 120
 - name: Print outputs
-run: echo ${{ join(steps.stale.outputs.*, ',') }}
+env:
+STALE_OUTPUT: ${{ join(steps.stale.outputs.*, ',') }}
+run: echo "$STALE_OUTPUT"

 # clean_ghcr:
 # name: Delete outdated dev container images

@@ -38,4 +40,3 @@ jobs:
 # account-type: personal
 # token: ${{ secrets.GITHUB_TOKEN }}
 # token-type: github-token
-
@@ -174,7 +174,7 @@ NOTE: The folder that is set for the config needs to be the folder that contains

 ### Custom go2rtc version

-Frigate currently includes go2rtc v1.9.4, there may be certain cases where you want to run a different version of go2rtc.
+Frigate currently includes go2rtc v1.9.2, there may be certain cases where you want to run a different version of go2rtc.

 To do this:

@@ -5,6 +5,8 @@ title: Generative AI

 Generative AI can be used to automatically generate descriptive text based on the thumbnails of your tracked objects. This helps with [Semantic Search](/configuration/semantic_search) in Frigate to provide more context about your tracked objects. Descriptions are accessed via the _Explore_ view in the Frigate UI by clicking on a tracked object's thumbnail.

+Requests for a description are sent off automatically to your AI provider at the end of the tracked object's lifecycle. Descriptions can also be regenerated manually via the Frigate UI.
+
 :::info

 Semantic Search must be enabled to use Generative AI.
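Note: using this feature requires a `genai` block in the Frigate config. The sketch below is an assumption based on the Ollama provider referenced later in this commit (`OllamaClient`); exact keys and values depend on your provider and version, so check the Generative AI docs:

```yaml
genai:
  enabled: True
  provider: ollama                  # provider name is an example
  base_url: http://localhost:11434  # assumed local Ollama endpoint
  model: llava                      # vision-capable model, illustrative
```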
@@ -23,7 +23,7 @@ If you are using go2rtc, you should adjust the following settings in your camera

 - Video codec: **H.264** - provides the most compatible video codec with all Live view technologies and browsers. Avoid any kind of "smart codec" or "+" codec like _H.264+_ or _H.265+_. as these non-standard codecs remove keyframes (see below).
 - Audio codec: **AAC** - provides the most compatible audio codec with all Live view technologies and browsers that support audio.
-- I-frame interval (sometimes called the keyframe interval, the interframe space, or the GOP length): match your camera's frame rate, or choose "1x" (for interframe space on Reolink cameras). For example, if your stream outputs 20fps, your i-frame interval should be 20 (or 1x on Reolink). Values higher than the frame rate will cause the stream to take longer to begin playback. See [this page](https://gardinal.net/understanding-the-keyframe-interval/) for more on keyframes.
+- I-frame interval (sometimes called the keyframe interval, the interframe space, or the GOP length): match your camera's frame rate, or choose "1x" (for interframe space on Reolink cameras). For example, if your stream outputs 20fps, your i-frame interval should be 20 (or 1x on Reolink). Values higher than the frame rate will cause the stream to take longer to begin playback. See [this page](https://gardinal.net/understanding-the-keyframe-interval/) for more on keyframes. For many users this may not be an issue, but it should be noted that that a 1x i-frame interval will cause more storage utilization if you are using the stream for the `record` role as well.

 The default video and audio codec on your camera may not always be compatible with your browser, which is why setting them to H.264 and AAC is recommended. See the [go2rtc docs](https://github.com/AlexxIT/go2rtc?tab=readme-ov-file#codecs-madness) for codec support information.

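Note: when a camera cannot be switched to H.264/AAC directly, the usual workaround is to let go2rtc repackage or transcode the stream for Live view. A minimal sketch, assuming a go2rtc restream as described in the Frigate live/restream docs (camera name and address are placeholders):

```yaml
go2rtc:
  streams:
    back_yard:
      - rtsp://192.168.1.10:554/stream   # camera already delivers H.264 video
      - "ffmpeg:back_yard#audio=aac"     # transcode only the audio track to AAC for browser playback
```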
@@ -5,7 +5,7 @@ title: Using Semantic Search

 Semantic Search in Frigate allows you to find tracked objects within your review items using either the image itself, a user-defined text description, or an automatically generated one. This feature works by creating _embeddings_ — numerical vector representations — for both the images and text descriptions of your tracked objects. By comparing these embeddings, Frigate assesses their similarities to deliver relevant search results.

-Frigate has support for [Jina AI's CLIP model](https://huggingface.co/jinaai/jina-clip-v1) to create embeddings, which runs locally. Embeddings are then saved to Frigate's database.
+Frigate uses [Jina AI's CLIP model](https://huggingface.co/jinaai/jina-clip-v1) to create and save embeddings to Frigate's database. All of this runs locally.

 Semantic Search is accessed via the _Explore_ view in the Frigate UI.

@@ -19,7 +19,7 @@ For best performance, 16GB or more of RAM and a dedicated GPU are recommended.

 ## Configuration

-Semantic Search is disabled by default, and must be enabled in your config file before it can be used. Semantic Search is a global configuration setting.
+Semantic Search is disabled by default, and must be enabled in your config file or in the UI's Settings page before it can be used. Semantic Search is a global configuration setting.

 ```yaml
 semantic_search:

@@ -29,9 +29,9 @@ semantic_search:

 :::tip

-The embeddings database can be re-indexed from the existing tracked objects in your database by adding `reindex: True` to your `semantic_search` configuration. Depending on the number of tracked objects you have, it can take a long while to complete and may max out your CPU while indexing. Make sure to set the config back to `False` before restarting Frigate again.
+The embeddings database can be re-indexed from the existing tracked objects in your database by adding `reindex: True` to your `semantic_search` configuration or by toggling the switch on the Search Settings page in the UI and restarting Frigate. Depending on the number of tracked objects you have, it can take a long while to complete and may max out your CPU while indexing. Make sure to turn the UI's switch off or set the config back to `False` before restarting Frigate again.

-If you are enabling the Search feature for the first time, be advised that Frigate does not automatically index older tracked objects. You will need to enable the `reindex` feature in order to do that.
+If you are enabling Semantic Search for the first time, be advised that Frigate does not automatically index older tracked objects. You will need to enable the `reindex` feature in order to do that.

 :::

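Note: the re-index described above is driven by a single config flag. A minimal sketch using only keys that appear on this page (values illustrative):

```yaml
semantic_search:
  enabled: True
  reindex: True     # set back to False (or turn the UI switch off) once indexing finishes
  model_size: small
```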
@@ -39,9 +39,9 @@ If you are enabling the Search feature for the first time, be advised that Friga

 The vision model is able to embed both images and text into the same vector space, which allows `image -> image` and `text -> image` similarity searches. Frigate uses this model on tracked objects to encode the thumbnail image and store it in the database. When searching for tracked objects via text in the search box, Frigate will perform a `text -> image` similarity search against this embedding. When clicking "Find Similar" in the tracked object detail pane, Frigate will perform an `image -> image` similarity search to retrieve the closest matching thumbnails.

-The text model is used to embed tracked object descriptions and perform searches against them. Descriptions can be created, viewed, and modified on the Search page when clicking on the gray tracked object chip at the top left of each review item. See [the Generative AI docs](/configuration/genai.md) for more information on how to automatically generate tracked object descriptions.
+The text model is used to embed tracked object descriptions and perform searches against them. Descriptions can be created, viewed, and modified on the Explore page when clicking on thumbnail of a tracked object. See [the Generative AI docs](/configuration/genai.md) for more information on how to automatically generate tracked object descriptions.

-Differently weighted CLIP models are available and can be selected by setting the `model_size` config option as `small` or `large`:
+Differently weighted versions of the Jina model are available and can be selected by setting the `model_size` config option as `small` or `large`:

 ```yaml
 semantic_search:

@@ -50,7 +50,7 @@ semantic_search:
 ```

 - Configuring the `large` model employs the full Jina model and will automatically run on the GPU if applicable.
-- Configuring the `small` model employs a quantized version of the model that uses less RAM and runs on CPU with a very negligible difference in embedding quality.
+- Configuring the `small` model employs a quantized version of the Jina model that uses less RAM and runs on CPU with a very negligible difference in embedding quality.

 ### GPU Acceleration

@@ -84,7 +84,7 @@ If the correct build is used for your GPU and the `large` model is configured, t

 ## Usage and Best Practices

-1. Semantic Search is used in conjunction with the other filters available on the Search page. Use a combination of traditional filtering and Semantic Search for the best results.
+1. Semantic Search is used in conjunction with the other filters available on the Explore page. Use a combination of traditional filtering and Semantic Search for the best results.
 2. Use the thumbnail search type when searching for particular objects in the scene. Use the description search type when attempting to discern the intent of your object.
 3. Because of how the AI models Frigate uses have been trained, the comparison between text and image embedding distances generally means that with multi-modal (`thumbnail` and `description`) searches, results matching `description` will appear first, even if a `thumbnail` embedding may be a better match. Play with the "Search Type" setting to help find what you are looking for. Note that if you are generating descriptions for specific objects or zones only, this may cause search results to prioritize the objects with descriptions even if the the ones without them are more relevant.
 4. Make your search language and tone closely match exactly what you're looking for. If you are using thumbnail search, **phrase your query as an image caption**. Searching for "red car" may not work as well as "red sedan driving down a residential street on a sunny day".
@@ -28,7 +28,7 @@ For the Dahua/Loryta 5442 camera, I use the following settings:
 - Encode Mode: H.264
 - Resolution: 2688\*1520
 - Frame Rate(FPS): 15
-- I Frame Interval: 30
+- I Frame Interval: 30 (15 can also be used to prioritize streaming performance - see the [camera settings recommendations](../configuration/live) for more info)

 **Sub Stream (Detection)**

@@ -98,3 +98,11 @@ docker run -d \
 -p 8555:8555/udp \
 ghcr.io/blakeblackshear/frigate:stable
 ```
+
+### My RTSP stream works fine in VLC, but it does not work when I put the same URL in my Frigate config. Is this a bug?
+
+No. Frigate uses the TCP protocol to connect to your camera's RTSP URL. VLC automatically switches between UDP and TCP depending on network conditions and stream availability. So a stream that works in VLC but not in Frigate is likely due to VLC selecting UDP as the transfer protocol.
+
+TCP ensures that all data packets arrive in the correct order. This is crucial for video recording, decoding, and stream processing, which is why Frigate enforces a TCP connection. UDP is faster but less reliable, as it does not guarantee packet delivery or order, and VLC does not have the same requirements as Frigate.
+
+You can still configure Frigate to use UDP by using ffmpeg input args or the preset `preset-rtsp-udp`. See the [ffmpeg presets](/configuration/ffmpeg_presets) documentation.
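Note: the `preset-rtsp-udp` mentioned in the new FAQ entry is applied per input. A minimal sketch, assuming the standard camera/ffmpeg config layout (camera name and URL are placeholders):

```yaml
cameras:
  back_yard:
    ffmpeg:
      inputs:
        - path: rtsp://192.168.1.10:554/stream
          input_args: preset-rtsp-udp   # switch this input from the default TCP transport to UDP
          roles:
            - detect
```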
@@ -21,13 +21,13 @@ from frigate.api.defs.query.app_query_parameters import AppTimelineHourlyQueryPa
 from frigate.api.defs.request.app_body import AppConfigSetBody
 from frigate.api.defs.tags import Tags
 from frigate.config import FrigateConfig
-from frigate.const import CONFIG_DIR
 from frigate.models import Event, Timeline
 from frigate.util.builtin import (
 clean_camera_user_pass,
 get_tz_modifiers,
 update_yaml_from_url,
 )
+from frigate.util.config import find_config_file
 from frigate.util.services import (
 ffprobe_stream,
 get_nvidia_driver_info,

@@ -134,9 +134,25 @@ def config(request: Request):
 for zone_name, zone in config_obj.cameras[camera_name].zones.items():
 camera_dict["zones"][zone_name]["color"] = zone.color

+# remove go2rtc stream passwords
+go2rtc: dict[str, any] = config_obj.go2rtc.model_dump(
+mode="json", warnings="none", exclude_none=True
+)
+for stream_name, stream in go2rtc.get("streams", {}).items():
+if isinstance(stream, str):
+cleaned = clean_camera_user_pass(stream)
+else:
+cleaned = []
+
+for item in stream:
+cleaned.append(clean_camera_user_pass(item))
+
+config["go2rtc"]["streams"][stream_name] = cleaned
+
 config["plus"] = {"enabled": request.app.frigate_config.plus_api.is_active()}
 config["model"]["colormap"] = config_obj.model.colormap

+# use merged labelamp
 for detector_config in config["detectors"].values():
 detector_config["model"]["labelmap"] = (
 request.app.frigate_config.model.merged_labelmap
|
|||||||
|
|
||||||
@router.get("/config/raw")
|
@router.get("/config/raw")
|
||||||
def config_raw():
|
def config_raw():
|
||||||
config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")
|
config_file = find_config_file()
|
||||||
|
|
||||||
# Check if we can use .yaml instead of .yml
|
|
||||||
config_file_yaml = config_file.replace(".yml", ".yaml")
|
|
||||||
|
|
||||||
if os.path.isfile(config_file_yaml):
|
|
||||||
config_file = config_file_yaml
|
|
||||||
|
|
||||||
if not os.path.isfile(config_file):
|
if not os.path.isfile(config_file):
|
||||||
return JSONResponse(
|
return JSONResponse(
|
||||||
@ -198,13 +208,7 @@ def config_save(save_option: str, body: Any = Body(media_type="text/plain")):
|
|||||||
|
|
||||||
# Save the config to file
|
# Save the config to file
|
||||||
try:
|
try:
|
||||||
config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")
|
config_file = find_config_file()
|
||||||
|
|
||||||
# Check if we can use .yaml instead of .yml
|
|
||||||
config_file_yaml = config_file.replace(".yml", ".yaml")
|
|
||||||
|
|
||||||
if os.path.isfile(config_file_yaml):
|
|
||||||
config_file = config_file_yaml
|
|
||||||
|
|
||||||
with open(config_file, "w") as f:
|
with open(config_file, "w") as f:
|
||||||
f.write(new_config)
|
f.write(new_config)
|
||||||
@ -253,13 +257,7 @@ def config_save(save_option: str, body: Any = Body(media_type="text/plain")):
|
|||||||
|
|
||||||
@router.put("/config/set")
|
@router.put("/config/set")
|
||||||
def config_set(request: Request, body: AppConfigSetBody):
|
def config_set(request: Request, body: AppConfigSetBody):
|
||||||
config_file = os.environ.get("CONFIG_FILE", f"{CONFIG_DIR}/config.yml")
|
config_file = find_config_file()
|
||||||
|
|
||||||
# Check if we can use .yaml instead of .yml
|
|
||||||
config_file_yaml = config_file.replace(".yml", ".yaml")
|
|
||||||
|
|
||||||
if os.path.isfile(config_file_yaml):
|
|
||||||
config_file = config_file_yaml
|
|
||||||
|
|
||||||
with open(config_file, "r") as f:
|
with open(config_file, "r") as f:
|
||||||
old_raw_config = f.read()
|
old_raw_config = f.read()
|
||||||
|
@ -329,7 +329,7 @@ def login(request: Request, body: AppPostLoginBody):
|
|||||||
try:
|
try:
|
||||||
db_user: User = User.get_by_id(user)
|
db_user: User = User.get_by_id(user)
|
||||||
except DoesNotExist:
|
except DoesNotExist:
|
||||||
return JSONResponse(content={"message": "Login failed"}, status_code=400)
|
return JSONResponse(content={"message": "Login failed"}, status_code=401)
|
||||||
|
|
||||||
password_hash = db_user.password_hash
|
password_hash = db_user.password_hash
|
||||||
if verify_password(password, password_hash):
|
if verify_password(password, password_hash):
|
||||||
@ -340,7 +340,7 @@ def login(request: Request, body: AppPostLoginBody):
|
|||||||
response, JWT_COOKIE_NAME, encoded_jwt, expiration, JWT_COOKIE_SECURE
|
response, JWT_COOKIE_NAME, encoded_jwt, expiration, JWT_COOKIE_SECURE
|
||||||
)
|
)
|
||||||
return response
|
return response
|
||||||
return JSONResponse(content={"message": "Login failed"}, status_code=400)
|
return JSONResponse(content={"message": "Login failed"}, status_code=401)
|
||||||
|
|
||||||
|
|
||||||
@router.get("/users")
|
@router.get("/users")
|
||||||
|
@@ -3,7 +3,7 @@ from typing import Union
 from pydantic import BaseModel
 from pydantic.json_schema import SkipJsonSchema

-from frigate.review.maintainer import SeverityEnum
+from frigate.review.types import SeverityEnum


 class ReviewQueryParams(BaseModel):

@@ -3,7 +3,7 @@ from typing import Dict

 from pydantic import BaseModel, Json

-from frigate.review.maintainer import SeverityEnum
+from frigate.review.types import SeverityEnum


 class ReviewSegmentResponse(BaseModel):
@@ -87,7 +87,11 @@ def create_fastapi_app(
 logger.info("FastAPI started")

 # Rate limiter (used for login endpoint)
-auth.rateLimiter.set_limit(frigate_config.auth.failed_login_rate_limit or "")
+if frigate_config.auth.failed_login_rate_limit is None:
+limiter.enabled = False
+else:
+auth.rateLimiter.set_limit(frigate_config.auth.failed_login_rate_limit)

 app.state.limiter = limiter
 app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
 app.add_middleware(SlowAPIMiddleware)
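Note: with this change the SlowAPI limiter is disabled outright when no rate limit is configured, instead of being handed an empty limit string. The corresponding config knob, as a hedged sketch (the value format is an assumption following the limits/SlowAPI notation; adjust to your needs):

```yaml
auth:
  # semicolon-separated limits on failed login attempts; omit the key entirely to disable rate limiting
  failed_login_rate_limit: "1/second;5/minute;20/hour"
```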
@@ -26,7 +26,7 @@ from frigate.api.defs.response.review_response import (
 )
 from frigate.api.defs.tags import Tags
 from frigate.models import Recordings, ReviewSegment
-from frigate.review.maintainer import SeverityEnum
+from frigate.review.types import SeverityEnum
 from frigate.util.builtin import get_tz_modifiers

 logger = logging.getLogger(__name__)
@@ -29,6 +29,7 @@ from frigate.util.builtin import (
 )
 from frigate.util.config import (
 StreamInfoRetriever,
+find_config_file,
 get_relative_coordinates,
 migrate_frigate_config,
 )

@@ -67,7 +68,6 @@ logger = logging.getLogger(__name__)

 yaml = YAML()

-DEFAULT_CONFIG_FILE = "/config/config.yml"
 DEFAULT_CONFIG = """
 mqtt:
 enabled: False

@@ -638,16 +638,13 @@ class FrigateConfig(FrigateBaseModel):

 @classmethod
 def load(cls, **kwargs):
-config_path = os.environ.get("CONFIG_FILE", DEFAULT_CONFIG_FILE)
+config_path = find_config_file()

-if not os.path.isfile(config_path):
-config_path = config_path.replace("yml", "yaml")
-
 # No configuration file found, create one.
 new_config = False
 if not os.path.isfile(config_path):
 logger.info("No config file found, saving default config")
-config_path = DEFAULT_CONFIG_FILE
+config_path = config_path
 new_config = True
 else:
 # Check if the config file needs to be migrated.
@@ -32,6 +32,7 @@ class DeepStack(DetectionApi):
 self.api_timeout = detector_config.api_timeout
 self.api_key = detector_config.api_key
 self.labels = detector_config.model.merged_labelmap
+self.session = requests.Session()

 def get_label_index(self, label_value):
 if label_value.lower() == "truck":

@@ -51,7 +52,7 @@ class DeepStack(DetectionApi):
 data = {"api_key": self.api_key}

 try:
-response = requests.post(
+response = self.session.post(
 self.api_url,
 data=data,
 files={"image": image_bytes},
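Note: reusing a single requests.Session lets the DeepStack detector keep the HTTP connection to the API alive between frames instead of opening a new one per request. On the config side, a hedged sketch of a DeepStack-style detector entry, limited to the keys this file reads (URL and timeout are placeholders):

```yaml
detectors:
  deepstack:
    type: deepstack
    api_url: http://deepstack-host:5000/v1/vision/detection
    api_timeout: 0.1          # seconds
    # api_key: only needed if your DeepStack server requires one
```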
@@ -136,17 +136,17 @@ class Rknn(DetectionApi):
 def check_config(self, config):
 if (config.model.width != 320) or (config.model.height != 320):
 raise Exception(
-"Make sure to set the model width and height to 320 in your config.yml."
+"Make sure to set the model width and height to 320 in your config."
 )

 if config.model.input_pixel_format != "bgr":
 raise Exception(
-'Make sure to set the model input_pixel_format to "bgr" in your config.yml.'
+'Make sure to set the model input_pixel_format to "bgr" in your config.'
 )

 if config.model.input_tensor != "nhwc":
 raise Exception(
-'Make sure to set the model input_tensor to "nhwc" in your config.yml.'
+'Make sure to set the model input_tensor to "nhwc" in your config.'
 )

 def detect_raw(self, tensor_input):
@@ -221,7 +221,10 @@ class EmbeddingMaintainer(threading.Thread):
 [snapshot_image]
 if event.has_snapshot and camera_config.genai.use_snapshot
 else (
-[thumbnail for data in self.tracked_events[event_id]]
+[
+data["thumbnail"]
+for data in self.tracked_events[event_id]
+]
 if len(self.tracked_events.get(event_id, [])) > 0
 else [thumbnail]
 )

@@ -325,18 +328,25 @@ class EmbeddingMaintainer(threading.Thread):
 )

 if event.has_snapshot and source == "snapshot":
-with open(
-os.path.join(CLIPS_DIR, f"{event.camera}-{event.id}.jpg"),
-"rb",
-) as image_file:
+snapshot_file = os.path.join(CLIPS_DIR, f"{event.camera}-{event.id}.jpg")
+
+if not os.path.isfile(snapshot_file):
+logger.error(
+f"Cannot regenerate description for {event.id}, snapshot file not found: {snapshot_file}"
+)
+return
+
+with open(snapshot_file, "rb") as image_file:
 snapshot_image = image_file.read()
 img = cv2.imdecode(
 np.frombuffer(snapshot_image, dtype=np.int8), cv2.IMREAD_COLOR
 )

 # crop snapshot based on region before sending off to genai
+# provide full image if region doesn't exist (manual events)
+region = event.data.get("region", [0, 0, 1, 1])
 height, width = img.shape[:2]
-x1_rel, y1_rel, width_rel, height_rel = event.data["region"]
+x1_rel, y1_rel, width_rel, height_rel = region

 x1, y1 = int(x1_rel * width), int(y1_rel * height)
 cropped_image = img[

@@ -350,7 +360,7 @@ class EmbeddingMaintainer(threading.Thread):
 [snapshot_image]
 if event.has_snapshot and source == "snapshot"
 else (
-[thumbnail for data in self.tracked_events[event_id]]
+[data["thumbnail"] for data in self.tracked_events[event_id]]
 if len(self.tracked_events.get(event_id, [])) > 0
 else [thumbnail]
 )
@@ -4,7 +4,6 @@ import datetime
 import logging
 import os
 import threading
-from enum import Enum
 from multiprocessing.synchronize import Event as MpEvent
 from pathlib import Path

@@ -16,11 +15,6 @@ from frigate.models import Event, Timeline
 logger = logging.getLogger(__name__)


-class EventCleanupType(str, Enum):
-clips = "clips"
-snapshots = "snapshots"
-
-
 CHUNK_SIZE = 50


@@ -67,16 +61,8 @@ class EventCleanup(threading.Thread):

 return self.camera_labels[camera]["labels"]

-def expire(self, media_type: EventCleanupType) -> list[str]:
+def expire_snapshots(self) -> list[str]:
 ## Expire events from unlisted cameras based on the global config
-if media_type == EventCleanupType.clips:
-expire_days = max(
-self.config.record.alerts.retain.days,
-self.config.record.detections.retain.days,
-)
-file_extension = None # mp4 clips are no longer stored in /clips
-update_params = {"has_clip": False}
-else:
 retain_config = self.config.snapshots.retain
 file_extension = "jpg"
 update_params = {"has_snapshot": False}

@@ -87,10 +73,7 @@ class EventCleanup(threading.Thread):
 # loop over object types in db
 for event in distinct_labels:
 # get expiration time for this label
-if media_type == EventCleanupType.snapshots:
-expire_days = retain_config.objects.get(
-event.label, retain_config.default
-)
+expire_days = retain_config.objects.get(event.label, retain_config.default)

 expire_after = (
 datetime.datetime.now() - datetime.timedelta(days=expire_days)

@@ -162,12 +145,6 @@ class EventCleanup(threading.Thread):

 ## Expire events from cameras based on the camera config
 for name, camera in self.config.cameras.items():
-if media_type == EventCleanupType.clips:
-expire_days = max(
-camera.record.alerts.retain.days,
-camera.record.detections.retain.days,
-)
-else:
 retain_config = camera.snapshots.retain

 # get distinct objects in database for this camera

@@ -176,7 +153,6 @@ class EventCleanup(threading.Thread):
 # loop over object types in db
 for event in distinct_labels:
 # get expiration time for this label
-if media_type == EventCleanupType.snapshots:
 expire_days = retain_config.objects.get(
 event.label, retain_config.default
 )

@@ -206,7 +182,6 @@ class EventCleanup(threading.Thread):
 for event in expired_events:
 events_to_update.append(event.id)

-if media_type == EventCleanupType.snapshots:
 try:
 media_name = f"{event.camera}-{event.id}"
 media_path = Path(

@@ -228,10 +203,136 @@ class EventCleanup(threading.Thread):

 return events_to_update

+def expire_clips(self) -> list[str]:
+## Expire events from unlisted cameras based on the global config
+expire_days = max(
+self.config.record.alerts.retain.days,
+self.config.record.detections.retain.days,
+)
+file_extension = None # mp4 clips are no longer stored in /clips
+update_params = {"has_clip": False}
+
+# get expiration time for this label
+
+expire_after = (
+datetime.datetime.now() - datetime.timedelta(days=expire_days)
+).timestamp()
+# grab all events after specific time
+expired_events: list[Event] = (
+Event.select(
+Event.id,
+Event.camera,
+)
+.where(
+Event.camera.not_in(self.camera_keys),
+Event.start_time < expire_after,
+Event.retain_indefinitely == False,
+)
+.namedtuples()
+.iterator()
+)
+logger.debug(f"{len(list(expired_events))} events can be expired")
+# delete the media from disk
+for expired in expired_events:
+media_name = f"{expired.camera}-{expired.id}"
+media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}")
+
+try:
+media_path.unlink(missing_ok=True)
+if file_extension == "jpg":
+media_path = Path(
+f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
+)
+media_path.unlink(missing_ok=True)
+except OSError as e:
+logger.warning(f"Unable to delete event images: {e}")
+
+# update the clips attribute for the db entry
+query = Event.select(Event.id).where(
+Event.camera.not_in(self.camera_keys),
+Event.start_time < expire_after,
+Event.retain_indefinitely == False,
+)
+
+events_to_update = []
+
+for event in query.iterator():
+events_to_update.append(event)
+
+if len(events_to_update) >= CHUNK_SIZE:
+logger.debug(
+f"Updating {update_params} for {len(events_to_update)} events"
+)
+Event.update(update_params).where(
+Event.id << events_to_update
+).execute()
+events_to_update = []
+
+# Update any remaining events
+if events_to_update:
+logger.debug(
+f"Updating clips/snapshots attribute for {len(events_to_update)} events"
+)
+Event.update(update_params).where(Event.id << events_to_update).execute()
+
+events_to_update = []
+now = datetime.datetime.now()
+
+## Expire events from cameras based on the camera config
+for name, camera in self.config.cameras.items():
+expire_days = max(
+camera.record.alerts.retain.days,
+camera.record.detections.retain.days,
+)
+alert_expire_date = (
+now - datetime.timedelta(days=camera.record.alerts.retain.days)
+).timestamp()
+detection_expire_date = (
+now - datetime.timedelta(days=camera.record.detections.retain.days)
+).timestamp()
+# grab all events after specific time
+expired_events = (
+Event.select(
+Event.id,
+Event.camera,
+)
+.where(
+Event.camera == name,
+Event.retain_indefinitely == False,
+(
+(
+(Event.data["max_severity"] != "detection")
+| (Event.data["max_severity"].is_null())
+)
+& (Event.end_time < alert_expire_date)
+)
+| (
+(Event.data["max_severity"] == "detection")
+& (Event.end_time < detection_expire_date)
+),
+)
+.namedtuples()
+.iterator()
+)
+
+# delete the grabbed clips from disk
+# only snapshots are stored in /clips
+# so no need to delete mp4 files
+for event in expired_events:
+events_to_update.append(event.id)
+
+# update the clips attribute for the db entry
+for i in range(0, len(events_to_update), CHUNK_SIZE):
+batch = events_to_update[i : i + CHUNK_SIZE]
+logger.debug(f"Updating {update_params} for {len(batch)} events")
+Event.update(update_params).where(Event.id << batch).execute()
+
+return events_to_update
+
 def run(self) -> None:
 # only expire events every 5 minutes
 while not self.stop_event.wait(300):
-events_with_expired_clips = self.expire(EventCleanupType.clips)
+events_with_expired_clips = self.expire_clips()

 # delete timeline entries for events that have expired recordings
 # delete up to 100,000 at a time

@@ -242,7 +343,7 @@ class EventCleanup(threading.Thread):
 Timeline.source_id << deleted_events_list[i : i + max_deletes]
 ).execute()

-self.expire(EventCleanupType.snapshots)
+self.expire_snapshots()

 # drop events from db where has_clip and has_snapshot are false
 events = (
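Note: the split into expire_snapshots()/expire_clips() keys clip expiry off the per-camera alert and detection retention settings rather than the snapshot retention value. Those settings live under `record` in the Frigate config; a minimal sketch (day counts are illustrative):

```yaml
record:
  enabled: True
  alerts:
    retain:
      days: 14    # clips for alert-severity review items
  detections:
    retain:
      days: 7     # clips for detection-severity review items
```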
@@ -82,18 +82,23 @@ class EventProcessor(threading.Thread):
 )

 if source_type == EventTypeEnum.tracked_object:
+id = event_data["id"]
 self.timeline_queue.put(
 (
 camera,
 source_type,
 event_type,
-self.events_in_process.get(event_data["id"]),
+self.events_in_process.get(id),
 event_data,
 )
 )

-if event_type == EventStateEnum.start:
-self.events_in_process[event_data["id"]] = event_data
+# if this is the first message, just store it and continue, its not time to insert it in the db
+if (
+event_type == EventStateEnum.start
+or id not in self.events_in_process
+):
+self.events_in_process[id] = event_data
 continue

 self.handle_object_detection(event_type, camera, event_data)

@@ -123,10 +128,6 @@ class EventProcessor(threading.Thread):
 """handle tracked object event updates."""
 updated_db = False

-# if this is the first message, just store it and continue, its not time to insert it in the db
-if event_type == EventStateEnum.start:
-self.events_in_process[event_data["id"]] = event_data
-
 if should_update_db(self.events_in_process[event_data["id"]], event_data):
 updated_db = True
 camera_config = self.config.cameras[camera]

@@ -210,6 +211,7 @@ class EventProcessor(threading.Thread):
 "top_score": event_data["top_score"],
 "attributes": attributes,
 "type": "object",
+"max_severity": event_data.get("max_severity"),
 },
 }

|
@ -38,6 +38,11 @@ class OllamaClient(GenAIClient):
|
|||||||
|
|
||||||
def _send(self, prompt: str, images: list[bytes]) -> Optional[str]:
|
def _send(self, prompt: str, images: list[bytes]) -> Optional[str]:
|
||||||
"""Submit a request to Ollama"""
|
"""Submit a request to Ollama"""
|
||||||
|
if self.provider is None:
|
||||||
|
logger.warning(
|
||||||
|
"Ollama provider has not been initialized, a description will not be generated. Check your Ollama configuration."
|
||||||
|
)
|
||||||
|
return None
|
||||||
try:
|
try:
|
||||||
result = self.provider.generate(
|
result = self.provider.generate(
|
||||||
self.genai_config.model,
|
self.genai_config.model,
|
||||||
|
@@ -702,30 +702,7 @@ class TrackedObjectProcessor(threading.Thread):
 return False

 # If the object is not considered an alert or detection
-review_config = self.config.cameras[camera].review
-if not (
-(
-obj.obj_data["label"] in review_config.alerts.labels
-and (
-not review_config.alerts.required_zones
-or set(obj.entered_zones) & set(review_config.alerts.required_zones)
-)
-)
-or (
-(
-not review_config.detections.labels
-or obj.obj_data["label"] in review_config.detections.labels
-)
-and (
-not review_config.detections.required_zones
-or set(obj.entered_zones)
-& set(review_config.detections.required_zones)
-)
-)
-):
-logger.debug(
-f"Not creating clip for {obj.obj_data['id']} because it did not qualify as an alert or detection"
-)
+if obj.max_severity is None:
 return False

 return True
@@ -2,7 +2,6 @@

 import copy
 import logging
-import os
 import queue
 import threading
 import time

@@ -29,11 +28,11 @@ from frigate.const import (
 AUTOTRACKING_ZOOM_EDGE_THRESHOLD,
 AUTOTRACKING_ZOOM_IN_HYSTERESIS,
 AUTOTRACKING_ZOOM_OUT_HYSTERESIS,
-CONFIG_DIR,
 )
 from frigate.ptz.onvif import OnvifController
 from frigate.track.tracked_object import TrackedObject
 from frigate.util.builtin import update_yaml_file
+from frigate.util.config import find_config_file
 from frigate.util.image import SharedMemoryFrameManager, intersection_over_union

 logger = logging.getLogger(__name__)

@@ -328,13 +327,7 @@ class PtzAutoTracker:
 self.autotracker_init[camera] = True

 def _write_config(self, camera):
-config_file = os.environ.get("CONFIG_FILE", f"{CONFIG_DIR}/config.yml")
+config_file = find_config_file()

-# Check if we can use .yaml instead of .yml
-config_file_yaml = config_file.replace(".yml", ".yaml")
-
-if os.path.isfile(config_file_yaml):
-config_file = config_file_yaml
-
 logger.debug(
 f"{camera}: Writing new config with autotracker motion coefficients: {self.config.cameras[camera].onvif.autotracking.movement_weights}"
@@ -7,7 +7,6 @@ import random
 import string
 import sys
 import threading
-from enum import Enum
 from multiprocessing.synchronize import Event as MpEvent
 from pathlib import Path
 from typing import Optional

@@ -27,6 +26,7 @@ from frigate.const import (
 from frigate.events.external import ManualEventState
 from frigate.models import ReviewSegment
 from frigate.object_processing import TrackedObject
+from frigate.review.types import SeverityEnum
 from frigate.util.image import SharedMemoryFrameManager, calculate_16_9_crop

 logger = logging.getLogger(__name__)

@@ -39,11 +39,6 @@ THRESHOLD_ALERT_ACTIVITY = 120
 THRESHOLD_DETECTION_ACTIVITY = 30


-class SeverityEnum(str, Enum):
-alert = "alert"
-detection = "detection"
-
-
 class PendingReviewSegment:
 def __init__(
 self,
6  frigate/review/types.py  Normal file
@@ -0,0 +1,6 @@
+from enum import Enum
+
+
+class SeverityEnum(str, Enum):
+    alert = "alert"
+    detection = "detection"
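Note: giving `SeverityEnum` its own `frigate/review/types.py` module lets both the review maintainer and `TrackedObject` import it (as the hunks above and below show) without importing each other's heavier modules. Because it subclasses `str`, members serialize and compare as plain strings; a minimal sketch, not part of the diff:

    from frigate.review.types import SeverityEnum

    assert SeverityEnum.alert == "alert"  # str-valued enum compares equal to its string
    assert SeverityEnum("detection") is SeverityEnum.detection  # lookup by value returns the member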
@@ -17,6 +17,8 @@ bandwidth_equation = Recordings.segment_size / (
     Recordings.end_time - Recordings.start_time
 )
 
+MAX_CALCULATED_BANDWIDTH = 10000  # 10Gb/hr
+
 
 class StorageMaintainer(threading.Thread):
     """Maintain frigates recording storage."""
@@ -52,6 +54,12 @@ class StorageMaintainer(threading.Thread):
                 * 3600,
                 2,
             )
+
+            if bandwidth > MAX_CALCULATED_BANDWIDTH:
+                logger.warning(
+                    f"{camera} has a bandwidth of {bandwidth} MB/hr which exceeds the expected maximum. This typically indicates an issue with the cameras recordings."
+                )
+                bandwidth = MAX_CALCULATED_BANDWIDTH
         except TypeError:
             bandwidth = 0
 
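Note: `bandwidth_equation` divides each segment's stored size by its duration, and the maintainer multiplies the average by 3600 to get an hourly rate (MB/hr, per the warning message). A corrupt segment duration can blow that estimate up, which is what the new cap guards against. A rough arithmetic sketch with hypothetical numbers, not from the diff:

    segment_size_mb = 10  # hypothetical segment size reported by the DB
    duration_s = 0.1      # hypothetical (corrupt) duration

    bandwidth = round(segment_size_mb / duration_s * 3600, 2)  # 360000.0 MB/hr
    if bandwidth > 10000:  # MAX_CALCULATED_BANDWIDTH
        bandwidth = 10000  # warning logged, estimate clamped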
@@ -10,7 +10,7 @@ from playhouse.sqliteq import SqliteQueueDatabase
 from frigate.api.fastapi_app import create_fastapi_app
 from frigate.config import FrigateConfig
 from frigate.models import Event, Recordings, ReviewSegment
-from frigate.review.maintainer import SeverityEnum
+from frigate.review.types import SeverityEnum
 from frigate.test.const import TEST_DB, TEST_DB_CLEANUPS
 
 
@@ -3,7 +3,7 @@ from datetime import datetime, timedelta
 from fastapi.testclient import TestClient
 
 from frigate.models import Event, Recordings, ReviewSegment
-from frigate.review.maintainer import SeverityEnum
+from frigate.review.types import SeverityEnum
 from frigate.test.http_api.base_http_test import BaseTestHttp
 
 
@@ -13,6 +13,7 @@ from frigate.config import (
     CameraConfig,
     ModelConfig,
 )
+from frigate.review.types import SeverityEnum
 from frigate.util.image import (
     area,
     calculate_region,
@@ -59,6 +60,27 @@ class TrackedObject:
         self.pending_loitering = False
         self.previous = self.to_dict()
 
+    @property
+    def max_severity(self) -> Optional[str]:
+        review_config = self.camera_config.review
+
+        if self.obj_data["label"] in review_config.alerts.labels and (
+            not review_config.alerts.required_zones
+            or set(self.entered_zones) & set(review_config.alerts.required_zones)
+        ):
+            return SeverityEnum.alert
+
+        if (
+            not review_config.detections.labels
+            or self.obj_data["label"] in review_config.detections.labels
+        ) and (
+            not review_config.detections.required_zones
+            or set(self.entered_zones) & set(review_config.detections.required_zones)
+        ):
+            return SeverityEnum.detection
+
+        return None
+
     def _is_false_positive(self):
         # once a true positive, always a true positive
         if not self.false_positive:
@@ -232,6 +254,7 @@ class TrackedObject:
             "attributes": self.attributes,
             "current_attributes": self.obj_data["attributes"],
             "pending_loitering": self.pending_loitering,
+            "max_severity": self.max_severity,
         }
 
         if include_thumbnail:
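Note: with `max_severity` available as a property and exposed through `to_dict()`, consumers no longer have to re-derive the alerts/detections label-and-zone checks. A hedged sketch of downstream use (variable names are illustrative, not from the diff):

    from frigate.review.types import SeverityEnum

    severity = tracked_obj.max_severity  # SeverityEnum.alert, SeverityEnum.detection, or None
    if severity is None:
        pass  # object does not qualify as an alert or detection

    # The serialized form carries the same value for the API / frontend.
    assert tracked_obj.to_dict()["max_severity"] == severity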
@@ -14,6 +14,16 @@ from frigate.util.services import get_video_properties
 logger = logging.getLogger(__name__)
 
 CURRENT_CONFIG_VERSION = "0.15-0"
+DEFAULT_CONFIG_FILE = "/config/config.yml"
+
+
+def find_config_file() -> str:
+    config_path = os.environ.get("CONFIG_FILE", DEFAULT_CONFIG_FILE)
+
+    if not os.path.isfile(config_path):
+        config_path = config_path.replace("yml", "yaml")
+
+    return config_path
 
 
 def migrate_frigate_config(config_file: str):
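Note: `find_config_file()` centralizes the `.yml`/`.yaml` lookup that callers such as the autotracker previously re-implemented. Per the code above, with no `CONFIG_FILE` override it returns `/config/config.yml` when that file exists; otherwise it returns the `.yaml` spelling of the same path (whether or not that file exists). A small usage sketch, not part of the diff:

    import os

    from frigate.util.config import find_config_file

    os.environ.pop("CONFIG_FILE", None)  # use the default path
    path = find_config_file()
    # "/config/config.yml" if present on disk, else "/config/config.yaml"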
@@ -29,8 +29,11 @@ export function ApiProvider({ children, options }: ApiProviderType) {
         error.response &&
         [401, 302, 307].includes(error.response.status)
       ) {
-        window.location.href =
-          error.response.headers.get("location") ?? "login";
+        // redirect to the login page if not already there
+        const loginPage = error.response.headers.get("location") ?? "login";
+        if (window.location.href !== loginPage) {
+          window.location.href = loginPage;
+        }
       }
     },
     ...options,
@@ -63,7 +63,7 @@ export function UserAuthForm({ className, ...props }: UserAuthFormProps) {
       toast.error("Exceeded rate limit. Try again later.", {
         position: "top-center",
       });
-    } else if (err.response?.status === 400) {
+    } else if (err.response?.status === 401) {
       toast.error("Login failed", {
         position: "top-center",
       });
@@ -74,6 +74,23 @@ export default function ReviewDetailDialog({
     return events.length != review?.data.detections.length;
   }, [review, events]);
 
+  const missingObjects = useMemo(() => {
+    if (!review || !events) {
+      return [];
+    }
+
+    const detectedIds = review.data.detections;
+    const missing = Array.from(
+      new Set(
+        events
+          .filter((event) => !detectedIds.includes(event.id))
+          .map((event) => event.label),
+      ),
+    );
+
+    return missing;
+  }, [review, events]);
+
   const formattedDate = useFormattedTimestamp(
     review?.start_time ?? 0,
     config?.ui.time_format == "24hour"
@@ -263,8 +280,25 @@ export default function ReviewDetailDialog({
           </div>
           {hasMismatch && (
             <div className="p-4 text-center text-sm">
-              Some objects that were detected are not included in this list
-              because the object does not have a snapshot
+              {(() => {
+                const detectedCount = Math.abs(
+                  (events?.length ?? 0) -
+                    (review?.data.detections.length ?? 0),
+                );
+                const objectLabel =
+                  detectedCount === 1 ? "object was" : "objects were";
+
+                return `${detectedCount} unavailable ${objectLabel} detected and included in this review item.`;
+              })()}{" "}
+              Those objects either did not qualify as an alert or detection
+              or have already been cleaned up/deleted.
+              {missingObjects.length > 0 && (
+                <div className="mt-2">
+                  Adjust your configuration if you want Frigate to save
+                  tracked objects for the following labels:{" "}
+                  {missingObjects.join(", ")}
+                </div>
+              )}
             </div>
           )}
           <div className="relative flex size-full flex-col gap-2">
@@ -469,6 +469,30 @@ function ObjectDetailsTab({
           </div>
         </div>
         <div className="flex flex-col gap-1.5">
+          {config?.cameras[search.camera].genai.enabled &&
+          !search.end_time &&
+          (config.cameras[search.camera].genai.required_zones.length === 0 ||
+            search.zones.some((zone) =>
+              config.cameras[search.camera].genai.required_zones.includes(zone),
+            )) &&
+          (config.cameras[search.camera].genai.objects.length === 0 ||
+            config.cameras[search.camera].genai.objects.includes(
+              search.label,
+            )) ? (
+            <>
+              <div className="text-sm text-primary/40">Description</div>
+              <div className="flex h-64 flex-col items-center justify-center gap-3 border p-4 text-sm text-primary/40">
+                <div className="flex">
+                  <ActivityIndicator />
+                </div>
+                <div className="flex">
+                  Frigate will not request a description from your Generative AI
+                  provider until the tracked object's lifecycle has ended.
+                </div>
+              </div>
+            </>
+          ) : (
+            <>
           <div className="text-sm text-primary/40">Description</div>
           <Textarea
             className="h-64"
@@ -476,9 +500,13 @@ function ObjectDetailsTab({
             value={desc}
             onChange={(e) => setDesc(e.target.value)}
           />
+            </>
+          )}
+
         <div className="flex w-full flex-row justify-end gap-2">
-          {config?.cameras[search.camera].genai.enabled && (
-            <div className="flex items-center">
+          {config?.cameras[search.camera].genai.enabled && search.end_time && (
+            <>
+              <div className="flex items-start">
               <Button
                 className="rounded-r-none border-r-0"
                 aria-label="Regenerate tracked object description"
@@ -515,7 +543,7 @@ function ObjectDetailsTab({
               </DropdownMenu>
             )}
           </div>
-        )}
+
         <Button
           variant="select"
           aria-label="Save"
@@ -523,6 +551,8 @@ function ObjectDetailsTab({
         >
           Save
         </Button>
+          </>
+        )}
       </div>
     </div>
   </div>
@@ -5,6 +5,7 @@ import { usePersistence } from "./use-persistence";
 export function useOverlayState<S>(
   key: string,
   defaultValue: S | undefined = undefined,
+  preserveSearch: boolean = true,
 ): [S | undefined, (value: S, replace?: boolean) => void] {
   const location = useLocation();
   const navigate = useNavigate();
@@ -15,7 +16,7 @@ export function useOverlayState<S>(
     (value: S, replace: boolean = false) => {
       const newLocationState = { ...currentLocationState };
       newLocationState[key] = value;
-      navigate(location.pathname + location.search, {
+      navigate(location.pathname + (preserveSearch ? location.search : ""), {
         state: newLocationState,
         replace,
       });
@@ -39,8 +39,11 @@ export default function Events() {
 
   const [showReviewed, setShowReviewed] = usePersistence("showReviewed", false);
 
-  const [recording, setRecording] =
-    useOverlayState<RecordingStartingPoint>("recording");
+  const [recording, setRecording] = useOverlayState<RecordingStartingPoint>(
+    "recording",
+    undefined,
+    false,
+  );
 
   useSearchEffect("id", (reviewId: string) => {
     axios