mirror of
https://github.com/blakeblackshear/frigate.git
synced 2025-08-18 13:47:20 +02:00
Compare commits
16 Commits
v0.16.0-rc
...
master
Author | SHA1 | Date | |
---|---|---|---|
|
9ed7ccab75 | ||
|
ceced7cc91 | ||
|
1db26cb41e | ||
|
6840415b6c | ||
|
06539c925c | ||
|
addb4e6891 | ||
|
fb290c411b | ||
|
89db960c05 | ||
|
2cde58037d | ||
|
d1be614a10 | ||
|
93c7c8c518 | ||
|
c83a35d090 | ||
|
d31a4e3443 | ||
|
334b6670e1 | ||
|
b5067c07f8 | ||
|
21e9b2f2ce |
2
Makefile
2
Makefile
@ -1,7 +1,7 @@
|
||||
default_target: local
|
||||
|
||||
COMMIT_HASH := $(shell git log -1 --pretty=format:"%h"|tail -1)
|
||||
VERSION = 0.16.0
|
||||
VERSION = 0.16.1
|
||||
IMAGE_REPO ?= ghcr.io/blakeblackshear/frigate
|
||||
GITHUB_REF_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
|
||||
BOARDS= #Initialized empty
|
||||
|
@ -152,7 +152,7 @@ ARG TARGETARCH
|
||||
# Use a separate container to build wheels to prevent build dependencies in final image
|
||||
RUN apt-get -qq update \
|
||||
&& apt-get -qq install -y \
|
||||
apt-transport-https wget \
|
||||
apt-transport-https wget unzip \
|
||||
&& apt-get -qq update \
|
||||
&& apt-get -qq install -y \
|
||||
python3.11 \
|
||||
|
@ -2,18 +2,25 @@
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
SQLITE3_VERSION="96c92aba00c8375bc32fafcdf12429c58bd8aabfcadab6683e35bbb9cdebf19e" # 3.46.0
|
||||
SQLITE3_VERSION="3.46.1"
|
||||
PYSQLITE3_VERSION="0.5.3"
|
||||
|
||||
# Fetch the source code for the latest release of Sqlite.
|
||||
# Fetch the pre-built sqlite amalgamation instead of building from source
|
||||
if [[ ! -d "sqlite" ]]; then
|
||||
wget https://www.sqlite.org/src/tarball/sqlite.tar.gz?r=${SQLITE3_VERSION} -O sqlite.tar.gz
|
||||
tar xzf sqlite.tar.gz
|
||||
cd sqlite/
|
||||
LIBS="-lm" ./configure --disable-tcl --enable-tempstore=always
|
||||
make sqlite3.c
|
||||
mkdir sqlite
|
||||
cd sqlite
|
||||
|
||||
# Download the pre-built amalgamation from sqlite.org
|
||||
# For SQLite 3.46.1, the amalgamation version is 3460100
|
||||
SQLITE_AMALGAMATION_VERSION="3460100"
|
||||
|
||||
wget https://www.sqlite.org/2024/sqlite-amalgamation-${SQLITE_AMALGAMATION_VERSION}.zip -O sqlite-amalgamation.zip
|
||||
unzip sqlite-amalgamation.zip
|
||||
mv sqlite-amalgamation-${SQLITE_AMALGAMATION_VERSION}/* .
|
||||
rmdir sqlite-amalgamation-${SQLITE_AMALGAMATION_VERSION}
|
||||
rm sqlite-amalgamation.zip
|
||||
|
||||
cd ../
|
||||
rm sqlite.tar.gz
|
||||
fi
|
||||
|
||||
# Grab the pysqlite3 source code.
|
||||
|
@ -57,9 +57,17 @@ fi
|
||||
|
||||
# arch specific packages
|
||||
if [[ "${TARGETARCH}" == "amd64" ]]; then
|
||||
# Install non-free version of i965 driver
|
||||
CODENAME=$(grep VERSION_CODENAME= /etc/os-release | cut -d= -f2) \
|
||||
&& sed -i -E "s/^(deb http:\/\/deb\.debian\.org\/debian ${CODENAME} main)(.*)$/\1 contrib non-free non-free-firmware\2/" /etc/apt/sources.list \
|
||||
&& apt-get -qq update \
|
||||
&& apt-get install --no-install-recommends --no-install-suggests -y i965-va-driver-shaders \
|
||||
&& sed -i -E "s/(deb http:\/\/deb\.debian\.org\/debian ${CODENAME} main) contrib non-free non-free-firmware/\1/" /etc/apt/sources.list \
|
||||
&& apt-get update
|
||||
|
||||
# install amd / intel-i965 driver packages
|
||||
apt-get -qq install --no-install-recommends --no-install-suggests -y \
|
||||
i965-va-driver intel-gpu-tools onevpl-tools \
|
||||
intel-gpu-tools onevpl-tools \
|
||||
libva-drm2 \
|
||||
mesa-va-drivers radeontop
|
||||
|
||||
|
@ -99,6 +99,12 @@ genai:
|
||||
model: gemini-1.5-flash
|
||||
```
|
||||
|
||||
:::note
|
||||
|
||||
To use a different Gemini-compatible API endpoint, set the `GEMINI_BASE_URL` environment variable to your provider's API URL.
|
||||
|
||||
:::
|
||||
|
||||
## OpenAI
|
||||
|
||||
OpenAI does not have a free tier for their API. With the release of gpt-4o, pricing has been reduced and each generation should cost fractions of a cent if you choose to go this route.
|
||||
|
@ -438,7 +438,7 @@ record:
|
||||
# Optional: Number of minutes to wait between cleanup runs (default: shown below)
|
||||
# This can be used to reduce the frequency of deleting recording segments from disk if you want to minimize i/o
|
||||
expire_interval: 60
|
||||
# Optional: Sync recordings with disk on startup and once a day (default: shown below).
|
||||
# Optional: Two-way sync recordings database with disk on startup and once a day (default: shown below).
|
||||
sync_recordings: False
|
||||
# Optional: Retention settings for recording
|
||||
retain:
|
||||
|
74
docs/docs/frigate/planning_setup.md
Normal file
74
docs/docs/frigate/planning_setup.md
Normal file
@ -0,0 +1,74 @@
|
||||
---
|
||||
id: planning_setup
|
||||
title: Planning a New Installation
|
||||
---
|
||||
|
||||
Choosing the right hardware for your Frigate NVR setup is important for optimal performance and a smooth experience. This guide will walk you through the key considerations, focusing on the number of cameras and the hardware required for efficient object detection.
|
||||
|
||||
## Key Considerations
|
||||
|
||||
### Number of Cameras and Simultaneous Activity
|
||||
|
||||
The most fundamental factor in your hardware decision is the number of cameras you plan to use. However, it's not just about the raw count; it's also about how many of those cameras are likely to see activity and require object detection simultaneously.
|
||||
|
||||
When motion is detected in a camera's feed, regions of that frame are sent to your chosen [object detection hardware](/configuration/object_detectors).
|
||||
|
||||
- **Low Simultaneous Activity (1-6 cameras with occasional motion)**: If you have a few cameras in areas with infrequent activity (e.g., a seldom-used backyard, a quiet interior), the demand on your object detection hardware will be lower. A single, entry-level AI accelerator will suffice.
|
||||
- **Moderate Simultaneous Activity (6-12 cameras with some overlapping motion)**: For setups with more cameras, especially in areas like a busy street or a property with multiple access points, it's more likely that several cameras will capture activity at the same time. This increases the load on your object detection hardware, requiring more processing power.
|
||||
- **High Simultaneous Activity (12+ cameras or highly active zones)**: Large installations or scenarios where many cameras frequently capture activity (e.g., busy street with overview, identification, dedicated LPR cameras, etc.) will necessitate robust object detection capabilities. You'll likely need multiple entry-level AI accelerators or a more powerful single unit such as a discrete GPU.
|
||||
- **Commercial Installations (40+ cameras)**: Commercial installations or scenarios where a substantial number of cameras capture activity (e.g., a commercial property, an active public space) will necessitate robust object detection capabilities. You'll likely need a modern discrete GPU.
|
||||
|
||||
### Video Decoding
|
||||
|
||||
Modern CPUs with integrated GPUs (Intel Quick Sync, AMD VCN) or dedicated GPUs can significantly offload video decoding from the main CPU, freeing up resources. This is highly recommended, especially for multiple cameras.
|
||||
|
||||
:::tip
|
||||
|
||||
For commercial installations, it is important to verify the number of supported concurrent streams on your GPU; many consumer GPUs max out at ~20 concurrent camera streams.
|
||||
|
||||
:::
|
||||
|
||||
## Hardware Considerations
|
||||
|
||||
### Object Detection
|
||||
|
||||
There are many different hardware options for object detection depending on priorities and available hardware. See [the recommended hardware page](./hardware.md#detectors) for more specifics on what hardware is recommended for object detection.
|
||||
|
||||
### Storage
|
||||
|
||||
Storage is an important consideration when planning a new installation. To get a more precise estimate of your storage requirements, you can use an IP camera storage calculator. Websites like [IPConfigure Storage Calculator](https://calculator.ipconfigure.com/) can help you determine the necessary disk space based on your camera settings.
|
||||
|
||||
|
||||
#### SSDs (Solid State Drives)
|
||||
|
||||
SSDs are an excellent choice for Frigate, offering high speed and responsiveness. The older concern that SSDs would quickly "wear out" from constant video recording is largely no longer valid for modern consumer and enterprise-grade SSDs.
|
||||
|
||||
- Longevity: Modern SSDs are designed with advanced wear-leveling algorithms and significantly higher "Terabytes Written" (TBW) ratings than earlier models. For typical home NVR use, a good quality SSD will likely outlast the useful life of your NVR hardware itself.
|
||||
- Performance: SSDs excel at handling the numerous small write operations that occur during continuous video recording and can significantly improve the responsiveness of the Frigate UI and clip retrieval.
|
||||
- Silence and Efficiency: SSDs produce no noise and consume less power than traditional HDDs.
|
||||
|
||||
#### HDDs (Hard Disk Drives)
|
||||
|
||||
Traditional Hard Disk Drives (HDDs) remain a great and often more cost-effective option for long-term video storage, especially for larger setups where raw capacity is prioritized.
|
||||
|
||||
- Cost-Effectiveness: HDDs offer the best cost per gigabyte, making them ideal for storing many days, weeks, or months of continuous footage.
|
||||
- Capacity: HDDs are available in much larger capacities than most consumer SSDs, which is beneficial for extensive video archives.
|
||||
- NVR-Rated Drives: If choosing an HDD, consider drives specifically designed for surveillance (NVR) use, such as Western Digital Purple or Seagate SkyHawk. These drives are engineered for 24/7 operation and continuous write workloads, offering improved reliability compared to standard desktop drives.
|
||||
|
||||
#### Determining Your Storage Needs
|
||||
The amount of storage you need will depend on several factors:
|
||||
|
||||
- Number of Cameras: More cameras naturally require more space.
|
||||
- Resolution and Framerate: Higher resolution (e.g., 4K) and higher framerate (e.g., 30fps) streams consume significantly more storage.
|
||||
- Recording Method: Continuous recording uses the most space. Motion-only recording or object-triggered recording can save space, but may miss some footage.
|
||||
- Retention Period: How many days, weeks, or months of footage do you want to keep?
|
||||
|
||||
#### Network Storage (NFS/SMB)
|
||||
|
||||
While supported, using network-attached storage (NAS) for recordings can introduce latency and network dependency considerations. For optimal performance and reliability, it is generally recommended to have local storage for your Frigate recordings. If using a NAS, ensure your network connection to it is robust and fast (Gigabit Ethernet at minimum) and that the NAS itself can handle the continuous write load.
|
||||
|
||||
### RAM (Memory)
|
||||
|
||||
- **Basic Minimum: 4GB RAM**: This is generally sufficient for a very basic Frigate setup with a few cameras and a dedicated object detection accelerator, without running any enrichments. Performance might be tight, especially with higher resolution streams or numerous detections.
|
||||
- **Minimum for Enrichments: 8GB RAM**: If you plan to utilize Frigate's enrichment features (e.g., facial recognition, license plate recognition, or other AI models that run alongside standard object detection), 8GB of RAM should be considered the minimum. Enrichments require additional memory to load and process their respective models and data.
|
||||
- **Recommended: 16GB RAM**: For most users, especially those with many cameras (8+) or who plan to heavily leverage enrichments, 16GB of RAM is highly recommended. This provides ample headroom for smooth operation, reduces the likelihood of swapping to disk (which can impact performance), and allows for future expansion.
|
@ -5,7 +5,7 @@ title: Updating
|
||||
|
||||
# Updating Frigate
|
||||
|
||||
The current stable version of Frigate is **0.15.0**. The release notes and any breaking changes for this version can be found on the [Frigate GitHub releases page](https://github.com/blakeblackshear/frigate/releases/tag/v0.15.0).
|
||||
The current stable version of Frigate is **0.16.0**. The release notes and any breaking changes for this version can be found on the [Frigate GitHub releases page](https://github.com/blakeblackshear/frigate/releases/tag/v0.16.0).
|
||||
|
||||
Keeping Frigate up to date ensures you benefit from the latest features, performance improvements, and bug fixes. The update process varies slightly depending on your installation method (Docker, Home Assistant Addon, etc.). Below are instructions for the most common setups.
|
||||
|
||||
@ -33,21 +33,21 @@ If you’re running Frigate via Docker (recommended method), follow these steps:
|
||||
2. **Update and Pull the Latest Image**:
|
||||
|
||||
- If using Docker Compose:
|
||||
- Edit your `docker-compose.yml` file to specify the desired version tag (e.g., `0.15.0` instead of `0.14.1`). For example:
|
||||
- Edit your `docker-compose.yml` file to specify the desired version tag (e.g., `0.16.0` instead of `0.15.2`). For example:
|
||||
```yaml
|
||||
services:
|
||||
frigate:
|
||||
image: ghcr.io/blakeblackshear/frigate:0.15.0
|
||||
image: ghcr.io/blakeblackshear/frigate:0.16.0
|
||||
```
|
||||
- Then pull the image:
|
||||
```bash
|
||||
docker pull ghcr.io/blakeblackshear/frigate:0.15.0
|
||||
docker pull ghcr.io/blakeblackshear/frigate:0.16.0
|
||||
```
|
||||
- **Note for `stable` Tag Users**: If your `docker-compose.yml` uses the `stable` tag (e.g., `ghcr.io/blakeblackshear/frigate:stable`), you don’t need to update the tag manually. The `stable` tag always points to the latest stable release after pulling.
|
||||
- If using `docker run`:
|
||||
- Pull the image with the appropriate tag (e.g., `0.15.0`, `0.15.0-tensorrt`, or `stable`):
|
||||
- Pull the image with the appropriate tag (e.g., `0.16.0`, `0.16.0-tensorrt`, or `stable`):
|
||||
```bash
|
||||
docker pull ghcr.io/blakeblackshear/frigate:0.15.0
|
||||
docker pull ghcr.io/blakeblackshear/frigate:0.16.0
|
||||
```
|
||||
|
||||
3. **Start the Container**:
|
||||
@ -105,8 +105,8 @@ If an update causes issues:
|
||||
1. Stop Frigate.
|
||||
2. Restore your backed-up config file and database.
|
||||
3. Revert to the previous image version:
|
||||
- For Docker: Specify an older tag (e.g., `ghcr.io/blakeblackshear/frigate:0.14.1`) in your `docker run` command.
|
||||
- For Docker Compose: Edit your `docker-compose.yml`, specify the older version tag (e.g., `ghcr.io/blakeblackshear/frigate:0.14.1`), and re-run `docker compose up -d`.
|
||||
- For Docker: Specify an older tag (e.g., `ghcr.io/blakeblackshear/frigate:0.15.2`) in your `docker run` command.
|
||||
- For Docker Compose: Edit your `docker-compose.yml`, specify the older version tag (e.g., `ghcr.io/blakeblackshear/frigate:0.15.2`), and re-run `docker compose up -d`.
|
||||
- For Home Assistant: Reinstall the previous addon version manually via the repository if needed and restart the addon.
|
||||
4. Verify the old version is running again.
|
||||
|
||||
|
@ -7,6 +7,7 @@ const sidebars: SidebarsConfig = {
|
||||
Frigate: [
|
||||
'frigate/index',
|
||||
'frigate/hardware',
|
||||
'frigate/planning_setup',
|
||||
'frigate/installation',
|
||||
'frigate/updating',
|
||||
'frigate/camera_setup',
|
||||
|
@ -20,7 +20,7 @@ from fastapi.encoders import jsonable_encoder
|
||||
from fastapi.params import Depends
|
||||
from fastapi.responses import JSONResponse, PlainTextResponse, StreamingResponse
|
||||
from markupsafe import escape
|
||||
from peewee import operator
|
||||
from peewee import SQL, operator
|
||||
from pydantic import ValidationError
|
||||
|
||||
from frigate.api.auth import require_role
|
||||
@ -685,7 +685,14 @@ def plusModels(request: Request, filterByCurrentModelDetector: bool = False):
|
||||
@router.get("/recognized_license_plates")
|
||||
def get_recognized_license_plates(split_joined: Optional[int] = None):
|
||||
try:
|
||||
events = Event.select(Event.data).distinct()
|
||||
query = (
|
||||
Event.select(
|
||||
SQL("json_extract(data, '$.recognized_license_plate') AS plate")
|
||||
)
|
||||
.where(SQL("json_extract(data, '$.recognized_license_plate') IS NOT NULL"))
|
||||
.distinct()
|
||||
)
|
||||
recognized_license_plates = [row[0] for row in query.tuples()]
|
||||
except Exception:
|
||||
return JSONResponse(
|
||||
content=(
|
||||
@ -694,14 +701,6 @@ def get_recognized_license_plates(split_joined: Optional[int] = None):
|
||||
status_code=404,
|
||||
)
|
||||
|
||||
recognized_license_plates = []
|
||||
for e in events:
|
||||
if e.data is not None and "recognized_license_plate" in e.data:
|
||||
recognized_license_plates.append(e.data["recognized_license_plate"])
|
||||
|
||||
while None in recognized_license_plates:
|
||||
recognized_license_plates.remove(None)
|
||||
|
||||
if split_joined:
|
||||
original_recognized_license_plates = recognized_license_plates.copy()
|
||||
for recognized_license_plate in original_recognized_license_plates:
|
||||
|
@ -724,15 +724,24 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends())
|
||||
|
||||
if (sort is None or sort == "relevance") and search_results:
|
||||
processed_events.sort(key=lambda x: x.get("search_distance", float("inf")))
|
||||
elif min_score is not None and max_score is not None and sort == "score_asc":
|
||||
elif sort == "score_asc":
|
||||
processed_events.sort(key=lambda x: x["data"]["score"])
|
||||
elif min_score is not None and max_score is not None and sort == "score_desc":
|
||||
elif sort == "score_desc":
|
||||
processed_events.sort(key=lambda x: x["data"]["score"], reverse=True)
|
||||
elif min_speed is not None and max_speed is not None and sort == "speed_asc":
|
||||
processed_events.sort(key=lambda x: x["data"]["average_estimated_speed"])
|
||||
elif min_speed is not None and max_speed is not None and sort == "speed_desc":
|
||||
elif sort == "speed_asc":
|
||||
processed_events.sort(
|
||||
key=lambda x: x["data"]["average_estimated_speed"], reverse=True
|
||||
key=lambda x: (
|
||||
x["data"].get("average_estimated_speed") is None,
|
||||
x["data"].get("average_estimated_speed"),
|
||||
)
|
||||
)
|
||||
elif sort == "speed_desc":
|
||||
processed_events.sort(
|
||||
key=lambda x: (
|
||||
x["data"].get("average_estimated_speed") is None,
|
||||
x["data"].get("average_estimated_speed", float("-inf")),
|
||||
),
|
||||
reverse=True,
|
||||
)
|
||||
elif sort == "date_asc":
|
||||
processed_events.sort(key=lambda x: x["start_time"])
|
||||
|
@ -142,15 +142,13 @@ def latest_frame(
|
||||
"regions": params.regions,
|
||||
}
|
||||
quality = params.quality
|
||||
mime_type = extension
|
||||
|
||||
if extension == "png":
|
||||
if extension == Extension.png:
|
||||
quality_params = None
|
||||
elif extension == "webp":
|
||||
elif extension == Extension.webp:
|
||||
quality_params = [int(cv2.IMWRITE_WEBP_QUALITY), quality]
|
||||
else:
|
||||
else: # jpg or jpeg
|
||||
quality_params = [int(cv2.IMWRITE_JPEG_QUALITY), quality]
|
||||
mime_type = "jpeg"
|
||||
|
||||
if camera_name in request.app.frigate_config.cameras:
|
||||
frame = frame_processor.get_current_frame(camera_name, draw_options)
|
||||
@ -193,18 +191,21 @@ def latest_frame(
|
||||
|
||||
frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)
|
||||
|
||||
_, img = cv2.imencode(f".{extension}", frame, quality_params)
|
||||
_, img = cv2.imencode(f".{extension.value}", frame, quality_params)
|
||||
return Response(
|
||||
content=img.tobytes(),
|
||||
media_type=f"image/{mime_type}",
|
||||
media_type=f"image/{extension.value}",
|
||||
headers={
|
||||
"Content-Type": f"image/{mime_type}",
|
||||
"Cache-Control": "no-store"
|
||||
if not params.store
|
||||
else "private, max-age=60",
|
||||
},
|
||||
)
|
||||
elif camera_name == "birdseye" and request.app.frigate_config.birdseye.restream:
|
||||
elif (
|
||||
camera_name == "birdseye"
|
||||
and request.app.frigate_config.birdseye.enabled
|
||||
and request.app.frigate_config.birdseye.restream
|
||||
):
|
||||
frame = cv2.cvtColor(
|
||||
frame_processor.get_current_frame(camera_name),
|
||||
cv2.COLOR_YUV2BGR_I420,
|
||||
@ -215,12 +216,11 @@ def latest_frame(
|
||||
|
||||
frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)
|
||||
|
||||
_, img = cv2.imencode(f".{extension}", frame, quality_params)
|
||||
_, img = cv2.imencode(f".{extension.value}", frame, quality_params)
|
||||
return Response(
|
||||
content=img.tobytes(),
|
||||
media_type=f"image/{mime_type}",
|
||||
media_type=f"image/{extension.value}",
|
||||
headers={
|
||||
"Content-Type": f"image/{mime_type}",
|
||||
"Cache-Control": "no-store"
|
||||
if not params.store
|
||||
else "private, max-age=60",
|
||||
|
@ -250,6 +250,7 @@ class FrigateApp:
|
||||
and not genai_cameras
|
||||
and not self.config.lpr.enabled
|
||||
and not self.config.face_recognition.enabled
|
||||
and not self.config.classification.bird.enabled
|
||||
):
|
||||
return
|
||||
|
||||
|
@ -66,7 +66,7 @@ def sync_recordings(limited: bool) -> None:
|
||||
|
||||
if float(len(recordings_to_delete)) / max(1, recordings.count()) > 0.5:
|
||||
logger.warning(
|
||||
f"Deleting {(float(len(recordings_to_delete)) / recordings.count()):2f}% of recordings DB entries, could be due to configuration error. Aborting..."
|
||||
f"Deleting {(len(recordings_to_delete) / max(1, recordings.count()) * 100):.2f}% of recordings DB entries, could be due to configuration error. Aborting..."
|
||||
)
|
||||
return False
|
||||
|
||||
@ -106,7 +106,7 @@ def sync_recordings(limited: bool) -> None:
|
||||
|
||||
if float(len(files_to_delete)) / max(1, len(files_on_disk)) > 0.5:
|
||||
logger.debug(
|
||||
f"Deleting {(float(len(files_to_delete)) / len(files_on_disk)):2f}% of recordings DB entries, could be due to configuration error. Aborting..."
|
||||
f"Deleting {(len(files_to_delete) / max(1, len(files_on_disk)) * 100):.2f}% of recordings DB entries, could be due to configuration error. Aborting..."
|
||||
)
|
||||
return False
|
||||
|
||||
|
@ -131,10 +131,7 @@ export default function SearchFilterGroup({
|
||||
);
|
||||
|
||||
const availableSortTypes = useMemo(() => {
|
||||
const sortTypes = ["date_asc", "date_desc"];
|
||||
if (filter?.min_score || filter?.max_score) {
|
||||
sortTypes.push("score_desc", "score_asc");
|
||||
}
|
||||
const sortTypes = ["date_asc", "date_desc", "score_desc", "score_asc"];
|
||||
if (filter?.min_speed || filter?.max_speed) {
|
||||
sortTypes.push("speed_desc", "speed_asc");
|
||||
}
|
||||
|
@ -42,6 +42,7 @@ import {
|
||||
CommandList,
|
||||
} from "@/components/ui/command";
|
||||
import { LuCheck } from "react-icons/lu";
|
||||
import ActivityIndicator from "@/components/indicators/activity-indicator";
|
||||
|
||||
type SearchFilterDialogProps = {
|
||||
config?: FrigateConfig;
|
||||
@ -64,6 +65,9 @@ export default function SearchFilterDialog({
|
||||
const { t } = useTranslation(["components/filter"]);
|
||||
const [currentFilter, setCurrentFilter] = useState(filter ?? {});
|
||||
const { data: allSubLabels } = useSWR(["sub_labels", { split_joined: 1 }]);
|
||||
const { data: allRecognizedLicensePlates } = useSWR<string[]>(
|
||||
"recognized_license_plates",
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (filter) {
|
||||
@ -130,6 +134,7 @@ export default function SearchFilterDialog({
|
||||
}
|
||||
/>
|
||||
<RecognizedLicensePlatesFilterContent
|
||||
allRecognizedLicensePlates={allRecognizedLicensePlates}
|
||||
recognizedLicensePlates={currentFilter.recognized_license_plate}
|
||||
setRecognizedLicensePlates={(plate) =>
|
||||
setCurrentFilter({
|
||||
@ -875,6 +880,7 @@ export function SnapshotClipFilterContent({
|
||||
}
|
||||
|
||||
type RecognizedLicensePlatesFilterContentProps = {
|
||||
allRecognizedLicensePlates: string[] | undefined;
|
||||
recognizedLicensePlates: string[] | undefined;
|
||||
setRecognizedLicensePlates: (
|
||||
recognizedLicensePlates: string[] | undefined,
|
||||
@ -882,18 +888,12 @@ type RecognizedLicensePlatesFilterContentProps = {
|
||||
};
|
||||
|
||||
export function RecognizedLicensePlatesFilterContent({
|
||||
allRecognizedLicensePlates,
|
||||
recognizedLicensePlates,
|
||||
setRecognizedLicensePlates,
|
||||
}: RecognizedLicensePlatesFilterContentProps) {
|
||||
const { t } = useTranslation(["components/filter"]);
|
||||
|
||||
const { data: allRecognizedLicensePlates, error } = useSWR<string[]>(
|
||||
"recognized_license_plates",
|
||||
{
|
||||
revalidateOnFocus: false,
|
||||
},
|
||||
);
|
||||
|
||||
const [selectedRecognizedLicensePlates, setSelectedRecognizedLicensePlates] =
|
||||
useState<string[]>(recognizedLicensePlates || []);
|
||||
const [inputValue, setInputValue] = useState("");
|
||||
@ -923,7 +923,7 @@ export function RecognizedLicensePlatesFilterContent({
|
||||
}
|
||||
};
|
||||
|
||||
if (!allRecognizedLicensePlates || allRecognizedLicensePlates.length === 0) {
|
||||
if (allRecognizedLicensePlates && allRecognizedLicensePlates.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@ -947,15 +947,12 @@ export function RecognizedLicensePlatesFilterContent({
|
||||
<div className="overflow-x-hidden">
|
||||
<DropdownMenuSeparator className="mb-3" />
|
||||
<div className="mb-3 text-lg">{t("recognizedLicensePlates.title")}</div>
|
||||
{error ? (
|
||||
<p className="text-sm text-red-500">
|
||||
{t("recognizedLicensePlates.loadFailed")}
|
||||
</p>
|
||||
) : !allRecognizedLicensePlates ? (
|
||||
<p className="text-sm text-muted-foreground">
|
||||
{t("recognizedLicensePlates.loading")}
|
||||
</p>
|
||||
) : (
|
||||
{allRecognizedLicensePlates == undefined ? (
|
||||
<div className="flex flex-col items-center justify-center text-sm text-muted-foreground">
|
||||
<ActivityIndicator className="mb-3 mr-2 size-5" />
|
||||
<p>{t("recognizedLicensePlates.loading")}</p>
|
||||
</div>
|
||||
) : allRecognizedLicensePlates.length == 0 ? null : (
|
||||
<>
|
||||
<Command
|
||||
className="border border-input bg-background"
|
||||
@ -1010,11 +1007,11 @@ export function RecognizedLicensePlatesFilterContent({
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
<p className="mt-1 text-sm text-muted-foreground">
|
||||
{t("recognizedLicensePlates.selectPlatesFromList")}
|
||||
</p>
|
||||
</>
|
||||
)}
|
||||
<p className="mt-1 text-sm text-muted-foreground">
|
||||
{t("recognizedLicensePlates.selectPlatesFromList")}
|
||||
</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
@ -107,7 +107,9 @@ export function GenericVideoPlayer({
|
||||
>
|
||||
<HlsVideoPlayer
|
||||
videoRef={videoRef}
|
||||
currentSource={source}
|
||||
currentSource={{
|
||||
playlist: source,
|
||||
}}
|
||||
hotKeys
|
||||
visible
|
||||
frigateControls={false}
|
||||
|
@ -28,17 +28,22 @@ const unsupportedErrorCodes = [
|
||||
MediaError.MEDIA_ERR_DECODE,
|
||||
];
|
||||
|
||||
export interface HlsSource {
|
||||
playlist: string;
|
||||
startPosition?: number;
|
||||
}
|
||||
|
||||
type HlsVideoPlayerProps = {
|
||||
videoRef: MutableRefObject<HTMLVideoElement | null>;
|
||||
containerRef?: React.MutableRefObject<HTMLDivElement | null>;
|
||||
visible: boolean;
|
||||
currentSource: string;
|
||||
currentSource: HlsSource;
|
||||
hotKeys: boolean;
|
||||
supportsFullscreen: boolean;
|
||||
fullscreen: boolean;
|
||||
frigateControls?: boolean;
|
||||
inpointOffset?: number;
|
||||
onClipEnded?: () => void;
|
||||
onClipEnded?: (currentTime: number) => void;
|
||||
onPlayerLoaded?: () => void;
|
||||
onTimeUpdate?: (time: number) => void;
|
||||
onPlaying?: () => void;
|
||||
@ -113,17 +118,25 @@ export default function HlsVideoPlayer({
|
||||
const currentPlaybackRate = videoRef.current.playbackRate;
|
||||
|
||||
if (!useHlsCompat) {
|
||||
videoRef.current.src = currentSource;
|
||||
videoRef.current.src = currentSource.playlist;
|
||||
videoRef.current.load();
|
||||
return;
|
||||
}
|
||||
|
||||
if (!hlsRef.current) {
|
||||
hlsRef.current = new Hls();
|
||||
hlsRef.current.attachMedia(videoRef.current);
|
||||
// we must destroy the hlsRef every time the source changes
|
||||
// so that we can create a new HLS instance with startPosition
|
||||
// set at the optimal point in time
|
||||
if (hlsRef.current) {
|
||||
hlsRef.current.destroy();
|
||||
}
|
||||
|
||||
hlsRef.current.loadSource(currentSource);
|
||||
hlsRef.current = new Hls({
|
||||
maxBufferLength: 10,
|
||||
maxBufferSize: 20 * 1000 * 1000,
|
||||
startPosition: currentSource.startPosition,
|
||||
});
|
||||
hlsRef.current.attachMedia(videoRef.current);
|
||||
hlsRef.current.loadSource(currentSource.playlist);
|
||||
videoRef.current.playbackRate = currentPlaybackRate;
|
||||
}, [videoRef, hlsRef, useHlsCompat, currentSource]);
|
||||
|
||||
@ -374,7 +387,11 @@ export default function HlsVideoPlayer({
|
||||
}
|
||||
}
|
||||
}}
|
||||
onEnded={onClipEnded}
|
||||
onEnded={() => {
|
||||
if (onClipEnded) {
|
||||
onClipEnded(getVideoTime() ?? 0);
|
||||
}
|
||||
}}
|
||||
onError={(e) => {
|
||||
if (
|
||||
!hlsRef.current &&
|
||||
|
@ -6,7 +6,7 @@ import { Recording } from "@/types/record";
|
||||
import { Preview } from "@/types/preview";
|
||||
import PreviewPlayer, { PreviewController } from "../PreviewPlayer";
|
||||
import { DynamicVideoController } from "./DynamicVideoController";
|
||||
import HlsVideoPlayer from "../HlsVideoPlayer";
|
||||
import HlsVideoPlayer, { HlsSource } from "../HlsVideoPlayer";
|
||||
import { TimeRange } from "@/types/timeline";
|
||||
import ActivityIndicator from "@/components/indicators/activity-indicator";
|
||||
import { VideoResolutionType } from "@/types/live";
|
||||
@ -14,6 +14,7 @@ import axios from "axios";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { useTranslation } from "react-i18next";
|
||||
import { calculateInpointOffset } from "@/utils/videoUtil";
|
||||
import { isFirefox } from "react-device-detect";
|
||||
|
||||
/**
|
||||
* Dynamically switches between video playback and scrubbing preview player.
|
||||
@ -98,9 +99,10 @@ export default function DynamicVideoPlayer({
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [isBuffering, setIsBuffering] = useState(false);
|
||||
const [loadingTimeout, setLoadingTimeout] = useState<NodeJS.Timeout>();
|
||||
const [source, setSource] = useState(
|
||||
`${apiHost}vod/${camera}/start/${timeRange.after}/end/${timeRange.before}/master.m3u8`,
|
||||
);
|
||||
const [source, setSource] = useState<HlsSource>({
|
||||
playlist: `${apiHost}vod/${camera}/start/${timeRange.after}/end/${timeRange.before}/master.m3u8`,
|
||||
startPosition: startTimestamp ? timeRange.after - startTimestamp : 0,
|
||||
});
|
||||
|
||||
// start at correct time
|
||||
|
||||
@ -184,9 +186,28 @@ export default function DynamicVideoPlayer({
|
||||
playerRef.current.autoplay = !isScrubbing;
|
||||
}
|
||||
|
||||
setSource(
|
||||
`${apiHost}vod/${camera}/start/${recordingParams.after}/end/${recordingParams.before}/master.m3u8`,
|
||||
);
|
||||
let startPosition = undefined;
|
||||
|
||||
if (startTimestamp) {
|
||||
const inpointOffset = calculateInpointOffset(
|
||||
recordingParams.after,
|
||||
(recordings || [])[0],
|
||||
);
|
||||
const idealStartPosition = Math.max(
|
||||
0,
|
||||
startTimestamp - timeRange.after - inpointOffset,
|
||||
);
|
||||
|
||||
if (idealStartPosition >= recordings[0].start_time - timeRange.after) {
|
||||
startPosition = idealStartPosition;
|
||||
}
|
||||
}
|
||||
|
||||
setSource({
|
||||
playlist: `${apiHost}vod/${camera}/start/${recordingParams.after}/end/${recordingParams.before}/master.m3u8`,
|
||||
startPosition,
|
||||
});
|
||||
|
||||
setLoadingTimeout(setTimeout(() => setIsLoading(true), 1000));
|
||||
|
||||
controller.newPlayback({
|
||||
@ -203,6 +224,33 @@ export default function DynamicVideoPlayer({
|
||||
[recordingParams, recordings],
|
||||
);
|
||||
|
||||
const onValidateClipEnd = useCallback(
|
||||
(currentTime: number) => {
|
||||
if (!onClipEnded || !controller || !recordings) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isFirefox) {
|
||||
onClipEnded();
|
||||
}
|
||||
|
||||
// Firefox has a bug where clipEnded can be called prematurely due to buffering
|
||||
// we need to validate if the current play-point is truly at the end of available recordings
|
||||
|
||||
const lastRecordingTime = recordings.at(-1)?.start_time;
|
||||
|
||||
if (
|
||||
!lastRecordingTime ||
|
||||
controller.getProgress(currentTime) < lastRecordingTime
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
onClipEnded();
|
||||
},
|
||||
[onClipEnded, controller, recordings],
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<HlsVideoPlayer
|
||||
@ -216,7 +264,7 @@ export default function DynamicVideoPlayer({
|
||||
inpointOffset={inpointOffset}
|
||||
onTimeUpdate={onTimeUpdate}
|
||||
onPlayerLoaded={onPlayerLoaded}
|
||||
onClipEnded={onClipEnded}
|
||||
onClipEnded={onValidateClipEnd}
|
||||
onPlaying={() => {
|
||||
if (isScrubbing) {
|
||||
playerRef.current?.pause();
|
||||
|
Loading…
Reference in New Issue
Block a user