Merge branch 'master' into release-0.9.0

Blake Blackshear 2021-06-05 07:59:07 -05:00
commit 0bb998c465
22 changed files with 268 additions and 122 deletions


@ -4,11 +4,11 @@
# Frigate - NVR With Realtime Object Detection for IP Cameras # Frigate - NVR With Realtime Object Detection for IP Cameras
A complete and local NVR designed for HomeAssistant with AI object detection. Uses OpenCV and Tensorflow to perform realtime object detection locally for IP cameras. A complete and local NVR designed for [Home Assistant](https://www.home-assistant.io) with AI object detection. Uses OpenCV and Tensorflow to perform realtime object detection locally for IP cameras.
Use of a [Google Coral Accelerator](https://coral.ai/products/) is optional, but highly recommended. The Coral will outperform even the best CPUs and can process 100+ FPS with very little overhead. Use of a [Google Coral Accelerator](https://coral.ai/products/) is optional, but highly recommended. The Coral will outperform even the best CPUs and can process 100+ FPS with very little overhead.
- Tight integration with HomeAssistant via a [custom component](https://github.com/blakeblackshear/frigate-hass-integration) - Tight integration with Home Assistant via a [custom component](https://github.com/blakeblackshear/frigate-hass-integration)
- Designed to minimize resource use and maximize performance by only looking for objects when and where it is necessary - Designed to minimize resource use and maximize performance by only looking for objects when and where it is necessary
- Leverages multiprocessing heavily with an emphasis on realtime over processing every frame - Leverages multiprocessing heavily with an emphasis on realtime over processing every frame
- Uses a very low overhead motion detection to determine where to run object detection - Uses a very low overhead motion detection to determine where to run object detection
@ -26,7 +26,7 @@ View the documentation at https://blakeblackshear.github.io/frigate
If you would like to make a donation to support development, please use [Github Sponsors](https://github.com/sponsors/blakeblackshear). If you would like to make a donation to support development, please use [Github Sponsors](https://github.com/sponsors/blakeblackshear).
## Screenshots ## Screenshots
Integration into HomeAssistant Integration into Home Assistant
<div> <div>
<a href="docs/static/img/media_browser.png"><img src="docs/static/img/media_browser.png" height=400></a> <a href="docs/static/img/media_browser.png"><img src="docs/static/img/media_browser.png" height=400></a>
<a href="docs/static/img/notification.png"><img src="docs/static/img/notification.png" height=400></a> <a href="docs/static/img/notification.png"><img src="docs/static/img/notification.png" height=400></a>


@ -81,7 +81,7 @@ environment_vars:
### `database` ### `database`
Event and clip information is managed in a sqlite database at `/media/frigate/clips/frigate.db`. If that database is deleted, clips will be orphaned and will need to be cleaned up manually. They also won't show up in the Media Browser within HomeAssistant. Event and clip information is managed in a sqlite database at `/media/frigate/clips/frigate.db`. If that database is deleted, clips will be orphaned and will need to be cleaned up manually. They also won't show up in the Media Browser within Home Assistant.
If you are storing your clips on a network share (SMB, NFS, etc), you may get a `database is locked` error message on startup. You can customize the location of the database in the config if necessary. If you are storing your clips on a network share (SMB, NFS, etc), you may get a `database is locked` error message on startup. You can customize the location of the database in the config if necessary.
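If the error persists, one option is to keep clips on the share but relocate the database to local storage; a minimal sketch (the `/db` path is illustrative and must be mounted into the container):
```yaml
database:
  # store the sqlite database on local storage instead of the network share
  path: /db/frigate.db
```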
@ -99,7 +99,8 @@ detectors:
# Required: name of the detector # Required: name of the detector
coral: coral:
# Required: type of the detector # Required: type of the detector
# Valid values are 'edgetpu' (requires device property below) and 'cpu'. type: edgetpu # Valid values are 'edgetpu' (requires device property below) and 'cpu'.
type: edgetpu
# Optional: device name as defined here: https://coral.ai/docs/edgetpu/multiple-edgetpu/#using-the-tensorflow-lite-python-api # Optional: device name as defined here: https://coral.ai/docs/edgetpu/multiple-edgetpu/#using-the-tensorflow-lite-python-api
device: usb device: usb
# Optional: num_threads value passed to the tflite.Interpreter (default: shown below) # Optional: num_threads value passed to the tflite.Interpreter (default: shown below)
@ -116,24 +117,3 @@ model:
# Required: width of the trained model # Required: width of the trained model
width: 320 width: 320
``` ```
## Custom Models
Models for both CPU and EdgeTPU (Coral) are bundled in the image. You can use your own models with volume mounts:
- CPU Model: `/cpu_model.tflite`
- EdgeTPU Model: `/edgetpu_model.tflite`
- Labels: `/labelmap.txt`
You also need to update the model width/height in the config if they differ from the defaults.
### Customizing the Labelmap
The labelmap can be customized to your needs. A common reason to do this is to combine multiple object types that are easily confused when you don't need to be as granular such as car/truck. You must retain the same number of labels, but you can change the names. To change:
- Download the [COCO labelmap](https://dl.google.com/coral/canned_models/coco_labels.txt)
- Modify the label names as desired. For example, change `7 truck` to `7 car`
- Mount the new file at `/labelmap.txt` in the container with an additional volume
```
-v ./config/labelmap.txt:/labelmap.txt
```


@ -41,9 +41,11 @@ cameras:
## Masks & Zones ## Masks & Zones
### Masks ### Masks
Masks are used to ignore initial detection in areas of your camera's field of view. Masks are used to ignore initial detection in areas of your camera's field of view.
There are two types of masks available: There are two types of masks available:
- **Motion masks**: Motion masks are used to prevent unwanted types of motion from triggering detection. Try watching the video feed with `Motion Boxes` enabled to see what may be regularly detected as motion. For example, you want to mask out your timestamp, the sky, rooftops, etc. Keep in mind that this mask only prevents motion from being detected and does not prevent objects from being detected if object detection was started due to motion in unmasked areas. Motion is also used during object tracking to refine the object detection area in the next frame. Over masking will make it more difficult for objects to be tracked. To see this effect, create a mask, and then watch the video feed with `Motion Boxes` enabled again. - **Motion masks**: Motion masks are used to prevent unwanted types of motion from triggering detection. Try watching the video feed with `Motion Boxes` enabled to see what may be regularly detected as motion. For example, you want to mask out your timestamp, the sky, rooftops, etc. Keep in mind that this mask only prevents motion from being detected and does not prevent objects from being detected if object detection was started due to motion in unmasked areas. Motion is also used during object tracking to refine the object detection area in the next frame. Over masking will make it more difficult for objects to be tracked. To see this effect, create a mask, and then watch the video feed with `Motion Boxes` enabled again.
- **Object filter masks**: Object filter masks are used to filter out false positives for a given object type. These should be used to filter any areas where it is not possible for an object of that type to be. The bottom center of the detected object's bounding box is evaluated against the mask. If it is in a masked area, it is assumed to be a false positive. For example, you may want to mask out rooftops, walls, the sky, treetops for people. For cars, masking locations other than the street or your driveway will tell frigate that anything in your yard is a false positive. - **Object filter masks**: Object filter masks are used to filter out false positives for a given object type. These should be used to filter any areas where it is not possible for an object of that type to be. The bottom center of the detected object's bounding box is evaluated against the mask. If it is in a masked area, it is assumed to be a false positive. For example, you may want to mask out rooftops, walls, the sky, treetops for people. For cars, masking locations other than the street or your driveway will tell frigate that anything in your yard is a false positive.
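A sketch of how the two mask types above are declared (coordinates illustrative): motion masks live under `motion`, while object filter masks live under the per-object `filters`:
```yaml
motion:
  # ignore motion from a timestamp overlay along the top edge of the frame
  mask: "0,0,1920,0,1920,60,0,60"
objects:
  filters:
    person:
      # discard person detections whose bounding box bottom lands in this area
      mask: "0,0,1000,0,1000,200,0,200"
```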
@ -60,7 +62,7 @@ Example of a finished row corresponding to the below example image:
```yaml ```yaml
motion: motion:
mask: '0,461,3,0,1919,0,1919,843,1699,492,1344,458,1346,336,973,317,869,375,866,432' mask: "0,461,3,0,1919,0,1919,843,1699,492,1344,458,1346,336,973,317,869,375,866,432"
``` ```
![poly](/img/example-mask-poly.png) ![poly](/img/example-mask-poly.png)
@ -102,6 +104,8 @@ zones:
## Objects ## Objects
For a list of available objects, see the [objects documentation](./objects.mdx).
```yaml ```yaml
# Optional: Camera level object filters config. # Optional: Camera level object filters config.
objects: objects:
@ -109,7 +113,7 @@ objects:
- person - person
- car - car
# Optional: mask to prevent all object types from being detected in certain areas (default: no mask) # Optional: mask to prevent all object types from being detected in certain areas (default: no mask)
# Checks based on the bottom center of the bounding box of the object. # Checks based on the bottom center of the bounding box of the object.
# NOTE: This mask is COMBINED with the object type specific mask below # NOTE: This mask is COMBINED with the object type specific mask below
mask: 0,0,1000,0,1000,200,0,200 mask: 0,0,1000,0,1000,200,0,200
filters: filters:
@ -127,7 +131,7 @@ objects:
Frigate can save video clips without any CPU overhead for encoding by simply copying the stream directly with FFmpeg. It leverages FFmpeg's segment functionality to maintain a cache of video for each camera. The cache files are written to disk at `/tmp/cache` and do not introduce memory overhead. When an object is being tracked, it will extend the cache to ensure it can assemble a clip when the event ends. Once the event ends, it again uses FFmpeg to assemble a clip by combining the video clips without any encoding by the CPU. Assembled clips are saved to `/media/frigate/clips`. Clips are retained according to the retention settings defined in the config for each object type.
These clips will not be playable in the web UI or in HomeAssistant's media browser unless your camera sends video as h264. These clips will not be playable in the web UI or in Home Assistant's media browser unless your camera sends video as h264.
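A minimal per-camera sketch of the clips options (values illustrative, not necessarily the defaults):
```yaml
clips:
  enabled: True
  # seconds of video to keep before and after the tracked object
  pre_capture: 5
  post_capture: 5
  retain:
    # days to keep clips when no object-specific retention is configured
    default: 10
```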
:::caution :::caution
Previous versions of frigate included `-vsync drop` in input parameters. This is not compatible with FFmpeg's segment feature and must be removed from your input parameters if you have overrides set. Previous versions of frigate included `-vsync drop` in input parameters. This is not compatible with FFmpeg's segment feature and must be removed from your input parameters if you have overrides set.
@ -187,7 +191,7 @@ snapshots:
## 24/7 Recordings ## 24/7 Recordings
24/7 recordings can be enabled and are stored at `/media/frigate/recordings`. The folder structure for the recordings is `YYYY-MM/DD/HH/<camera_name>/MM.SS.mp4`. These recordings are written directly from your camera stream without re-encoding and are available in HomeAssistant's media browser. Each camera supports a configurable retention policy in the config. 24/7 recordings can be enabled and are stored at `/media/frigate/recordings`. The folder structure for the recordings is `YYYY-MM/DD/HH/<camera_name>/MM.SS.mp4`. These recordings are written directly from your camera stream without re-encoding and are available in Home Assistant's media browser. Each camera supports a configurable retention policy in the config.
:::caution :::caution
Previous versions of frigate included `-vsync drop` in input parameters. This is not compatible with FFmpeg's segment feature and must be removed from your input parameters if you have overrides set. Previous versions of frigate included `-vsync drop` in input parameters. This is not compatible with FFmpeg's segment feature and must be removed from your input parameters if you have overrides set.
@ -204,7 +208,7 @@ record:
## RTMP streams ## RTMP streams
Frigate can re-stream your video feed as a RTMP feed for other applications such as HomeAssistant to utilize it at `rtmp://<frigate_host>/live/<camera_name>`. Port 1935 must be open. This allows you to use a video feed for detection in frigate and HomeAssistant live view at the same time without having to make two separate connections to the camera. The video feed is copied from the original video feed directly to avoid re-encoding. This feed does not include any annotation by Frigate. Frigate can re-stream your video feed as a RTMP feed for other applications such as Home Assistant to utilize it at `rtmp://<frigate_host>/live/<camera_name>`. Port 1935 must be open. This allows you to use a video feed for detection in frigate and Home Assistant live view at the same time without having to make two separate connections to the camera. The video feed is copied from the original video feed directly to avoid re-encoding. This feed does not include any annotation by Frigate.
Some video feeds are not compatible with RTMP. If you are experiencing issues, check to make sure your camera feed is h264 with AAC audio. If your camera doesn't support a compatible format for RTMP, you can use the ffmpeg args to re-encode it on the fly at the expense of increased CPU utilization. Some video feeds are not compatible with RTMP. If you are experiencing issues, check to make sure your camera feed is h264 with AAC audio. If your camera doesn't support a compatible format for RTMP, you can use the ffmpeg args to re-encode it on the fly at the expense of increased CPU utilization.
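A sketch of a camera whose single stream feeds both detection and the RTMP re-stream (camera name and URL illustrative):
```yaml
cameras:
  back:
    ffmpeg:
      inputs:
        - path: rtsp://user:password@camera-ip:554/stream
          roles:
            - detect
            - rtmp
```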
@ -368,7 +372,7 @@ cameras:
- person - person
- car - car
# Optional: mask to prevent all object types from being detected in certain areas (default: no mask) # Optional: mask to prevent all object types from being detected in certain areas (default: no mask)
# Checks based on the bottom center of the bounding box of the object. # Checks based on the bottom center of the bounding box of the object.
# NOTE: This mask is COMBINED with the object type specific mask below # NOTE: This mask is COMBINED with the object type specific mask below
mask: 0,0,1000,0,1000,200,0,200 mask: 0,0,1000,0,1000,200,0,200
filters: filters:
@ -384,6 +388,37 @@ cameras:
## Camera specific configuration ## Camera specific configuration
### MJPEG Cameras
The input and output parameters need to be adjusted for MJPEG cameras
```yaml
input_args:
- -avoid_negative_ts
- make_zero
- -fflags
- nobuffer
- -flags
- low_delay
- -strict
- experimental
- -fflags
- +genpts+discardcorrupt
- -r
- "3" # <---- adjust depending on your desired frame rate from the mjpeg image
- -use_wallclock_as_timestamps
- "1"
```
Note that mjpeg cameras require encoding the video into h264 for clips, recording, and rtmp roles. This will use significantly more CPU than if the cameras supported h264 feeds directly.
```yaml
output_args:
record: -f segment -segment_time 60 -segment_format mp4 -reset_timestamps 1 -strftime 1 -c:v libx264 -an
clips: -f segment -segment_time 10 -segment_format mp4 -reset_timestamps 1 -strftime 1 -c:v libx264 -an
rtmp: -c:v libx264 -an -f flv
```
### RTMP Cameras ### RTMP Cameras
The input parameters need to be adjusted for RTMP cameras The input parameters need to be adjusted for RTMP cameras
@ -402,10 +437,11 @@ ffmpeg:
- -fflags - -fflags
- +genpts+discardcorrupt - +genpts+discardcorrupt
- -use_wallclock_as_timestamps - -use_wallclock_as_timestamps
- '1' - "1"
``` ```
### Reolink 410/520 (possibly others) ### Reolink 410/520 (possibly others)
Several users have reported success with the rtmp video from Reolink cameras. Several users have reported success with the rtmp video from Reolink cameras.
```yaml ```yaml
@ -422,12 +458,11 @@ ffmpeg:
- -fflags - -fflags
- +genpts+discardcorrupt - +genpts+discardcorrupt
- -rw_timeout - -rw_timeout
- '5000000' - "5000000"
- -use_wallclock_as_timestamps - -use_wallclock_as_timestamps
- '1' - "1"
``` ```
### Blue Iris RTSP Cameras ### Blue Iris RTSP Cameras
You will need to remove the `nobuffer` flag for Blue Iris RTSP cameras
@ -446,7 +481,7 @@ ffmpeg:
- -rtsp_transport - -rtsp_transport
- tcp - tcp
- -stimeout - -stimeout
- '5000000' - "5000000"
- -use_wallclock_as_timestamps - -use_wallclock_as_timestamps
- '1' - "1"
``` ```


@ -30,6 +30,18 @@ detectors:
device: usb:1 device: usb:1
``` ```
Multiple PCIE/M.2 Corals:
```yaml
detectors:
coral1:
type: edgetpu
device: pci:0
coral2:
type: edgetpu
device: pci:1
```
Mixing Corals: Mixing Corals:
```yaml ```yaml


@ -3,7 +3,9 @@ id: index
title: Configuration title: Configuration
--- ---
HassOS users can manage their configuration directly in the addon Configuration tab. For other installations, the default location for the config file is `/config/config.yml`. This can be overridden with the `CONFIG_FILE` environment variable. Camera specific ffmpeg parameters are documented [here](cameras.md). For HassOS installations, the default location for the config file is `/config/frigate.yml`.
For all other installations, the default location for the config file is `/config/config.yml`. This can be overridden with the `CONFIG_FILE` environment variable. Camera-specific ffmpeg parameters are documented [here](cameras.md).
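For example, a Docker installation can point Frigate at a differently named file through that variable; a docker-compose sketch:
```yaml
services:
  frigate:
    environment:
      CONFIG_FILE: /config/frigate.yml
```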
It is recommended to start with a minimal configuration and add to it: It is recommended to start with a minimal configuration and add to it:
@ -112,7 +114,7 @@ ffmpeg:
### `objects` ### `objects`
Can be overridden at the camera level Can be overridden at the camera level. For a list of available objects, see the [objects documentation](./objects.mdx).
```yaml ```yaml
objects: objects:
@ -131,3 +133,19 @@ objects:
# Optional: minimum decimal percentage for tracked object's computed score to be considered a true positive (default: shown below) # Optional: minimum decimal percentage for tracked object's computed score to be considered a true positive (default: shown below)
threshold: 0.7 threshold: 0.7
``` ```
### `record`
Can be overridden at the camera level. 24/7 recordings can be enabled and are stored at `/media/frigate/recordings`. The folder structure for the recordings is `YYYY-MM/DD/HH/<camera_name>/MM.SS.mp4`. These recordings are written directly from your camera stream without re-encoding and are available in Home Assistant's media browser. Each camera supports a configurable retention policy in the config.
:::caution
Previous versions of frigate included `-vsync drop` in input parameters. This is not compatible with FFmpeg's segment feature and must be removed from your input parameters if you have overrides set.
:::
```yaml
record:
# Optional: Enable recording
enabled: False
# Optional: Number of days to retain
retain_days: 30
```


@ -55,7 +55,7 @@ A list of supported codecs (you can use `ffmpeg -decoders | grep cuvid` in the c
``` ```
For example, for H265 video (hevc), you'll select `hevc_cuvid`. Add For example, for H265 video (hevc), you'll select `hevc_cuvid`. Add
`-c:v hevc_covid` to your ffmpeg input arguments: `-c:v hevc_cuvid` to your ffmpeg input arguments:
``` ```
ffmpeg: ffmpeg:


@ -0,0 +1,36 @@
---
id: objects
title: Default available objects
sidebar_label: Available objects
---
import labels from '../../../labelmap.txt';
By default, Frigate includes the following object models from the Google Coral test data.
<ul>
{labels.split('\n').map((label) => (
<li>{label.replace(/^\d+\s+/, '')}</li>
))}
</ul>
## Custom Models
Models for both CPU and EdgeTPU (Coral) are bundled in the image. You can use your own models with volume mounts:
- CPU Model: `/cpu_model.tflite`
- EdgeTPU Model: `/edgetpu_model.tflite`
- Labels: `/labelmap.txt`
You also need to update the model width/height in the config if they differ from the defaults.
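For example, if a custom model expects a different input size than the bundled 320x320 default, a sketch of the corresponding config (416x416 is just an illustrative size):
```yaml
model:
  # must match the input dimensions of your custom model
  width: 416
  height: 416
```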
### Customizing the Labelmap
The labelmap can be customized to your needs. A common reason to do this is to combine multiple object types that are easily confused when you don't need to be as granular such as car/truck. You must retain the same number of labels, but you can change the names. To change:
- Download the [COCO labelmap](https://dl.google.com/coral/canned_models/coco_labels.txt)
- Modify the label names as desired. For example, change `7 truck` to `7 car`
- Mount the new file at `/labelmap.txt` in the container with an additional volume
```
-v ./config/labelmap.txt:/labelmap.txt
```
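If you run via docker-compose as recommended in the installation docs, the same mount can be added under the service's volumes (a sketch):
```yaml
volumes:
  - ./config/labelmap.txt:/labelmap.txt
```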


@ -3,7 +3,7 @@ id: optimizing
title: Optimizing performance title: Optimizing performance
--- ---
- **Google Coral**: It is strongly recommended to use a Google Coral, but Frigate will fall back to CPU in the event one is not found. Offloading TensorFlow to the Google Coral is an order of magnitude faster and will reduce your CPU load dramatically. A $60 device will outperform $2000 CPU. Frigate should work with any supported Coral device from https://coral.ai - **Google Coral**: It is strongly recommended to use a Google Coral; Frigate will no longer fall back to CPU in the event one is not found. Offloading TensorFlow to the Google Coral is an order of magnitude faster and will reduce your CPU load dramatically. A $60 device will outperform a $2000 CPU. Frigate should work with any supported Coral device from https://coral.ai
- **Resolution**: For the `detect` input, choose a camera resolution where the smallest object you want to detect barely fits inside a 300x300px square. The model used by Frigate is trained on 300x300px images, so you will get worse performance and no improvement in accuracy by using a larger resolution since Frigate resizes the area where it is looking for objects to 300x300 anyway. - **Resolution**: For the `detect` input, choose a camera resolution where the smallest object you want to detect barely fits inside a 300x300px square. The model used by Frigate is trained on 300x300px images, so you will get worse performance and no improvement in accuracy by using a larger resolution since Frigate resizes the area where it is looking for objects to 300x300 anyway.
- **FPS**: 5 frames per second should be adequate. Higher frame rates will require more CPU usage without improving detections or accuracy. Reducing the frame rate on your camera will have the greatest improvement on system resources. - **FPS**: 5 frames per second should be adequate. Higher frame rates will require more CPU usage without improving detections or accuracy. Reducing the frame rate on your camera will have the greatest improvement on system resources.
- **Hardware Acceleration**: Make sure you configure the `hwaccel_args` for your hardware. They provide a significant reduction in CPU usage if they are available. - **Hardware Acceleration**: Make sure you configure the `hwaccel_args` for your hardware. They provide a significant reduction in CPU usage if they are available.
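A camera-level sketch of the resolution and frame rate settings discussed above (values illustrative):
```yaml
cameras:
  back:
    # resolution and frame rate used for the detect process
    width: 1280
    height: 720
    fps: 5
```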


@ -5,7 +5,7 @@ title: Recommended hardware
## Cameras ## Cameras
Cameras that output H.264 video and AAC audio will offer the most compatibility with all features of Frigate and HomeAssistant. It is also helpful if your camera supports multiple substreams to allow different resolutions to be used for detection, streaming, clips, and recordings without re-encoding. Cameras that output H.264 video and AAC audio will offer the most compatibility with all features of Frigate and Home Assistant. It is also helpful if your camera supports multiple substreams to allow different resolutions to be used for detection, streaming, clips, and recordings without re-encoding.
## Computer ## Computer
@ -24,6 +24,6 @@ Cameras that output H.264 video and AAC audio will offer the most compatibility
Many people have powerful enough NAS devices or home servers to also run Docker. There is an Unraid Community App.
To install, make sure you have the [community app plugin here](https://forums.unraid.net/topic/38582-plug-in-community-applications/). Then search for "Frigate" in the apps section within Unraid - you can see the online store [here](https://unraid.net/community/apps?q=frigate#r)
| Name | Inference Speed | Notes | | Name | Inference Speed | Notes |
| ----------------------- | --------------- | ----------------------------------------------------------------------------------------------------------------------------- | | ------------------------------------ | --------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| [M2 Coral Edge TPU](http://coral.ai) | 6.2ms | Little complicated to get installed, as needs drivers on the host OS, [info here](https://forums.unraid.net/topic/98064-support-blakeblackshear-frigate/?do=findComment&comment=945776) | | [M2 Coral Edge TPU](http://coral.ai) | 6.2ms | Install the Coral plugin from Unraid Community App Center [info here](https://forums.unraid.net/topic/98064-support-blakeblackshear-frigate/?do=findComment&comment=949789) |


@ -5,11 +5,11 @@ sidebar_label: Features
slug: / slug: /
--- ---
A complete and local NVR designed for HomeAssistant with AI object detection. Uses OpenCV and Tensorflow to perform realtime object detection locally for IP cameras. A complete and local NVR designed for Home Assistant with AI object detection. Uses OpenCV and Tensorflow to perform realtime object detection locally for IP cameras.
Use of a [Google Coral Accelerator](https://coral.ai/products/) is optional, but highly recommended. The Coral will outperform even the best CPUs and can process 100+ FPS with very little overhead. Use of a [Google Coral Accelerator](https://coral.ai/products/) is optional, but highly recommended. The Coral will outperform even the best CPUs and can process 100+ FPS with very little overhead.
- Tight integration with HomeAssistant via a [custom component](https://github.com/blakeblackshear/frigate-hass-integration) - Tight integration with Home Assistant via a [custom component](https://github.com/blakeblackshear/frigate-hass-integration)
- Designed to minimize resource use and maximize performance by only looking for objects when and where it is necessary - Designed to minimize resource use and maximize performance by only looking for objects when and where it is necessary
- Leverages multiprocessing heavily with an emphasis on realtime over processing every frame - Leverages multiprocessing heavily with an emphasis on realtime over processing every frame
- Uses a very low overhead motion detection to determine where to run object detection - Uses a very low overhead motion detection to determine where to run object detection


@ -5,7 +5,7 @@ title: Installation
Frigate is a Docker container that can be run on any Docker host including as a [HassOS Addon](https://www.home-assistant.io/addons/). See instructions below for installing the HassOS addon. Frigate is a Docker container that can be run on any Docker host including as a [HassOS Addon](https://www.home-assistant.io/addons/). See instructions below for installing the HassOS addon.
For HomeAssistant users, there is also a [custom component (aka integration)](https://github.com/blakeblackshear/frigate-hass-integration). This custom component adds tighter integration with HomeAssistant by automatically setting up camera entities, sensors, media browser for clips and recordings, and a public API to simplify notifications. For Home Assistant users, there is also a [custom component (aka integration)](https://github.com/blakeblackshear/frigate-hass-integration). This custom component adds tighter integration with Home Assistant by automatically setting up camera entities, sensors, media browser for clips and recordings, and a public API to simplify notifications.
Note that HassOS Addons and custom components are different things. If you are already running Frigate with Docker directly, you do not need the Addon since the Addon would run another instance of Frigate. Note that HassOS Addons and custom components are different things. If you are already running Frigate with Docker directly, you do not need the Addon since the Addon would run another instance of Frigate.
@ -14,26 +14,27 @@ Note that HassOS Addons and custom components are different things. If you are a
HassOS users can install via the addon repository. Frigate requires an MQTT server. HassOS users can install via the addon repository. Frigate requires an MQTT server.
1. Navigate to Supervisor > Add-on Store > Repositories 1. Navigate to Supervisor > Add-on Store > Repositories
1. Add https://github.com/blakeblackshear/frigate-hass-addons 2. Add https://github.com/blakeblackshear/frigate-hass-addons
1. Setup your configuration in the `Configuration` tab 3. Set up your network configuration in the `Configuration` tab if desired
1. Start the addon container 4. Create the file `frigate.yml` in your `config` directory with your detailed Frigate configuration
1. If you are using hardware acceleration for ffmpeg, you will need to disable "Protection mode" 5. Start the addon container
6. If you are using hardware acceleration for ffmpeg, you will need to disable "Protection mode"
## Docker ## Docker
Make sure you choose the right image for your architecture: Make sure you choose the right image for your architecture:
|Arch|Image Name| | Arch | Image Name |
|-|-| | ----------- | ------------------------------------------ |
|amd64|blakeblackshear/frigate:stable-amd64| | amd64 | blakeblackshear/frigate:stable-amd64 |
|amd64nvidia|blakeblackshear/frigate:stable-amd64nvidia| | amd64nvidia | blakeblackshear/frigate:stable-amd64nvidia |
|armv7|blakeblackshear/frigate:stable-armv7| | armv7 | blakeblackshear/frigate:stable-armv7 |
|aarch64|blakeblackshear/frigate:stable-aarch64| | aarch64 | blakeblackshear/frigate:stable-aarch64 |
It is recommended to run with docker-compose: It is recommended to run with docker-compose:
```yaml ```yaml
version: '3.9' version: "3.9"
services: services:
frigate: frigate:
container_name: frigate container_name: frigate
@ -52,10 +53,10 @@ services:
tmpfs: tmpfs:
size: 1000000000 size: 1000000000
ports: ports:
- '5000:5000' - "5000:5000"
- '1935:1935' # RTMP feeds - "1935:1935" # RTMP feeds
environment: environment:
FRIGATE_RTSP_PASSWORD: 'password' FRIGATE_RTSP_PASSWORD: "password"
``` ```
If you can't use docker compose, you can run the container with something similar to this: If you can't use docker compose, you can run the container with something similar to this:
@ -66,7 +67,7 @@ docker run -d \
--restart=unless-stopped \ --restart=unless-stopped \
--mount type=tmpfs,target=/tmp/cache,tmpfs-size=1000000000 \ --mount type=tmpfs,target=/tmp/cache,tmpfs-size=1000000000 \
--device /dev/bus/usb:/dev/bus/usb \ --device /dev/bus/usb:/dev/bus/usb \
--device /dev/dri/renderD128 --device /dev/dri/renderD128 \
-v <path_to_directory_for_media>:/media/frigate \ -v <path_to_directory_for_media>:/media/frigate \
-v <path_to_config_file>:/config/config.yml:ro \ -v <path_to_config_file>:/config/config.yml:ro \
-v /etc/localtime:/etc/localtime:ro \ -v /etc/localtime:/etc/localtime:ro \
@ -86,7 +87,7 @@ You can calculate the necessary shm-size for each camera with the following form
(width * height * 1.5 * 7 + 270480)/1048576 = <shm size in mb> (width * height * 1.5 * 7 + 270480)/1048576 = <shm size in mb>
``` ```
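For example, a single 1280x720 camera works out to (1280 × 720 × 1.5 × 7 + 270480) / 1048576 ≈ 9.5 MB, so the required shm size grows with both resolution and the number of cameras.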
The shm size cannot be set per container for HomeAssistant Addons. You must set `default-shm-size` in `/etc/docker/daemon.json` to increase the default shm size. This will increase the shm size for all of your docker containers. This may or may not cause issues with your setup. https://docs.docker.com/engine/reference/commandline/dockerd/#daemon-configuration-file The shm size cannot be set per container for Home Assistant Addons. You must set `default-shm-size` in `/etc/docker/daemon.json` to increase the default shm size. This will increase the shm size for all of your docker containers. This may or may not cause issues with your setup. https://docs.docker.com/engine/reference/commandline/dockerd/#daemon-configuration-file
## Kubernetes ## Kubernetes
@ -119,5 +120,5 @@ lxc.cap.drop:
``` ```
### ESX ### ESX
For details on running Frigate under ESX, see details [here](https://github.com/blakeblackshear/frigate/issues/305).
For details on running Frigate under ESX, see details [here](https://github.com/blakeblackshear/frigate/issues/305).


@ -22,7 +22,7 @@ Accepts the following query string parameters:
| `motion` | int | Draw blue boxes for areas with detected motion (0 or 1) | | `motion` | int | Draw blue boxes for areas with detected motion (0 or 1) |
| `regions` | int | Draw green boxes for areas where object detection was run (0 or 1) | | `regions` | int | Draw green boxes for areas where object detection was run (0 or 1) |
You can access a higher resolution mjpeg stream by appending `h=height-in-pixels` to the endpoint. For example `http://localhost:5000/back?h=1080`. You can also increase the FPS by appending `fps=frame-rate` to the URL such as `http://localhost:5000/back?fps=10` or both with `?fps=10&h=1000`. You can access a higher resolution mjpeg stream by appending `h=height-in-pixels` to the endpoint. For example `http://localhost:5000/api/back?h=1080`. You can also increase the FPS by appending `fps=frame-rate` to the URL such as `http://localhost:5000/api/back?fps=10` or both with `?fps=10&h=1000`.
### `GET /api/<camera_name>/<object_name>/best.jpg[?h=300&crop=1]` ### `GET /api/<camera_name>/<object_name>/best.jpg[?h=300&crop=1]`
@ -55,7 +55,7 @@ Example parameters:
### `GET /api/stats` ### `GET /api/stats`
Contains some granular debug info that can be used for sensors in HomeAssistant. Contains some granular debug info that can be used for sensors in Home Assistant.
Sample response: Sample response:
@ -176,7 +176,7 @@ Events from the database. Accepts the following query string parameters:
### `GET /api/events/summary` ### `GET /api/events/summary`
Returns summary data for events in the database. Used by the HomeAssistant integration. Returns summary data for events in the database. Used by the Home Assistant integration.
### `GET /api/events/<id>` ### `GET /api/events/<id>`


@ -4,7 +4,7 @@ title: Integration with Home Assistant
sidebar_label: Home Assistant sidebar_label: Home Assistant
--- ---
The best way to integrate with HomeAssistant is to use the [official integration](https://github.com/blakeblackshear/frigate-hass-integration). When configuring the integration, you will be asked for the `Host` of your frigate instance. This value should be the url you use to access Frigate in the browser and will look like `http://<host>:5000/`. If you are using HassOS with the addon, the host should be `http://ccab4aaf-frigate:5000` (or `http://ccab4aaf-frigate-beta:5000` if your are using the beta version of the addon). HomeAssistant needs access to port 5000 (api) and 1935 (rtmp) for all features. The integration will setup the following entities within HomeAssistant: The best way to integrate with Home Assistant is to use the [official integration](https://github.com/blakeblackshear/frigate-hass-integration). When configuring the integration, you will be asked for the `Host` of your frigate instance. This value should be the url you use to access Frigate in the browser and will look like `http://<host>:5000/`. If you are using HassOS with the addon, the host should be `http://ccab4aaf-frigate:5000` (or `http://ccab4aaf-frigate-beta:5000` if you are using the beta version of the addon). Home Assistant needs access to port 5000 (api) and 1935 (rtmp) for all features. The integration will set up the following entities within Home Assistant:
## Sensors: ## Sensors:
@ -30,17 +30,19 @@ The best way to integrate with HomeAssistant is to use the [official integration
Frigate publishes event information in the form of a change feed via MQTT. This allows lots of customization for notifications to meet your needs. Event changes are published with `before` and `after` information as shown [here](#frigateevents). Frigate publishes event information in the form of a change feed via MQTT. This allows lots of customization for notifications to meet your needs. Event changes are published with `before` and `after` information as shown [here](#frigateevents).
Note that some people may not want to expose frigate to the web, so you can leverage the HA API that frigate custom_integration ties into (which is exposed to the web, and thus can be used for mobile notifications etc): Note that some people may not want to expose frigate to the web, so you can leverage the HA API that frigate custom_integration ties into (which is exposed to the web, and thus can be used for mobile notifications etc):
To load an image taken by frigate from HomeAssistants API see below: To load an image taken by frigate from Home Assistant's API, see below:
```
```
https://HA_URL/api/frigate/notifications/<event-id>/thumbnail.jpg https://HA_URL/api/frigate/notifications/<event-id>/thumbnail.jpg
``` ```
To load a video clip taken by frigate from HomeAssistants API : To load a video clip taken by frigate from Home Assistant's API:
```
```
https://HA_URL/api/frigate/notifications/<event-id>/<camera>/clip.mp4 https://HA_URL/api/frigate/notifications/<event-id>/<camera>/clip.mp4
``` ```
Here is a simple example of a notification automation of events which will update the existing notification for each change. This means the image you see in the notification will update as frigate finds a "better" image. Here is a simple example of a notification automation of events which will update the existing notification for each change. This means the image you see in the notification will update as frigate finds a "better" image.
```yaml ```yaml
automation: automation:
@ -57,7 +59,6 @@ automation:
tag: '{{trigger.payload_json["after"]["id"]}}' tag: '{{trigger.payload_json["after"]["id"]}}'
``` ```
```yaml ```yaml
automation: automation:
- alias: When a person enters a zone named yard - alias: When a person enters a zone named yard
@ -106,7 +107,7 @@ automation:
action: action:
- service: notify.mobile_app_pixel_3 - service: notify.mobile_app_pixel_3
data_template: data_template:
message: 'High confidence dog detection.' message: "High confidence dog detection."
data: data:
image: "https://url.com/api/frigate/notifications/{{trigger.payload_json['after']['id']}}/thumbnail.jpg" image: "https://url.com/api/frigate/notifications/{{trigger.payload_json['after']['id']}}/thumbnail.jpg"
tag: "{{trigger.payload_json['after']['id']}}" tag: "{{trigger.payload_json['after']['id']}}"

docs/docs/usage/howtos.md Normal file

@ -0,0 +1,11 @@
---
id: howtos
title: Community Guides
sidebar_label: Community Guides
---
## Community Guides/How-Tos
- Best Camera AI Person & Object Detection - How to Setup Frigate w/ Home Assistant - digiblurDIY [YouTube](https://youtu.be/V8vGdoYO6-Y) - [Article](https://www.digiblur.com/2021/05/how-to-setup-frigate-home-assistant.html)
- Even More Free Local Object Detection with Home Assistant - Frigate Install - Everything Smart Home [YouTube](https://youtu.be/pqDCEZSVeRk)
- Home Assistant Frigate integration for local image recognition - KPeyanski [YouTube](https://youtu.be/Q2UT78lFQpo) - [Article](https://peyanski.com/home-assistant-frigate-integration/)


@ -7,17 +7,17 @@ These are the MQTT messages generated by Frigate. The default topic_prefix is `f
### `frigate/available` ### `frigate/available`
Designed to be used as an availability topic with HomeAssistant. Possible message are: Designed to be used as an availability topic with Home Assistant. Possible messages are:
"online": published when frigate is running (on startup) "online": published when frigate is running (on startup)
"offline": published right before frigate stops "offline": published right before frigate stops
### `frigate/<camera_name>/<object_name>` ### `frigate/<camera_name>/<object_name>`
Publishes the count of objects for the camera for use as a sensor in HomeAssistant. Publishes the count of objects for the camera for use as a sensor in Home Assistant.
### `frigate/<zone_name>/<object_name>` ### `frigate/<zone_name>/<object_name>`
Publishes the count of objects for the zone for use as a sensor in HomeAssistant. Publishes the count of objects for the zone for use as a sensor in Home Assistant.
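As an illustration of consuming these topics directly (the official integration creates equivalent sensors automatically), a manually configured Home Assistant MQTT sensor for a camera named `back` might look like this sketch:
```yaml
sensor:
  - platform: mqtt
    name: "Frigate back person count"
    state_topic: "frigate/back/person"
```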
### `frigate/<camera_name>/<object_name>/snapshot` ### `frigate/<camera_name>/<object_name>/snapshot`


@ -1,3 +1,5 @@
const path = require('path');
module.exports = { module.exports = {
title: 'Frigate', title: 'Frigate',
tagline: 'NVR With Realtime Object Detection for IP Cameras', tagline: 'NVR With Realtime Object Detection for IP Cameras',
@ -11,7 +13,7 @@ module.exports = {
themeConfig: { themeConfig: {
algolia: { algolia: {
apiKey: '81ec882db78f7fed05c51daf973f0362', apiKey: '81ec882db78f7fed05c51daf973f0362',
indexName: 'frigate' indexName: 'frigate',
}, },
navbar: { navbar: {
title: 'Frigate', title: 'Frigate',
@ -56,6 +58,7 @@ module.exports = {
copyright: `Copyright © ${new Date().getFullYear()} Blake Blackshear`, copyright: `Copyright © ${new Date().getFullYear()} Blake Blackshear`,
}, },
}, },
plugins: [path.resolve(__dirname, 'plugins', 'raw-loader')],
presets: [ presets: [
[ [
'@docusaurus/preset-classic', '@docusaurus/preset-classic',

docs/package-lock.json generated

@ -9864,6 +9864,27 @@
"unpipe": "1.0.0" "unpipe": "1.0.0"
} }
}, },
"raw-loader": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/raw-loader/-/raw-loader-4.0.2.tgz",
"integrity": "sha512-ZnScIV3ag9A4wPX/ZayxL/jZH+euYb6FcUinPcgiQW0+UBtEv0O6Q3lGd3cqJ+GHH+rksEv3Pj99oxJ3u3VIKA==",
"requires": {
"loader-utils": "^2.0.0",
"schema-utils": "^3.0.0"
},
"dependencies": {
"schema-utils": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.0.0.tgz",
"integrity": "sha512-6D82/xSzO094ajanoOSbe4YvXWMfn2A//8Y1+MUqFAJul5Bs+yn36xbK9OtNDcRVSBJ9jjeoXftM6CfztsjOAA==",
"requires": {
"@types/json-schema": "^7.0.6",
"ajv": "^6.12.5",
"ajv-keywords": "^3.5.2"
}
}
}
},
"rc": { "rc": {
"version": "1.2.8", "version": "1.2.8",
"resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",


@ -16,6 +16,7 @@
"@docusaurus/preset-classic": "2.0.0-alpha.70", "@docusaurus/preset-classic": "2.0.0-alpha.70",
"@mdx-js/react": "^1.6.21", "@mdx-js/react": "^1.6.21",
"clsx": "^1.1.1", "clsx": "^1.1.1",
"raw-loader": "^4.0.2",
"react": "^16.8.4", "react": "^16.8.4",
"react-dom": "^16.8.4" "react-dom": "^16.8.4"
}, },


@ -0,0 +1,12 @@
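// Docusaurus plugin that teaches webpack to load .txt files (such as labelmap.txt) as raw strings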
module.exports = function (context, options) {
return {
name: 'labelmap',
configureWebpack(config, isServer, utils) {
return {
module: {
rules: [{ test: /\.txt$/, use: 'raw-loader' }],
},
};
},
};
};


@ -7,6 +7,7 @@ module.exports = {
'configuration/optimizing', 'configuration/optimizing',
'configuration/detectors', 'configuration/detectors',
'configuration/false_positives', 'configuration/false_positives',
'configuration/objects',
'configuration/advanced', 'configuration/advanced',
], ],
Usage: ['usage/home-assistant', 'usage/web', 'usage/api', 'usage/mqtt'], Usage: ['usage/home-assistant', 'usage/web', 'usage/api', 'usage/mqtt'],

web/package-lock.json generated

@ -4747,16 +4747,42 @@
"dev": true "dev": true
}, },
"browserslist": { "browserslist": {
"version": "4.16.1", "version": "4.16.6",
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.16.1.tgz", "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.16.6.tgz",
"integrity": "sha512-UXhDrwqsNcpTYJBTZsbGATDxZbiVDsx6UjpmRUmtnP10pr8wAYr5LgFoEFw9ixriQH2mv/NX2SfGzE/o8GndLA==", "integrity": "sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ==",
"dev": true, "dev": true,
"requires": { "requires": {
"caniuse-lite": "^1.0.30001173", "caniuse-lite": "^1.0.30001219",
"colorette": "^1.2.1", "colorette": "^1.2.2",
"electron-to-chromium": "^1.3.634", "electron-to-chromium": "^1.3.723",
"escalade": "^3.1.1", "escalade": "^3.1.1",
"node-releases": "^1.1.69" "node-releases": "^1.1.71"
},
"dependencies": {
"caniuse-lite": {
"version": "1.0.30001230",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001230.tgz",
"integrity": "sha512-5yBd5nWCBS+jWKTcHOzXwo5xzcj4ePE/yjtkZyUV1BTUmrBaA9MRGC+e7mxnqXSA90CmCA8L3eKLaSUkt099IQ==",
"dev": true
},
"colorette": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/colorette/-/colorette-1.2.2.tgz",
"integrity": "sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w==",
"dev": true
},
"electron-to-chromium": {
"version": "1.3.739",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.739.tgz",
"integrity": "sha512-+LPJVRsN7hGZ9EIUUiWCpO7l4E3qBYHNadazlucBfsXBbccDFNKUBAgzE68FnkWGJPwD/AfKhSzL+G+Iqb8A4A==",
"dev": true
},
"node-releases": {
"version": "1.1.72",
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.72.tgz",
"integrity": "sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw==",
"dev": true
}
} }
}, },
"bser": { "bser": {
@ -5435,12 +5461,6 @@
"safer-buffer": "^2.1.0" "safer-buffer": "^2.1.0"
} }
}, },
"electron-to-chromium": {
"version": "1.3.641",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.641.tgz",
"integrity": "sha512-b0DLhsHSHESC1I+Nx6n4w4Lr61chMd3m/av1rZQhS2IXTzaS5BMM5N+ldWdMIlni9CITMRM09m8He4+YV/92TA==",
"dev": true
},
"emittery": { "emittery": {
"version": "0.7.2", "version": "0.7.2",
"resolved": "https://registry.npmjs.org/emittery/-/emittery-0.7.2.tgz", "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.7.2.tgz",
@ -6680,9 +6700,9 @@
"dev": true "dev": true
}, },
"hosted-git-info": { "hosted-git-info": {
"version": "2.8.8", "version": "2.8.9",
"resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz",
"integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==",
"dev": true "dev": true
}, },
"html-encoding-sniffer": { "html-encoding-sniffer": {
@ -8831,9 +8851,9 @@
} }
}, },
"lodash": { "lodash": {
"version": "4.17.20", "version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==", "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
"dev": true "dev": true
}, },
"lodash.difference": { "lodash.difference": {
@ -9145,9 +9165,9 @@
} }
}, },
"nanoid": { "nanoid": {
"version": "3.1.20", "version": "3.1.23",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.23.tgz",
"integrity": "sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==", "integrity": "sha512-FiB0kzdP0FFVGDKlRLEQ1BgDzU87dy5NnzjeW9YZNt+/c3+q82EQDUwniSAUxp/F0gFNI1ZhKU1FqYsMuqZVnw==",
"dev": true "dev": true
}, },
"nanomatch": { "nanomatch": {
@ -9223,12 +9243,6 @@
"which": "^2.0.2" "which": "^2.0.2"
} }
}, },
"node-releases": {
"version": "1.1.69",
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.69.tgz",
"integrity": "sha512-DGIjo79VDEyAnRlfSqYTsy+yoHd2IOjJiKUozD2MV2D85Vso6Bug56mb9tT/fY5Urt0iqk01H7x+llAruDR2zA==",
"dev": true
},
"normalize-package-data": { "normalize-package-data": {
"version": "2.5.0", "version": "2.5.0",
"resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz",
@ -9665,20 +9679,20 @@
"dev": true "dev": true
}, },
"postcss": { "postcss": {
"version": "8.2.2", "version": "8.2.10",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.2.2.tgz", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.2.10.tgz",
"integrity": "sha512-HM1NDNWLgglJPQQMNwvLxgH2KcrKZklKLi/xXYIOaqQB57p/pDWEJNS83PVICYsn1Dg/9C26TiejNr422/ePaQ==", "integrity": "sha512-b/h7CPV7QEdrqIxtAf2j31U5ef05uBDuvoXv6L51Q4rcS1jdlXAVKJv+atCFdUXYl9dyTHGyoMzIepwowRJjFw==",
"dev": true, "dev": true,
"requires": { "requires": {
"colorette": "^1.2.1", "colorette": "^1.2.2",
"nanoid": "^3.1.20", "nanoid": "^3.1.22",
"source-map": "^0.6.1" "source-map": "^0.6.1"
}, },
"dependencies": { "dependencies": {
"source-map": { "colorette": {
"version": "0.6.1", "version": "1.2.2",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.2.2.tgz",
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "integrity": "sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w==",
"dev": true "dev": true
} }
} }
@ -12115,9 +12129,9 @@
} }
}, },
"ws": { "ws": {
"version": "7.4.3", "version": "7.4.6",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.4.3.tgz", "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.6.tgz",
"integrity": "sha512-hr6vCR76GsossIRsr8OLR9acVVm1jyfEWvhbNjtgPOrfvAlKzvyeg/P6r8RuDjRyrcQoPQT7K0DGEPc7Ae6jzA==", "integrity": "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==",
"dev": true "dev": true
}, },
"xml-name-validator": { "xml-name-validator": {


@ -40,7 +40,7 @@
"eslint-plugin-jest": "^24.1.3", "eslint-plugin-jest": "^24.1.3",
"eslint-plugin-testing-library": "^3.10.1", "eslint-plugin-testing-library": "^3.10.1",
"jest": "^26.6.3", "jest": "^26.6.3",
"postcss": "^8.2.2", "postcss": "^8.2.10",
"postcss-cli": "^8.3.1", "postcss-cli": "^8.3.1",
"prettier": "^2.2.1", "prettier": "^2.2.1",
"rimraf": "^3.0.2", "rimraf": "^3.0.2",