# blakeblackshear.frigate/config/config.example.yml

web_port: 5000
################
## List of detectors.
## Currently supported types: cpu, edgetpu
## EdgeTPU requires device as defined here: https://coral.ai/docs/edgetpu/multiple-edgetpu/#using-the-tensorflow-lite-python-api
################
detectors:
  - type: edgetpu
    device: usb
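  # The header above lists 'cpu' as the other supported type. A commented example
  # (added for illustration, not part of the original file; check the frigate docs
  # for whether a device entry is needed):
  # - type: cpu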
mqtt:
  host: mqtt.server.com
  topic_prefix: frigate
  # client_id: frigate # Optional -- set to override default client id of 'frigate' if running multiple instances
  # user: username # Optional
  #################
  ## Environment variables that begin with 'FRIGATE_' may be referenced in {}.
  ##   password: '{FRIGATE_MQTT_PASSWORD}'
  #################
  # password: password # Optional
################
# Global configuration for saving clips
################
save_clips:
  ###########
  # Maximum length of time to retain video during long events.
  # If an object is being tracked for longer than this amount of time, the cache
  # will begin to expire and the resulting clip will be the last x seconds of the event.
  ###########
  max_seconds: 300
#################
# Default ffmpeg args. Optional and can be overwritten per camera.
# Should work with most RTSP cameras that send h264 video
# Built from the properties below with:
# "ffmpeg" + global_args + input_args + "-i" + input + output_args
#################
# ffmpeg:
#   global_args:
#     - -hide_banner
#     - -loglevel
#     - panic
#   hwaccel_args: []
#   input_args:
#     - -avoid_negative_ts
#     - make_zero
#     - -fflags
#     - nobuffer
#     - -flags
#     - low_delay
#     - -strict
#     - experimental
#     - -fflags
#     - +genpts+discardcorrupt
#     - -vsync
#     - drop
#     - -rtsp_transport
#     - tcp
#     - -stimeout
#     - '5000000'
#     - -use_wallclock_as_timestamps
#     - '1'
#   output_args:
#     - -f
#     - rawvideo
#     - -pix_fmt
#     - rgb24
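#################
# hwaccel_args is left empty above. As an illustration only (not part of the
# original file), VAAPI decoding on Intel hardware is commonly configured with
# the standard ffmpeg flags below -- confirm the correct arguments for your
# hardware in the frigate documentation before relying on them.
#################
# ffmpeg:
#   hwaccel_args:
#     - -hwaccel
#     - vaapi
#     - -hwaccel_device
#     - /dev/dri/renderD128
#     - -hwaccel_output_format
#     - yuv420p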
####################
# Global object configuration. Applies to all cameras
# unless overridden at the camera level.
# Keys must be valid labels. By default, the model uses coco (https://dl.google.com/coral/canned_models/coco_labels.txt).
# All labels from the model are reported over MQTT. These values are used to filter out false positives.
# min_area (optional): minimum width*height of the bounding box for the detected object
# max_area (optional): maximum width*height of the bounding box for the detected object
# min_score (optional): minimum score for the object to initiate tracking
# threshold (optional): The minimum decimal percentage for a tracked object's computed score to be considered a true positive
####################
objects:
  track:
    - person
  filters:
    person:
      min_area: 5000
      max_area: 100000
      min_score: 0.5
      threshold: 0.85
cameras:
  back:
    ffmpeg:
      ################
      # Source passed to ffmpeg after the -i parameter. Supports anything compatible with OpenCV and FFmpeg.
      # Environment variables that begin with 'FRIGATE_' may be referenced in {}
      ################
      input: rtsp://viewer:{FRIGATE_RTSP_PASSWORD}@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
      #################
      # These values will override default values for just this camera
      #################
      # global_args: []
      # hwaccel_args: []
      # input_args: []
      # output_args: []
    ################
    ## Optionally specify the resolution of the video feed. Frigate will try to auto detect if not specified
    ################
    # height: 1280
    # width: 720
    ################
    ## Specify the framerate of your camera
    ##
    ## NOTE: This should only be set in the event ffmpeg is unable to determine your camera's framerate
    ## on its own and the reported framerate for your camera in frigate is well over what is expected.
    ################
    # fps: 5
    ################
    ## Optional mask. Must be the same aspect ratio as your video feed. Value is any of the following:
    ## - name of a file in the config directory
    ## - base64 encoded image prefixed with 'base64,' eg. 'base64,asfasdfasdf....'
    ## - polygon of x,y coordinates prefixed with 'poly,' eg. 'poly,0,900,1080,900,1080,1920,0,1920'
    ##
    ## The mask works by looking at the bottom center of the bounding box for the detected
    ## person in the image. If that pixel in the mask is a black pixel, it ignores it as a
    ## false positive. In my mask, the grass and driveway visible from my backdoor camera
    ## are white. The garage doors, sky, and trees (anywhere it would be impossible for a
    ## person to stand) are black.
    ##
    ## Masked areas are also ignored for motion detection.
    ################
    # mask: back-mask.bmp
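    ## As an alternative to a mask image file, the polygon form described in the
    ## comments above can be given inline (illustration only, reusing the example
    ## polygon from the comment above):
    # mask: 'poly,0,900,1080,900,1080,1920,0,1920'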
    ################
    # Allows you to limit the framerate within frigate for cameras that do not support
    # custom framerates. A value of 1 tells frigate to look at every frame, 2 every 2nd frame,
    # 3 every 3rd frame, etc.
    ################
    take_frame: 1
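    # For example (illustration only, not part of the original file): with a camera
    # streaming at 10 fps, take_frame: 2 would have frigate process roughly 5 frames
    # per second.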
    ################
    # The number of seconds to retain the highest scoring image for the best.jpg endpoint before allowing it
    # to be replaced by a newer image. Defaults to 60 seconds.
    ################
    best_image_timeout: 60
    ################
    # MQTT settings
    ################
    # mqtt:
    #   crop_to_region: True
    #   snapshot_height: 300
    ################
    # Zones
    ################
    zones:
      #################
      # Name of the zone
      ################
      front_steps:
        ####################
        # A list of x,y coordinates to define the polygon of the zone. The top
        # left corner is 0,0. Can also be a comma separated string of all x,y coordinates combined.
        # The same zone name can exist across multiple cameras if they have overlapping FOVs.
        # An object is determined to be in the zone based on whether or not the bottom center
        # of its bounding box is within the polygon. The polygon must have at least 3 points.
        # Coordinates can be generated at https://www.image-map.net/
        ####################
        coordinates:
          - 545,1077
          - 747,939
          - 788,805
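        # The same polygon as a single comma separated string (equivalent form,
        # shown for illustration; not part of the original file):
        # coordinates: 545,1077,747,939,788,805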
        ################
        # Zone level object filters. These are applied in addition to the global and camera filters
        # and should be more restrictive than the global and camera filters. The global and camera
        # filters are applied upstream.
        ################
        filters:
          person:
            min_area: 5000
            max_area: 100000
            threshold: 0.8
    ################
    # This will save a clip for each object tracked by frigate along with a json file that contains
    # data related to the tracked object. This works by telling ffmpeg to write video segments to /cache
    # from the video stream without re-encoding. Clips are then created by using ffmpeg to merge segments
    # without re-encoding. The segments saved are unaltered from what frigate receives to avoid re-encoding.
    # They do not contain bounding boxes. These are optimized to capture "false_positive" examples for improving frigate.
    #
    # NOTE: This feature does not work if you have "-vsync drop" configured in your input params.
    # This will only work for camera feeds that can be copied into the mp4 container format without
    # encoding, such as h264. It may not work for some types of streams.
    ################
    save_clips:
      enabled: False
      #########
      # Number of seconds before the event to include in the clips
      #########
      pre_capture: 30
      #########
      # Objects to save clips for. Defaults to all tracked object types.
      #########
      # objects:
      #   - person
    ################
    # Configuration for the snapshots in the debug view and mqtt
    ################
    snapshots:
      show_timestamp: True
      draw_zones: False
    ################
    # Camera level object config. If defined, this is used instead of the global config.
    ################
    objects:
      track:
        - person
        - car
      filters:
        person:
          min_area: 5000
          max_area: 100000
          min_score: 0.5
          threshold: 0.85
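        # A filter for the 'car' label tracked above could be added alongside
        # 'person' in the same way (values below are illustrative assumptions,
        # not part of the original file):
        # car:
        #   min_area: 10000
        #   threshold: 0.85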