import argparse
import datetime
import logging
import multiprocessing as mp
import os
import secrets
import shutil
import signal
import sys
import traceback
from multiprocessing import Queue
from multiprocessing.synchronize import Event as MpEvent
from types import FrameType
from typing import Optional

import psutil
from peewee_migrate import Router
from playhouse.sqlite_ext import SqliteExtDatabase
from playhouse.sqliteq import SqliteQueueDatabase
from pydantic import ValidationError

from frigate.api.app import create_app
from frigate.api.auth import hash_password
from frigate.comms.config_updater import ConfigPublisher
from frigate.comms.detections_updater import DetectionProxy
from frigate.comms.dispatcher import Communicator, Dispatcher
from frigate.comms.inter_process import InterProcessCommunicator
from frigate.comms.mqtt import MqttClient
from frigate.comms.ws import WebSocketClient
from frigate.config import AuthModeEnum, FrigateConfig
from frigate.const import (
    CACHE_DIR,
    CLIPS_DIR,
    CONFIG_DIR,
    DEFAULT_DB_PATH,
    EXPORT_DIR,
    MODEL_CACHE_DIR,
    RECORD_DIR,
)
from frigate.events.audio import listen_to_audio
from frigate.events.cleanup import EventCleanup
from frigate.events.external import ExternalEventProcessor
from frigate.events.maintainer import EventProcessor
from frigate.log import log_process, root_configurer
from frigate.models import (
    Event,
    Export,
    Previews,
    Recordings,
    RecordingsToDelete,
    Regions,
    ReviewSegment,
    Timeline,
    User,
)
from frigate.object_detection import ObjectDetectProcess
from frigate.object_processing import TrackedObjectProcessor
from frigate.output.output import output_frames
from frigate.plus import PlusApi
from frigate.ptz.autotrack import PtzAutoTrackerThread
from frigate.ptz.onvif import OnvifController
from frigate.record.cleanup import RecordingCleanup
from frigate.record.export import migrate_exports
from frigate.record.record import manage_recordings
from frigate.review.review import manage_review_segments
from frigate.stats.emitter import StatsEmitter
from frigate.stats.util import stats_init
from frigate.storage import StorageMaintainer
from frigate.timeline import TimelineProcessor
from frigate.types import CameraMetricsTypes, PTZMetricsTypes
from frigate.util.builtin import empty_and_close_queue, save_default_config
from frigate.util.config import migrate_frigate_config
from frigate.util.object import get_camera_regions_grid
from frigate.version import VERSION
from frigate.video import capture_camera, track_camera
from frigate.watchdog import FrigateWatchdog

logger = logging.getLogger(__name__)


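# FrigateApp owns the full application lifecycle: it loads and validates the
# config, prepares the database and IPC channels, launches the detector,
# camera, recording, and review child processes, serves the Flask API, and
# tears everything down again in stop().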
class FrigateApp:
    def __init__(self) -> None:
        self.stop_event: MpEvent = mp.Event()
        self.detection_queue: Queue = mp.Queue()
        self.detectors: dict[str, ObjectDetectProcess] = {}
        self.detection_out_events: dict[str, MpEvent] = {}
        self.detection_shms: list[mp.shared_memory.SharedMemory] = []
        self.log_queue: Queue = mp.Queue()
        self.plus_api = PlusApi()
        self.camera_metrics: dict[str, CameraMetricsTypes] = {}
        self.ptz_metrics: dict[str, PTZMetricsTypes] = {}
        self.processes: dict[str, int] = {}
        self.region_grids: dict[str, list[list[dict[str, int]]]] = {}

    def set_environment_vars(self) -> None:
        for key, value in self.config.environment_vars.items():
            os.environ[key] = value

    def ensure_dirs(self) -> None:
        for d in [
            CONFIG_DIR,
            RECORD_DIR,
            f"{CLIPS_DIR}/cache",
            CACHE_DIR,
            MODEL_CACHE_DIR,
            EXPORT_DIR,
        ]:
            if not os.path.exists(d) and not os.path.islink(d):
                logger.info(f"Creating directory: {d}")
                os.makedirs(d)
            else:
                logger.debug(f"Skipping directory: {d}")

    def init_logger(self) -> None:
        self.log_process = mp.Process(
            target=log_process, args=(self.log_queue,), name="log_process"
        )
        self.log_process.daemon = True
        self.log_process.start()
        self.processes["logger"] = self.log_process.pid or 0
        root_configurer(self.log_queue)

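    # Config resolution: CONFIG_FILE env var (default /config/config.yml), with a
    # .yaml sibling preferred when it exists; a default config is written if no
    # file is found, then the file is migrated and parsed into the runtime config.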
    def init_config(self) -> None:
        config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

        # Check if we can use .yaml instead of .yml
        config_file_yaml = config_file.replace(".yml", ".yaml")
        if os.path.isfile(config_file_yaml):
            config_file = config_file_yaml

        if not os.path.isfile(config_file):
            print("No config file found, saving default config")
            config_file = config_file_yaml
            save_default_config(config_file)

        # check if the config file needs to be migrated
        migrate_frigate_config(config_file)

        user_config = FrigateConfig.parse_file(config_file)
        self.config = user_config.runtime_config(self.plus_api)

        for camera_name in self.config.cameras.keys():
            # create camera_metrics
            self.camera_metrics[camera_name] = {
                "camera_fps": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "skipped_fps": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "process_fps": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                "detection_fps": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "detection_frame": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "read_start": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "ffmpeg_pid": mp.Value("i", 0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "frame_queue": mp.Queue(maxsize=2),
                "capture_process": None,
                "process": None,
                "audio_rms": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                "audio_dBFS": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
            }
            self.ptz_metrics[camera_name] = {
                "ptz_autotracker_enabled": mp.Value(  # type: ignore[typeddict-item]
                    # issue https://github.com/python/typeshed/issues/8799
                    # from mypy 0.981 onwards
                    "i",
                    self.config.cameras[camera_name].onvif.autotracking.enabled,
                ),
                "ptz_tracking_active": mp.Event(),
                "ptz_motor_stopped": mp.Event(),
                "ptz_reset": mp.Event(),
                "ptz_start_time": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "ptz_stop_time": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "ptz_frame_time": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "ptz_zoom_level": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "ptz_max_zoom": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "ptz_min_zoom": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
            }
            self.ptz_metrics[camera_name]["ptz_motor_stopped"].set()

    def set_log_levels(self) -> None:
        logging.getLogger().setLevel(self.config.logger.default.value.upper())
        for log, level in self.config.logger.logs.items():
            logging.getLogger(log).setLevel(level.value.upper())

        if "werkzeug" not in self.config.logger.logs:
            logging.getLogger("werkzeug").setLevel("ERROR")

        if "ws4py" not in self.config.logger.logs:
            logging.getLogger("ws4py").setLevel("ERROR")

    def init_queues(self) -> None:
        # Queue for cameras to push tracked objects to
        self.detected_frames_queue: Queue = mp.Queue(
            maxsize=sum(camera.enabled for camera in self.config.cameras.values()) * 2
        )

        # Queue for timeline events
        self.timeline_queue: Queue = mp.Queue()

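    # Handles the one-time DB location and schema migrations, plus a periodic
    # VACUUM; the last vacuum time is tracked in a .vacuum marker file and the
    # vacuum re-runs when that timestamp is more than two weeks old.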
    def init_database(self) -> None:
        def vacuum_db(db: SqliteExtDatabase) -> None:
            logger.info("Running database vacuum")
            db.execute_sql("VACUUM;")

            try:
                with open(f"{CONFIG_DIR}/.vacuum", "w") as f:
                    f.write(str(datetime.datetime.now().timestamp()))
            except PermissionError:
                logger.error("Unable to write to /config to save DB state")

        def cleanup_timeline_db(db: SqliteExtDatabase) -> None:
            db.execute_sql(
                "DELETE FROM timeline WHERE source_id NOT IN (SELECT id FROM event);"
            )

            try:
                with open(f"{CONFIG_DIR}/.timeline", "w") as f:
                    f.write(str(datetime.datetime.now().timestamp()))
            except PermissionError:
                logger.error("Unable to write to /config to save DB state")

        # Migrate DB location
        old_db_path = DEFAULT_DB_PATH
        if not os.path.isfile(self.config.database.path) and os.path.isfile(
            old_db_path
        ):
            os.rename(old_db_path, self.config.database.path)

        # Migrate DB schema
        migrate_db = SqliteExtDatabase(self.config.database.path)

        # Run migrations
        del logging.getLogger("peewee_migrate").handlers[:]
        router = Router(migrate_db)

        if len(router.diff) > 0:
            logger.info("Making backup of DB before migrations...")
            shutil.copyfile(
                self.config.database.path,
                self.config.database.path.replace("frigate.db", "backup.db"),
            )

        router.run()

        # this is a temporary check to clean up user DB from beta
        # will be removed before final release
        if not os.path.exists(f"{CONFIG_DIR}/.timeline"):
            cleanup_timeline_db(migrate_db)

        # check if vacuum needs to be run
        if os.path.exists(f"{CONFIG_DIR}/.vacuum"):
            with open(f"{CONFIG_DIR}/.vacuum") as f:
                try:
                    timestamp = round(float(f.readline()))
                except Exception:
                    timestamp = 0

                if (
                    timestamp
                    < (
                        datetime.datetime.now() - datetime.timedelta(weeks=2)
                    ).timestamp()
                ):
                    vacuum_db(migrate_db)
        else:
            vacuum_db(migrate_db)

        migrate_db.close()

    def init_go2rtc(self) -> None:
        for proc in psutil.process_iter(["pid", "name"]):
            if proc.info["name"] == "go2rtc":
                logger.info(f"go2rtc process pid: {proc.info['pid']}")
                self.processes["go2rtc"] = proc.info["pid"]

    def init_recording_manager(self) -> None:
        recording_process = mp.Process(
            target=manage_recordings,
            name="recording_manager",
            args=(self.config,),
        )
        recording_process.daemon = True
        self.recording_process = recording_process
        recording_process.start()
        self.processes["recording"] = recording_process.pid or 0
        logger.info(f"Recording process started: {recording_process.pid}")

    def init_review_segment_manager(self) -> None:
        review_segment_process = mp.Process(
            target=manage_review_segments,
            name="review_segment_manager",
            args=(self.config,),
        )
        review_segment_process.daemon = True
        self.review_segment_process = review_segment_process
        review_segment_process.start()
        self.processes["review_segment"] = review_segment_process.pid or 0
        logger.info(f"Review segment process started: {review_segment_process.pid}")

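    # SqliteQueueDatabase funnels writes from this process through a single queue;
    # the SQLite busy timeout scales with the number of enabled cameras (minimum
    # 60s), presumably to ride out bursts of concurrent recording writes.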
    def bind_database(self) -> None:
        """Bind db to the main process."""
        # NOTE: all db accessing processes need to be created before the db can be bound to the main process
        self.db = SqliteQueueDatabase(
            self.config.database.path,
            pragmas={
                "auto_vacuum": "FULL",  # Does not defragment database
                "cache_size": -512 * 1000,  # 512MB of cache
                "synchronous": "NORMAL",  # Safe when using WAL https://www.sqlite.org/pragma.html#pragma_synchronous
            },
            timeout=max(
                60, 10 * len([c for c in self.config.cameras.values() if c.enabled])
            ),
        )
        models = [
            Event,
            Export,
            Previews,
            Recordings,
            RecordingsToDelete,
            Regions,
            ReviewSegment,
            Timeline,
            User,
        ]
        self.db.bind(models)

    def check_db_data_migrations(self) -> None:
        # check if existing exports need to be migrated
        if not os.path.exists(f"{CONFIG_DIR}/.exports"):
            try:
                with open(f"{CONFIG_DIR}/.exports", "w") as f:
                    f.write(str(datetime.datetime.now().timestamp()))
            except PermissionError:
                logger.error("Unable to write to /config to save export state")

            migrate_exports(self.config.cameras.keys())

    def init_external_event_processor(self) -> None:
        self.external_event_processor = ExternalEventProcessor(self.config)

    def init_inter_process_communicator(self) -> None:
        self.inter_process_communicator = InterProcessCommunicator()
        self.inter_config_updater = ConfigPublisher()
        self.inter_detection_proxy = DetectionProxy()

    def init_web_server(self) -> None:
        self.flask_app = create_app(
            self.config,
            self.db,
            self.detected_frames_processor,
            self.storage_maintainer,
            self.onvif_controller,
            self.external_event_processor,
            self.plus_api,
            self.stats_emitter,
        )

    def init_onvif(self) -> None:
        self.onvif_controller = OnvifController(self.config, self.ptz_metrics)

    def init_dispatcher(self) -> None:
        comms: list[Communicator] = []

        if self.config.mqtt.enabled:
            comms.append(MqttClient(self.config))

        comms.append(WebSocketClient(self.config))
        comms.append(self.inter_process_communicator)

        self.dispatcher = Dispatcher(
            self.config,
            self.inter_config_updater,
            self.onvif_controller,
            self.ptz_metrics,
            comms,
        )

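    # Each camera gets a pair of shared-memory blocks for the detector handshake:
    # an input block sized for the largest detector model frame (width * height *
    # 3 bytes) and a small output block for up to 20 detections of 6 floats each
    # (20 * 6 * 4 bytes).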
    def start_detectors(self) -> None:
        for name in self.config.cameras.keys():
            self.detection_out_events[name] = mp.Event()

            try:
                largest_frame = max(
                    [
                        det.model.height * det.model.width * 3
                        for (name, det) in self.config.detectors.items()
                    ]
                )
                shm_in = mp.shared_memory.SharedMemory(
                    name=name,
                    create=True,
                    size=largest_frame,
                )
            except FileExistsError:
                shm_in = mp.shared_memory.SharedMemory(name=name)

            try:
                shm_out = mp.shared_memory.SharedMemory(
                    name=f"out-{name}", create=True, size=20 * 6 * 4
                )
            except FileExistsError:
                shm_out = mp.shared_memory.SharedMemory(name=f"out-{name}")

            self.detection_shms.append(shm_in)
            self.detection_shms.append(shm_out)

        for name, detector_config in self.config.detectors.items():
            self.detectors[name] = ObjectDetectProcess(
                name,
                self.detection_queue,
                self.detection_out_events,
                detector_config,
            )

    def start_ptz_autotracker(self) -> None:
        self.ptz_autotracker_thread = PtzAutoTrackerThread(
            self.config,
            self.onvif_controller,
            self.ptz_metrics,
            self.dispatcher,
            self.stop_event,
        )
        self.ptz_autotracker_thread.start()

    def start_detected_frames_processor(self) -> None:
        self.detected_frames_processor = TrackedObjectProcessor(
            self.config,
            self.dispatcher,
            self.detected_frames_queue,
            self.ptz_autotracker_thread,
            self.stop_event,
        )
        self.detected_frames_processor.start()

    def start_video_output_processor(self) -> None:
        output_processor = mp.Process(
            target=output_frames,
            name="output_processor",
            args=(self.config,),
        )
        output_processor.daemon = True
        self.output_processor = output_processor
        output_processor.start()
        logger.info(f"Output process started: {output_processor.pid}")

    def init_historical_regions(self) -> None:
        # delete region grids for removed or renamed cameras
        cameras = list(self.config.cameras.keys())
        Regions.delete().where(~(Regions.camera << cameras)).execute()

        # create or update region grids for each camera
        for camera in self.config.cameras.values():
            self.region_grids[camera.name] = get_camera_regions_grid(
                camera.name,
                camera.detect,
                max(self.config.model.width, self.config.model.height),
            )

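    # Each enabled camera runs two daemon processes: track_camera (below) consumes
    # frames for object tracking, while capture_camera (started in
    # start_camera_capture_processes) feeds the per-camera frame_queue.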
    def start_camera_processors(self) -> None:
        for name, config in self.config.cameras.items():
            if not self.config.cameras[name].enabled:
                logger.info(f"Camera processor not started for disabled camera {name}")
                continue

            camera_process = mp.Process(
                target=track_camera,
                name=f"camera_processor:{name}",
                args=(
                    name,
                    config,
                    self.config.model,
                    self.config.model.merged_labelmap,
                    self.detection_queue,
                    self.detection_out_events[name],
                    self.detected_frames_queue,
                    self.camera_metrics[name],
                    self.ptz_metrics[name],
                    self.region_grids[name],
                ),
            )
            camera_process.daemon = True
            self.camera_metrics[name]["process"] = camera_process
            camera_process.start()
            logger.info(f"Camera processor started for {name}: {camera_process.pid}")

    def start_camera_capture_processes(self) -> None:
        for name, config in self.config.cameras.items():
            if not self.config.cameras[name].enabled:
                logger.info(f"Capture process not started for disabled camera {name}")
                continue

            capture_process = mp.Process(
                target=capture_camera,
                name=f"camera_capture:{name}",
                args=(name, config, self.camera_metrics[name]),
            )
            capture_process.daemon = True
            self.camera_metrics[name]["capture_process"] = capture_process
            capture_process.start()
            logger.info(f"Capture process started for {name}: {capture_process.pid}")

    def start_audio_processors(self) -> None:
        self.audio_process = None
        if len([c for c in self.config.cameras.values() if c.audio.enabled]) > 0:
            self.audio_process = mp.Process(
                target=listen_to_audio,
                name="audio_capture",
                args=(
                    self.config,
                    self.camera_metrics,
                ),
            )
            self.audio_process.daemon = True
            self.audio_process.start()
            self.processes["audio_detector"] = self.audio_process.pid or 0
            logger.info(f"Audio process started: {self.audio_process.pid}")

    def start_timeline_processor(self) -> None:
        self.timeline_processor = TimelineProcessor(
            self.config, self.timeline_queue, self.stop_event
        )
        self.timeline_processor.start()

    def start_event_processor(self) -> None:
        self.event_processor = EventProcessor(
            self.config,
            self.timeline_queue,
            self.stop_event,
        )
        self.event_processor.start()

    def start_event_cleanup(self) -> None:
        self.event_cleanup = EventCleanup(self.config, self.stop_event)
        self.event_cleanup.start()

    def start_record_cleanup(self) -> None:
        self.record_cleanup = RecordingCleanup(self.config, self.stop_event)
        self.record_cleanup.start()

    def start_storage_maintainer(self) -> None:
        self.storage_maintainer = StorageMaintainer(self.config, self.stop_event)
        self.storage_maintainer.start()

    def start_stats_emitter(self) -> None:
        self.stats_emitter = StatsEmitter(
            self.config,
            stats_init(
                self.config, self.camera_metrics, self.detectors, self.processes
            ),
            self.stop_event,
        )
        self.stats_emitter.start()

    def start_watchdog(self) -> None:
        self.frigate_watchdog = FrigateWatchdog(self.detectors, self.stop_event)
        self.frigate_watchdog.start()

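    # Rough /dev/shm requirement: a 30MB baseline plus, per camera, about nine
    # YUV420 frames at detect resolution (width * height * 1.5 bytes) and a fixed
    # overhead, converted to MiB.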
    def check_shm(self) -> None:
        available_shm = round(shutil.disk_usage("/dev/shm").total / pow(2, 20), 1)
        min_req_shm = 30

        for _, camera in self.config.cameras.items():
            min_req_shm += round(
                (camera.detect.width * camera.detect.height * 1.5 * 9 + 270480)
                / 1048576,
                1,
            )

        if available_shm < min_req_shm:
            logger.warning(
                f"The current SHM size of {available_shm}MB is too small, recommend increasing it to at least {min_req_shm}MB."
            )

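    # With native auth, bootstrap an "admin" user with a random password on first
    # start (or regenerate it when reset_admin_password is set) and log the
    # generated password once so it can be copied.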
    def init_auth(self) -> None:
        if self.config.auth.mode == AuthModeEnum.native:
            if User.select().count() == 0:
                password = secrets.token_hex(16)
                password_hash = hash_password(
                    password, iterations=self.config.auth.hash_iterations
                )
                User.insert(
                    {
                        User.username: "admin",
                        User.password_hash: password_hash,
                    }
                ).execute()

                logger.info("********************************************************")
                logger.info("********************************************************")
                logger.info("*** Auth is enabled, but no users exist. ***")
                logger.info("*** Created a default user: ***")
                logger.info("*** User: admin ***")
                logger.info(f"*** Password: {password} ***")
                logger.info("********************************************************")
                logger.info("********************************************************")
            elif self.config.auth.reset_admin_password:
                password = secrets.token_hex(16)
                password_hash = hash_password(
                    password, iterations=self.config.auth.hash_iterations
                )
                User.replace(username="admin", password_hash=password_hash).execute()

                logger.info("********************************************************")
                logger.info("********************************************************")
                logger.info("*** Reset admin password set in the config. ***")
                logger.info(f"*** Password: {password} ***")
                logger.info("********************************************************")
                logger.info("********************************************************")

    def start(self) -> None:
        parser = argparse.ArgumentParser(
            prog="Frigate",
            description="An NVR with realtime local object detection for IP cameras.",
        )
        parser.add_argument("--validate-config", action="store_true")
        args = parser.parse_args()

        self.init_logger()
        logger.info(f"Starting Frigate ({VERSION})")

        try:
            self.ensure_dirs()
            try:
                self.init_config()
            except Exception as e:
                print("*************************************************************")
                print("*************************************************************")
                print("*** Your config file is not valid! ***")
                print("*** Please check the docs at ***")
                print("*** https://docs.frigate.video/configuration/index ***")
                print("*************************************************************")
                print("*************************************************************")
                print("*** Config Validation Errors ***")
                print("*************************************************************")
                if isinstance(e, ValidationError):
                    for error in e.errors():
                        location = ".".join(str(item) for item in error["loc"])
                        print(f"{location}: {error['msg']}")
                else:
                    print(e)
                    print(traceback.format_exc())
                print("*************************************************************")
                print("*** End Config Validation Errors ***")
                print("*************************************************************")
                self.log_process.terminate()
                sys.exit(1)
            if args.validate_config:
                print("*************************************************************")
                print("*** Your config file is valid. ***")
                print("*************************************************************")
                self.log_process.terminate()
                sys.exit(0)
            self.set_environment_vars()
            self.set_log_levels()
            self.init_queues()
            self.init_database()
            self.init_onvif()
            self.init_recording_manager()
            self.init_review_segment_manager()
            self.init_go2rtc()
            self.bind_database()
            self.check_db_data_migrations()
            self.init_inter_process_communicator()
            self.init_dispatcher()
        except Exception as e:
            print(e)
            self.log_process.terminate()
            sys.exit(1)
        self.start_detectors()
        self.start_video_output_processor()
        self.start_ptz_autotracker()
        self.init_historical_regions()
        self.start_detected_frames_processor()
        self.start_camera_processors()
        self.start_camera_capture_processes()
        self.start_audio_processors()
        self.start_storage_maintainer()
        self.init_external_event_processor()
        self.start_stats_emitter()
        self.init_web_server()
        self.start_timeline_processor()
        self.start_event_processor()
        self.start_event_cleanup()
        self.start_record_cleanup()
        self.start_watchdog()
        self.check_shm()
        self.init_auth()

        # Flask only listens for SIGINT, so we need to catch SIGTERM and send SIGINT
        def receiveSignal(signalNumber: int, frame: Optional[FrameType]) -> None:
            os.kill(os.getpid(), signal.SIGINT)

        signal.signal(signal.SIGTERM, receiveSignal)

        try:
            self.flask_app.run(host="127.0.0.1", port=5001, debug=False, threaded=True)
        except KeyboardInterrupt:
            pass

        logger.info("Flask has exited...")

        self.stop()

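    # Shutdown: signal stop_event, close out any open events and review segments
    # in the DB, then stop children roughly in reverse start order, draining the
    # shared queues so child processes can exit cleanly.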
    def stop(self) -> None:
        logger.info("Stopping...")

        self.stop_event.set()

        # set an end_time on entries without an end_time before exiting
        Event.update(
            end_time=datetime.datetime.now().timestamp(), has_snapshot=False
        ).where(Event.end_time == None).execute()
        ReviewSegment.update(end_time=datetime.datetime.now().timestamp()).where(
            ReviewSegment.end_time == None
        ).execute()

        # stop the audio process
        if self.audio_process is not None:
            self.audio_process.terminate()
            self.audio_process.join()

        # ensure the capture processes are done
        for camera in self.camera_metrics.keys():
            capture_process = self.camera_metrics[camera]["capture_process"]
            if capture_process is not None:
                logger.info(f"Waiting for capture process for {camera} to stop")
                capture_process.terminate()
                capture_process.join()

        # ensure the camera processors are done
        for camera in self.camera_metrics.keys():
            camera_process = self.camera_metrics[camera]["process"]
            if camera_process is not None:
                logger.info(f"Waiting for process for {camera} to stop")
                camera_process.terminate()
                camera_process.join()
                logger.info(f"Closing frame queue for {camera}")
                frame_queue = self.camera_metrics[camera]["frame_queue"]
                empty_and_close_queue(frame_queue)

        # ensure the detectors are done
        for detector in self.detectors.values():
            detector.stop()

        empty_and_close_queue(self.detection_queue)
        logger.info("Detection queue closed")

        self.detected_frames_processor.join()
        empty_and_close_queue(self.detected_frames_queue)
        logger.info("Detected frames queue closed")

        self.timeline_processor.join()
        self.event_processor.join()
        empty_and_close_queue(self.timeline_queue)
        logger.info("Timeline queue closed")

        self.output_processor.terminate()
        self.output_processor.join()

        self.recording_process.terminate()
        self.recording_process.join()

        self.review_segment_process.terminate()
        self.review_segment_process.join()

        self.external_event_processor.stop()
        self.dispatcher.stop()
        self.ptz_autotracker_thread.join()

        self.event_cleanup.join()
        self.record_cleanup.join()
        self.stats_emitter.join()
        self.frigate_watchdog.join()
        self.db.stop()

        # Stop Communicators
        self.inter_process_communicator.stop()
        self.inter_config_updater.stop()
        self.inter_detection_proxy.stop()

        while len(self.detection_shms) > 0:
            shm = self.detection_shms.pop()
            shm.close()
            shm.unlink()

        self.log_process.terminate()
        self.log_process.join()

        os._exit(os.EX_OK)
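
# A minimal launch sketch, assuming this module is importable as frigate.app
# (the actual entry point lives elsewhere in the package):
#
#   from frigate.app import FrigateApp
#
#   if __name__ == "__main__":
#       FrigateApp().start()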