Mirror of https://github.com/blakeblackshear/frigate.git
Metadata Timeline (#6194)
* Create timeline table
* Fix indexes
* Add other fields
* Adjust schema to be less descriptive
* Handle timeline queue from tracked object data
* Setup timeline queue in events
* Add source id for index
* Add other fields
* Fixes
* Formatting
* Store better data
* Add api with filtering
* Setup basic UI for timeline in events
* Cleanups
* Add recordings snapshot url
* Start working on timeline ui
* Add tooltip with info
* Improve icons
* Fix start time with clip
* Move player logic back to clips
* Make box in timeline relative coordinates
* Make region relative
* Get box overlay working
* Remove overlay when playing again
* Add disclaimer when selecting overlay points
* Add docs for new apis
* Fix mobile
* Fix docs
* Change color of bottom center box
* Fix vscode formatting
Parent: 3c72b96042
Commit: fbaab71d78
HTTP API docs

@@ -168,6 +168,16 @@ Events from the database. Accepts the following query string parameters:

| `include_thumbnails` | int | Include thumbnails in the response (0 or 1) |
| `in_progress`        | int | Limit to events in progress (0 or 1)        |

### `GET /api/timeline`

Timeline of key moments for an event (or events) from the database. Accepts the following query string parameters:

| param       | Type | Description                         |
| ----------- | ---- | ----------------------------------- |
| `camera`    | str  | Name of camera                      |
| `source_id` | str  | ID of tracked object                |
| `limit`     | int  | Limit the number of events returned |
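As a quick illustration of the new endpoint, the sketch below queries the timeline for a single tracked object. It is a minimal example, assuming a Frigate instance reachable at `http://localhost:5000` and the `requests` package; the event id is a placeholder.

```python
import requests

FRIGATE = "http://localhost:5000"  # assumed host/port for a local Frigate instance

# Fetch timeline entries for one tracked object (the event id is a placeholder).
resp = requests.get(
    f"{FRIGATE}/api/timeline",
    params={"source_id": "1680000000.123456-abcd12", "limit": 10},
)
resp.raise_for_status()

for entry in resp.json():
    # Each entry carries timestamp, camera, source, source_id, class_type and data.
    print(entry["timestamp"], entry["class_type"], entry["data"].get("zones", []))
```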
### `GET /api/events/summary`

Returns summary data for events in the database. Used by the Home Assistant integration.

@@ -233,6 +243,10 @@ Accepts the following query string parameters, but they are only applied when an

Returns the snapshot image from the latest event for the given camera and label combo. Using `any` as the label will return the latest thumbnail regardless of type.

### `GET /api/<camera_name>/recordings/<frame_time>/snapshot.png`

Returns the snapshot image from the specified point in that camera's recordings.
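A similar hedged sketch for the new recordings snapshot endpoint, again assuming a local instance at `http://localhost:5000`; the camera name and `frame_time` (a Unix timestamp that must fall inside an existing recording segment) are placeholders.

```python
import requests

FRIGATE = "http://localhost:5000"  # assumed host/port
camera = "front_door"              # placeholder camera name
frame_time = 1680000000.0          # Unix timestamp inside a recorded segment

resp = requests.get(f"{FRIGATE}/api/{camera}/recordings/{frame_time}/snapshot.png")
resp.raise_for_status()

with open("snapshot.png", "wb") as f:
    f.write(resp.content)  # PNG extracted from the recording by ffmpeg server-side
```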
### `GET /clips/<camera>-<id>.jpg`

JPG snapshot for the given camera and event id.
frigate/app.py

@@ -23,13 +23,14 @@ from frigate.object_detection import ObjectDetectProcess
 from frigate.events import EventCleanup, EventProcessor
 from frigate.http import create_app
 from frigate.log import log_process, root_configurer
-from frigate.models import Event, Recordings
+from frigate.models import Event, Recordings, Timeline
 from frigate.object_processing import TrackedObjectProcessor
 from frigate.output import output_frames
 from frigate.plus import PlusApi
 from frigate.record import RecordingCleanup, RecordingMaintainer
 from frigate.stats import StatsEmitter, stats_init
 from frigate.storage import StorageMaintainer
+from frigate.timeline import TimelineProcessor
 from frigate.version import VERSION
 from frigate.video import capture_camera, track_camera
 from frigate.watchdog import FrigateWatchdog

@@ -135,6 +136,9 @@ class FrigateApp:
         # Queue for recordings info
         self.recordings_info_queue: Queue = mp.Queue()

+        # Queue for timeline events
+        self.timeline_queue: Queue = mp.Queue()
+
     def init_database(self) -> None:
         # Migrate DB location
         old_db_path = os.path.join(CLIPS_DIR, "frigate.db")

@@ -154,7 +158,7 @@ class FrigateApp:
         migrate_db.close()

         self.db = SqliteQueueDatabase(self.config.database.path)
-        models = [Event, Recordings]
+        models = [Event, Recordings, Timeline]
         self.db.bind(models)

     def init_stats(self) -> None:

@@ -286,12 +290,19 @@ class FrigateApp:
             capture_process.start()
             logger.info(f"Capture process started for {name}: {capture_process.pid}")

+    def start_timeline_processor(self) -> None:
+        self.timeline_processor = TimelineProcessor(
+            self.config, self.timeline_queue, self.stop_event
+        )
+        self.timeline_processor.start()
+
     def start_event_processor(self) -> None:
         self.event_processor = EventProcessor(
             self.config,
             self.camera_metrics,
             self.event_queue,
             self.event_processed_queue,
+            self.timeline_queue,
             self.stop_event,
         )
         self.event_processor.start()

@@ -384,6 +395,7 @@ class FrigateApp:
         self.start_storage_maintainer()
         self.init_stats()
         self.init_web_server()
+        self.start_timeline_processor()
         self.start_event_processor()
         self.start_event_cleanup()
         self.start_recording_maintainer()
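The wiring above follows a simple producer/consumer pattern: `FrigateApp` creates one multiprocessing queue and hands it to both the `EventProcessor` (producer) and the new `TimelineProcessor` (consumer thread). A minimal sketch of that pattern, with placeholder classes standing in for the real Frigate ones:

```python
import multiprocessing as mp
import queue
import threading
import time

# Minimal sketch of the wiring: one shared queue, a producer, and a consumer
# thread that polls with a timeout so the stop event is honored. Producer and
# Consumer are placeholders for Frigate's EventProcessor and TimelineProcessor.
timeline_queue = mp.Queue()
stop_event = mp.Event()


class Producer(threading.Thread):
    def run(self) -> None:
        timeline_queue.put("timeline message")  # EventProcessor pushes real event tuples


class Consumer(threading.Thread):
    def run(self) -> None:
        while not stop_event.is_set():
            try:
                msg = timeline_queue.get(timeout=1)  # same timeout pattern as TimelineProcessor
            except queue.Empty:
                continue
            print("received:", msg)


if __name__ == "__main__":
    consumer = Consumer()
    consumer.start()
    Producer().start()
    time.sleep(2)       # let the message flow through
    stop_event.set()    # consumer exits its polling loop
    consumer.join()
```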
frigate/events.py

@@ -3,14 +3,14 @@ import logging
 import os
 import queue
 import threading
 import time
 from pathlib import Path

 from peewee import fn

-from frigate.config import EventsConfig, FrigateConfig, RecordConfig
+from frigate.config import EventsConfig, FrigateConfig
 from frigate.const import CLIPS_DIR
 from frigate.models import Event
+from frigate.timeline import TimelineSourceEnum
 from frigate.types import CameraMetricsTypes

 from multiprocessing.queues import Queue

@@ -48,6 +48,7 @@ class EventProcessor(threading.Thread):
         camera_processes: dict[str, CameraMetricsTypes],
         event_queue: Queue,
         event_processed_queue: Queue,
+        timeline_queue: Queue,
         stop_event: MpEvent,
     ):
         threading.Thread.__init__(self)

@@ -56,6 +57,7 @@ class EventProcessor(threading.Thread):
         self.camera_processes = camera_processes
         self.event_queue = event_queue
         self.event_processed_queue = event_processed_queue
+        self.timeline_queue = timeline_queue
         self.events_in_process: Dict[str, Event] = {}
         self.stop_event = stop_event

@@ -73,6 +75,16 @@ class EventProcessor(threading.Thread):
             logger.debug(f"Event received: {event_type} {camera} {event_data['id']}")

+            self.timeline_queue.put(
+                (
+                    camera,
+                    TimelineSourceEnum.tracked_object,
+                    event_type,
+                    self.events_in_process.get(event_data["id"]),
+                    event_data,
+                )
+            )
+
             event_config: EventsConfig = self.config.cameras[camera].record.events

             if event_type == "start":
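For reference, the item placed on `timeline_queue` above is a plain five-element tuple: camera name, source, event type, the previously stored event data, and the current event data. A hedged sketch of one such message (all concrete values are made up for illustration):

```python
# Illustrative payload only; real values come from the tracked object data.
camera = "front_door"
source = "tracked_object"          # corresponds to TimelineSourceEnum.tracked_object
event_type = "start"               # later messages use "update" and "end"
prev_event_data = None             # for "update"/"end", the previously stored event dict

event_data = {
    "id": "1680000000.123456-abcd12",
    "label": "person",
    "frame_time": 1680000000.123456,
    "box": [120, 80, 320, 460],     # absolute pixels at the detect resolution
    "region": [0, 0, 480, 480],
    "current_zones": [],
}

message = (camera, source, event_type, prev_event_data, event_data)
# timeline_queue.put(message)
```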
frigate/http.py

@@ -33,7 +33,7 @@ from playhouse.shortcuts import model_to_dict

 from frigate.config import FrigateConfig
 from frigate.const import CLIPS_DIR, MAX_SEGMENT_DURATION, RECORD_DIR
-from frigate.models import Event, Recordings
+from frigate.models import Event, Recordings, Timeline
 from frigate.object_processing import TrackedObject
 from frigate.stats import stats_snapshot
 from frigate.util import (

@@ -414,6 +414,42 @@ def event_thumbnail(id, max_cache_age=2592000):
     return response


+@bp.route("/timeline")
+def timeline():
+    camera = request.args.get("camera", "all")
+    source_id = request.args.get("source_id", type=str)
+    limit = request.args.get("limit", 100)
+
+    clauses = []
+
+    selected_columns = [
+        Timeline.timestamp,
+        Timeline.camera,
+        Timeline.source,
+        Timeline.source_id,
+        Timeline.class_type,
+        Timeline.data,
+    ]
+
+    if camera != "all":
+        clauses.append((Timeline.camera == camera))
+
+    if source_id:
+        clauses.append((Timeline.source_id == source_id))
+
+    if len(clauses) == 0:
+        clauses.append((True))
+
+    timeline = (
+        Timeline.select(*selected_columns)
+        .where(reduce(operator.and_, clauses))
+        .order_by(Timeline.timestamp.asc())
+        .limit(limit)
+    )
+
+    return jsonify([model_to_dict(t) for t in timeline])
+
+
 @bp.route("/<camera_name>/<label>/best.jpg")
 @bp.route("/<camera_name>/<label>/thumbnail.jpg")
 def label_thumbnail(camera_name, label):

@@ -924,6 +960,53 @@ def latest_frame(camera_name):
     return "Camera named {} not found".format(camera_name), 404


+@bp.route("/<camera_name>/recordings/<frame_time>/snapshot.png")
+def get_snapshot_from_recording(camera_name: str, frame_time: str):
+    if camera_name not in current_app.frigate_config.cameras:
+        return "Camera named {} not found".format(camera_name), 404
+
+    frame_time = float(frame_time)
+    recording_query = (
+        Recordings.select()
+        .where(
+            ((frame_time > Recordings.start_time) & (frame_time < Recordings.end_time))
+        )
+        .where(Recordings.camera == camera_name)
+    )
+
+    try:
+        recording: Recordings = recording_query.get()
+        time_in_segment = frame_time - recording.start_time
+
+        ffmpeg_cmd = [
+            "ffmpeg",
+            "-hide_banner",
+            "-loglevel",
+            "warning",
+            "-ss",
+            f"00:00:{time_in_segment}",
+            "-i",
+            recording.path,
+            "-frames:v",
+            "1",
+            "-c:v",
+            "png",
+            "-f",
+            "image2pipe",
+            "-",
+        ]
+
+        process = sp.run(
+            ffmpeg_cmd,
+            capture_output=True,
+        )
+        response = make_response(process.stdout)
+        response.headers["Content-Type"] = "image/png"
+        return response
+    except DoesNotExist:
+        return "Recording not found for {} at {}".format(camera_name, frame_time), 404
+
+
 @bp.route("/recordings/storage", methods=["GET"])
 def get_recordings_storage_usage():
     recording_stats = stats_snapshot(
frigate/models.py

@@ -32,6 +32,15 @@ class Event(Model):  # type: ignore[misc]
     plus_id = CharField(max_length=30)


+class Timeline(Model):  # type: ignore[misc]
+    timestamp = DateTimeField()
+    camera = CharField(index=True, max_length=20)
+    source = CharField(index=True, max_length=20)  # ex: tracked object, audio, external
+    source_id = CharField(index=True, max_length=30)
+    class_type = CharField(max_length=50)  # ex: entered_zone, audio_heard
+    data = JSONField()  # ex: tracked object id, region, box, etc.
+
+
 class Recordings(Model):  # type: ignore[misc]
     id = CharField(null=False, primary_key=True, max_length=30)
     camera = CharField(index=True, max_length=20)
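To make the schema concrete, a single `entered_zone` row written by the timeline processor might look roughly like this (values are illustrative; the box and region are stored as coordinates relative to the camera's detect resolution):

```python
# Illustrative Timeline row (all values made up); box/region are relative 0-1 coords.
example_row = {
    "timestamp": 1680000000.123456,
    "camera": "front_door",
    "source": "tracked_object",
    "source_id": "1680000000.123456-abcd12",
    "class_type": "entered_zone",      # or "visible" / "gone"
    "data": {
        "box": [0.25, 0.17, 0.67, 0.96],
        "label": "person",
        "region": [0.0, 0.0, 1.0, 1.0],
        "zones": ["porch"],            # only present for entered_zone entries
    },
}
```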
frigate/timeline.py (new file, 140 lines)

@@ -0,0 +1,140 @@
"""Record events for object, audio, etc. detections."""

import logging
import threading
import queue

from enum import Enum

from frigate.config import FrigateConfig
from frigate.models import Timeline

from multiprocessing.queues import Queue
from multiprocessing.synchronize import Event as MpEvent

logger = logging.getLogger(__name__)


class TimelineSourceEnum(str, Enum):
    # api = "api"
    # audio = "audio"
    tracked_object = "tracked_object"


class TimelineProcessor(threading.Thread):
    """Handle timeline queue and update DB."""

    def __init__(
        self,
        config: FrigateConfig,
        queue: Queue,
        stop_event: MpEvent,
    ) -> None:
        threading.Thread.__init__(self)
        self.name = "timeline_processor"
        self.config = config
        self.queue = queue
        self.stop_event = stop_event

    def run(self) -> None:
        while not self.stop_event.is_set():
            try:
                (
                    camera,
                    input_type,
                    event_type,
                    prev_event_data,
                    event_data,
                ) = self.queue.get(timeout=1)
            except queue.Empty:
                continue

            if input_type == TimelineSourceEnum.tracked_object:
                self.handle_object_detection(
                    camera, event_type, prev_event_data, event_data
                )

    def handle_object_detection(
        self,
        camera: str,
        event_type: str,
        prev_event_data: dict[any, any],
        event_data: dict[any, any],
    ) -> None:
        """Handle object detection."""
        camera_config = self.config.cameras[camera]

        if event_type == "start":
            Timeline.insert(
                timestamp=event_data["frame_time"],
                camera=camera,
                source="tracked_object",
                source_id=event_data["id"],
                class_type="visible",
                data={
                    "box": [
                        event_data["box"][0] / camera_config.detect.width,
                        event_data["box"][1] / camera_config.detect.height,
                        event_data["box"][2] / camera_config.detect.width,
                        event_data["box"][3] / camera_config.detect.height,
                    ],
                    "label": event_data["label"],
                    "region": [
                        event_data["region"][0] / camera_config.detect.width,
                        event_data["region"][1] / camera_config.detect.height,
                        event_data["region"][2] / camera_config.detect.width,
                        event_data["region"][3] / camera_config.detect.height,
                    ],
                },
            ).execute()
        elif (
            event_type == "update"
            and prev_event_data["current_zones"] != event_data["current_zones"]
            and len(event_data["current_zones"]) > 0
        ):
            Timeline.insert(
                timestamp=event_data["frame_time"],
                camera=camera,
                source="tracked_object",
                source_id=event_data["id"],
                class_type="entered_zone",
                data={
                    "box": [
                        event_data["box"][0] / camera_config.detect.width,
                        event_data["box"][1] / camera_config.detect.height,
                        event_data["box"][2] / camera_config.detect.width,
                        event_data["box"][3] / camera_config.detect.height,
                    ],
                    "label": event_data["label"],
                    "region": [
                        event_data["region"][0] / camera_config.detect.width,
                        event_data["region"][1] / camera_config.detect.height,
                        event_data["region"][2] / camera_config.detect.width,
                        event_data["region"][3] / camera_config.detect.height,
                    ],
                    "zones": event_data["current_zones"],
                },
            ).execute()
        elif event_type == "end":
            Timeline.insert(
                timestamp=event_data["frame_time"],
                camera=camera,
                source="tracked_object",
                source_id=event_data["id"],
                class_type="gone",
                data={
                    "box": [
                        event_data["box"][0] / camera_config.detect.width,
                        event_data["box"][1] / camera_config.detect.height,
                        event_data["box"][2] / camera_config.detect.width,
                        event_data["box"][3] / camera_config.detect.height,
                    ],
                    "label": event_data["label"],
                    "region": [
                        event_data["region"][0] / camera_config.detect.width,
                        event_data["region"][1] / camera_config.detect.height,
                        event_data["region"][2] / camera_config.detect.width,
                        event_data["region"][3] / camera_config.detect.height,
                    ],
                },
            ).execute()
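The repeated box/region scaling above normalizes absolute pixel coordinates from the detect resolution into 0-1 relative coordinates, which is what lets the web UI overlay the box on a stream of any size. A small hedged helper capturing that step (not part of the actual file):

```python
def to_relative(box: list[int], width: int, height: int) -> list[float]:
    """Convert an absolute [x1, y1, x2, y2] box in detect-resolution pixels
    to relative 0-1 coordinates, mirroring the scaling done above."""
    x1, y1, x2, y2 = box
    return [x1 / width, y1 / height, x2 / width, y2 / height]


# Example: a 1280x720 detect resolution.
print(to_relative([320, 180, 640, 540], 1280, 720))  # [0.25, 0.25, 0.5, 0.75]
```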
migrations/013_create_timeline_table.py (new file, 48 lines)

@@ -0,0 +1,48 @@
"""Peewee migrations -- 013_create_timeline_table.py.

Some examples (model - class or model name)::

    > Model = migrator.orm['model_name']            # Return model in current state by name

    > migrator.sql(sql)                             # Run custom SQL
    > migrator.python(func, *args, **kwargs)        # Run python code
    > migrator.create_model(Model)                  # Create a model (could be used as decorator)
    > migrator.remove_model(model, cascade=True)    # Remove a model
    > migrator.add_fields(model, **fields)          # Add fields to a model
    > migrator.change_fields(model, **fields)       # Change fields
    > migrator.remove_fields(model, *field_names, cascade=True)
    > migrator.rename_field(model, old_field_name, new_field_name)
    > migrator.rename_table(model, new_table_name)
    > migrator.add_index(model, *col_names, unique=False)
    > migrator.drop_index(model, *col_names)
    > migrator.add_not_null(model, *field_names)
    > migrator.drop_not_null(model, *field_names)
    > migrator.add_default(model, field_name, default)

"""

import datetime as dt
import peewee as pw
from playhouse.sqlite_ext import *
from decimal import ROUND_HALF_EVEN
from frigate.models import Recordings

try:
    import playhouse.postgres_ext as pw_pext
except ImportError:
    pass

SQL = pw.SQL


def migrate(migrator, database, fake=False, **kwargs):
    migrator.sql(
        'CREATE TABLE IF NOT EXISTS "timeline" ("timestamp" DATETIME NOT NULL, "camera" VARCHAR(20) NOT NULL, "source" VARCHAR(20) NOT NULL, "source_id" VARCHAR(30), "class_type" VARCHAR(50) NOT NULL, "data" JSON)'
    )
    migrator.sql('CREATE INDEX IF NOT EXISTS "timeline_camera" ON "timeline" ("camera")')
    migrator.sql('CREATE INDEX IF NOT EXISTS "timeline_source" ON "timeline" ("source")')
    migrator.sql('CREATE INDEX IF NOT EXISTS "timeline_source_id" ON "timeline" ("source_id")')


def rollback(migrator, database, fake=False, **kwargs):
    pass
web/src/components/TimelineSummary.jsx (new file, 95 lines)

@@ -0,0 +1,95 @@
import { h } from 'preact';
import useSWR from 'swr';
import ActivityIndicator from './ActivityIndicator';
import { formatUnixTimestampToDateTime } from '../utils/dateUtil';
import PlayIcon from '../icons/Play';
import ExitIcon from '../icons/Exit';
import { Zone } from '../icons/Zone';
import { useState } from 'preact/hooks';
import Button from './Button';

export default function TimelineSummary({ event, onFrameSelected }) {
  const { data: eventTimeline } = useSWR([
    'timeline',
    {
      source_id: event.id,
    },
  ]);

  const { data: config } = useSWR('config');

  const [timeIndex, setTimeIndex] = useState(-1);

  const onSelectMoment = async (index) => {
    setTimeIndex(index);
    onFrameSelected(eventTimeline[index]);
  };

  if (!eventTimeline || !config) {
    return <ActivityIndicator />;
  }

  return (
    <div className="flex flex-col">
      <div className="h-14 flex justify-center">
        <div className="sm:w-1 md:w-1/4 flex flex-row flex-nowrap justify-between overflow-auto">
          {eventTimeline.map((item, index) =>
            item.class_type == 'visible' || item.class_type == 'gone' ? (
              <Button
                key={index}
                className="rounded-full"
                type="text"
                color={index == timeIndex ? 'blue' : 'gray'}
                aria-label={window.innerWidth > 640 ? getTimelineItemDescription(config, item, event) : ''}
                onClick={() => onSelectMoment(index)}
              >
                {item.class_type == 'visible' ? <PlayIcon className="w-8" /> : <ExitIcon className="w-8" />}
              </Button>
            ) : (
              <Button
                key={index}
                className="rounded-full"
                type="text"
                color={index == timeIndex ? 'blue' : 'gray'}
                aria-label={window.innerWidth > 640 ? getTimelineItemDescription(config, item, event) : ''}
                onClick={() => onSelectMoment(index)}
              >
                <Zone className="w-8" />
              </Button>
            )
          )}
        </div>
      </div>
      {timeIndex >= 0 ? (
        <div className="bg-gray-500 p-4 m-2 max-w-md self-center">
          Disclaimer: This data comes from the detect feed but is shown on the recordings, it is unlikely that the
          streams are perfectly in sync so the bounding box and the footage will not line up perfectly.
        </div>
      ) : null}
    </div>
  );
}

function getTimelineItemDescription(config, timelineItem, event) {
  if (timelineItem.class_type == 'visible') {
    return `${event.label} detected at ${formatUnixTimestampToDateTime(timelineItem.timestamp, {
      date_style: 'short',
      time_style: 'medium',
      time_format: config.ui.time_format,
    })}`;
  } else if (timelineItem.class_type == 'entered_zone') {
    return `${event.label.replaceAll('_', ' ')} entered ${timelineItem.data.zones
      .join(' and ')
      .replaceAll('_', ' ')} at ${formatUnixTimestampToDateTime(timelineItem.timestamp, {
      date_style: 'short',
      time_style: 'medium',
      time_format: config.ui.time_format,
    })}`;
  }

  return `${event.label} left at ${formatUnixTimestampToDateTime(timelineItem.timestamp, {
    date_style: 'short',
    time_style: 'medium',
    time_format: config.ui.time_format,
  })}`;
}
Tooltip component (web UI)

@@ -49,7 +49,7 @@ export default function Tooltip({ relativeTo, text }) {
   const tooltip = (
     <div
       role="tooltip"
-      className={`shadow max-w-lg absolute pointer-events-none bg-gray-900 dark:bg-gray-200 bg-opacity-80 rounded px-2 py-1 transition-transform transition-opacity duration-75 transform scale-90 opacity-0 text-gray-100 dark:text-gray-900 text-sm ${
+      className={`shadow max-w-lg absolute pointer-events-none bg-gray-900 dark:bg-gray-200 bg-opacity-80 rounded px-2 py-1 transition-transform transition-opacity duration-75 transform scale-90 opacity-0 text-gray-100 dark:text-gray-900 text-sm capitalize ${
         position.top >= 0 ? 'opacity-100 scale-100' : ''
       }`}
       ref={ref}
web/src/icons/Exit.jsx (new file, 12 lines)

@@ -0,0 +1,12 @@
import { h } from 'preact';
import { memo } from 'preact/compat';

function Exit({ className = '' }) {
  return (
    <svg className={`fill-current ${className}`} viewBox="0 0 24 24">
      <path d="M22 12l-4-4v3h-8v2h8v3m2 2a10 10 0 110-12h-2.73a8 8 0 100 12z" />
    </svg>
  );
}

export default memo(Exit);
Play icon (web UI)

@@ -1,9 +1,9 @@
 import { h } from 'preact';
 import { memo } from 'preact/compat';

-export function Play() {
+export function Play({ className = '' }) {
   return (
-    <svg style="width:24px;height:24px" viewBox="0 0 24 24">
+    <svg className={`fill-current ${className}`} viewBox="0 0 24 24">
       <path fill="currentColor" d="M8,5.14V19.14L19,12.14L8,5.14Z" />
     </svg>
   );
Events page (web UI)

@@ -26,6 +26,7 @@ import Dialog from '../components/Dialog';
 import MultiSelect from '../components/MultiSelect';
 import { formatUnixTimestampToDateTime, getDurationFromTimestamps } from '../utils/dateUtil';
 import TimeAgo from '../components/TimeAgo';
+import TimelineSummary from '../components/TimelineSummary';

 const API_LIMIT = 25;

@@ -60,6 +61,7 @@ export default function Events({ path, ...props }) {
   });
   const [uploading, setUploading] = useState([]);
   const [viewEvent, setViewEvent] = useState();
+  const [eventOverlay, setEventOverlay] = useState();
   const [eventDetailType, setEventDetailType] = useState('clip');
   const [downloadEvent, setDownloadEvent] = useState({
     id: null,

@@ -180,6 +182,18 @@ export default function Events({ path, ...props }) {
     onFilter(name, items);
   };

+  const onEventFrameSelected = (event, frame) => {
+    const eventDuration = event.end_time - event.start_time;
+
+    if (this.player) {
+      this.player.pause();
+      const videoOffset = this.player.duration() - eventDuration;
+      const startTime = videoOffset + (frame.timestamp - event.start_time);
+      this.player.currentTime(startTime);
+      setEventOverlay(frame);
+    }
+  };
+
   const datePicker = useRef();

   const downloadButton = useRef();
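The seek math in `onEventFrameSelected` lines up a timeline moment with the event clip: the clip can start before the event itself (pre-capture), so the difference between clip duration and event duration is added back before seeking. A quick numeric illustration of that calculation, restated in Python (all values are made up):

```python
# Illustrative numbers only: a 70 s clip for a 60 s event means 10 s of pre-capture.
event_start = 1680000000.0
event_end = 1680000060.0
clip_duration = 70.0

frame_timestamp = 1680000025.0  # timeline moment we want to jump to

event_duration = event_end - event_start                   # 60.0
video_offset = clip_duration - event_duration              # 10.0 s of footage before the event
seek_to = video_offset + (frame_timestamp - event_start)   # 10 + 25 = 35.0 s into the clip
print(seek_to)
```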
@@ -526,7 +540,7 @@ export default function Events({ path, ...props }) {
           </div>
         </div>
         <div class="hidden sm:flex flex-col justify-end mr-2">
-          {(event.end_time && event.has_snapshot) && (
+          {event.end_time && event.has_snapshot && (
             <Fragment>
               {event.plus_id ? (
                 <div className="uppercase text-xs">Sent to Frigate+</div>

@@ -573,20 +587,52 @@ export default function Events({ path, ...props }) {

             <div>
               {eventDetailType == 'clip' && event.has_clip ? (
-                <VideoPlayer
-                  options={{
-                    preload: 'auto',
-                    autoplay: true,
-                    sources: [
-                      {
-                        src: `${apiHost}vod/event/${event.id}/master.m3u8`,
-                        type: 'application/vnd.apple.mpegurl',
-                      },
-                    ],
-                  }}
-                  seekOptions={{ forward: 10, backward: 5 }}
-                  onReady={() => {}}
-                />
+                <div>
+                  <TimelineSummary
+                    event={event}
+                    onFrameSelected={(frame) => onEventFrameSelected(event, frame)}
+                  />
+                  <div>
+                    <VideoPlayer
+                      options={{
+                        preload: 'auto',
+                        autoplay: true,
+                        sources: [
+                          {
+                            src: `${apiHost}vod/event/${event.id}/master.m3u8`,
+                            type: 'application/vnd.apple.mpegurl',
+                          },
+                        ],
+                      }}
+                      seekOptions={{ forward: 10, backward: 5 }}
+                      onReady={(player) => {
+                        this.player = player;
+                        this.player.on('playing', () => {
+                          setEventOverlay(undefined);
+                        });
+                      }}
+                      onDispose={() => {
+                        this.player = null;
+                      }}
+                    >
+                      {eventOverlay ? (
+                        <div
+                          className="absolute border-4 border-red-600"
+                          style={{
+                            left: `${Math.round(eventOverlay.data.box[0] * 100)}%`,
+                            top: `${Math.round(eventOverlay.data.box[1] * 100)}%`,
+                            right: `${Math.round((1 - eventOverlay.data.box[2]) * 100)}%`,
+                            bottom: `${Math.round((1 - eventOverlay.data.box[3]) * 100)}%`,
+                          }}
+                        >
+                          {eventOverlay.class_type == 'entered_zone' ? (
+                            <div className="absolute w-2 h-2 bg-yellow-500 left-[50%] bottom-0" />
+                          ) : null}
+                        </div>
+                      ) : null}
+                    </VideoPlayer>
+                  </div>
+                </div>
               ) : null}

               {eventDetailType == 'image' || !event.has_clip ? (
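The overlay above converts the stored relative box into CSS inset percentages: left and top come straight from the top-left corner, while right and bottom are measured from the opposite edges, hence the (1 - x) terms. A short Python restatement with example numbers:

```python
def box_to_css_insets(box):
    """Relative [x1, y1, x2, y2] box -> (left, top, right, bottom) percentages,
    mirroring the style computation in the overlay above."""
    x1, y1, x2, y2 = box
    return (
        round(x1 * 100),        # left
        round(y1 * 100),        # top
        round((1 - x2) * 100),  # right (distance from the right edge)
        round((1 - y2) * 100),  # bottom (distance from the bottom edge)
    )


print(box_to_css_insets([0.25, 0.17, 0.67, 0.96]))  # (25, 17, 33, 4)
```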