store has_clip and has_snapshot on events

This commit is contained in:
Blake Blackshear 2020-12-24 08:09:15 -06:00
parent c770470b58
commit 89e3c2e4b1
3 changed files with 25 additions and 21 deletions

View File

@@ -148,7 +148,8 @@ class EventProcessor(threading.Thread):
         p = sp.run(ffmpeg_cmd, input="\n".join(playlist_lines), encoding='ascii', capture_output=True)
         if p.returncode != 0:
             logger.error(p.stderr)
-            return
+            return False
+        return True
 
     def run(self):
         while True:
@@ -166,27 +167,17 @@ class EventProcessor(threading.Thread):
             logger.debug(f"Event received: {event_type} {camera} {event_data['id']}")
             self.refresh_cache()
 
-            clips_config = self.config.cameras[camera].clips
-
-            # if save clips is not enabled for this camera, just continue
-            if not clips_config.enabled:
-                logger.debug(f"Clips not enabled for {camera}. Not making a clip.")
-                if event_type == 'end':
-                    self.event_processed_queue.put((event_data['id'], camera))
-                continue
-
-            # if specific objects are listed for this camera, only save clips for them
-            if not event_data['label'] in clips_config.objects:
-                if event_type == 'end':
-                    self.event_processed_queue.put((event_data['id'], camera))
-                continue
-
             if event_type == 'start':
                 self.events_in_process[event_data['id']] = event_data
 
             if event_type == 'end':
-                if len(self.cached_clips) > 0 and not event_data['false_positive']:
-                    self.create_clip(camera, event_data, clips_config.pre_capture, clips_config.post_capture)
-                Event.create(
-                    id=event_data['id'],
-                    label=event_data['label'],
+                clips_config = self.config.cameras[camera].clips
+
+                if not event_data['false_positive']:
+                    clip_created = False
+                    if clips_config.enabled and event_data['label'] in clips_config.objects:
+                        clip_created = self.create_clip(camera, event_data, clips_config.pre_capture, clips_config.post_capture)
+
+                    Event.create(
+                        id=event_data['id'],
+                        label=event_data['label'],
@@ -196,7 +187,9 @@ class EventProcessor(threading.Thread):
-                    top_score=event_data['top_score'],
-                    false_positive=event_data['false_positive'],
-                    zones=list(event_data['entered_zones']),
-                    thumbnail=event_data['thumbnail']
-                )
+                        top_score=event_data['top_score'],
+                        false_positive=event_data['false_positive'],
+                        zones=list(event_data['entered_zones']),
+                        thumbnail=event_data['thumbnail'],
+                        has_clip=clip_created,
+                        has_snapshot=event_data['has_snapshot'],
+                    )
                 del self.events_in_process[event_data['id']]
                 self.event_processed_queue.put((event_data['id'], camera))

View File

@@ -114,6 +114,8 @@ def events():
     zone = request.args.get('zone')
     after = request.args.get('after', type=int)
     before = request.args.get('before', type=int)
+    has_clip = request.args.get('has_clip', type=int)
+    has_snapshot = request.args.get('has_snapshot', type=int)
 
     clauses = []
@@ -132,6 +134,12 @@ def events():
     if before:
         clauses.append((Event.start_time <= before))
 
+    if not has_clip is None:
+        clauses.append((Event.has_clip == has_clip))
+
+    if not has_snapshot is None:
+        clauses.append((Event.has_snapshot == has_snapshot))
+
     if len(clauses) == 0:
         clauses.append((1 == 1))

View File

@@ -432,6 +432,8 @@ class TrackedObjectProcessor(threading.Thread):
         def end(camera, obj: TrackedObject, current_frame_time):
             snapshot_config = self.config.cameras[camera].snapshots
+            event_data = obj.to_dict(include_thumbnail=True)
+            event_data['has_snapshot'] = False
             if not obj.false_positive:
                 message = { 'before': obj.previous, 'after': obj.to_dict() }
                 self.client.publish(f"{self.topic_prefix}/events", json.dumps(message), retain=False)
@@ -445,8 +447,9 @@ class TrackedObjectProcessor(threading.Thread):
                     )
                     with open(os.path.join(CLIPS_DIR, f"{camera}-{obj.obj_data['id']}.jpg"), 'wb') as j:
                         j.write(jpg_bytes)
-            self.event_queue.put(('end', camera, obj.to_dict(include_thumbnail=True)))
+                event_data['has_snapshot'] = True
+            self.event_queue.put(('end', camera, event_data))
 
         def snapshot(camera, obj: TrackedObject, current_frame_time):
             mqtt_config = self.config.cameras[camera].mqtt
             if mqtt_config.enabled: