more consistent use of iterators in select queries (#8258)

Repository: https://github.com/blakeblackshear/frigate.git
Parent:     e13a176820
Commit:     9fc93c72a0
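The pattern throughout this commit is the same: Peewee `Model.select(...)` queries are rewritten as chained expressions that end in `.namedtuples()` (or `.dicts()`) and `.iterator()`, so rows are streamed as lightweight tuples/dicts instead of being cached as full model instances. Below is a minimal sketch of the before/after shape; the `Event` model, the in-memory database, and the cutoff value are stand-ins for illustration, not Frigate's actual schema.

# Sketch of the refactor pattern in this commit (hypothetical model, not
# Frigate's real schema): a plain select() that caches full model rows
# becomes a chained query that streams namedtuples via .namedtuples().iterator().
from peewee import BooleanField, CharField, FloatField, Model, SqliteDatabase

db = SqliteDatabase(":memory:")  # assumption: throwaway in-memory database


class Event(Model):
    camera = CharField()
    label = CharField()
    start_time = FloatField()
    retain_indefinitely = BooleanField(default=False)

    class Meta:
        database = db


db.connect()
db.create_tables([Event])

expire_after = 1_700_000_000.0  # hypothetical cutoff timestamp

# Before: every matching row is materialized as an Event instance and cached.
expired_events = Event.select(Event.id, Event.camera).where(
    Event.start_time < expire_after,
    Event.retain_indefinitely == False,  # noqa: E712 (peewee expression)
)

# After: rows are returned as namedtuples and streamed without caching.
expired_events = (
    Event.select(Event.id, Event.camera)
    .where(
        Event.start_time < expire_after,
        Event.retain_indefinitely == False,  # noqa: E712
    )
    .namedtuples()
    .iterator()
)

for event in expired_events:
    print(event.id, event.camera)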
@@ -83,14 +83,19 @@ class EventCleanup(threading.Thread):
                 datetime.datetime.now() - datetime.timedelta(days=expire_days)
             ).timestamp()
             # grab all events after specific time
-            expired_events = Event.select(
-                Event.id,
-                Event.camera,
-            ).where(
-                Event.camera.not_in(self.camera_keys),
-                Event.start_time < expire_after,
-                Event.label == event.label,
-                Event.retain_indefinitely == False,
+            expired_events = (
+                Event.select(
+                    Event.id,
+                    Event.camera,
+                )
+                .where(
+                    Event.camera.not_in(self.camera_keys),
+                    Event.start_time < expire_after,
+                    Event.label == event.label,
+                    Event.retain_indefinitely == False,
+                )
+                .namedtuples()
+                .iterator()
             )
             # delete the media from disk
             for event in expired_events:
@@ -136,14 +141,19 @@ class EventCleanup(threading.Thread):
                 datetime.datetime.now() - datetime.timedelta(days=expire_days)
             ).timestamp()
             # grab all events after specific time
-            expired_events = Event.select(
-                Event.id,
-                Event.camera,
-            ).where(
-                Event.camera == name,
-                Event.start_time < expire_after,
-                Event.label == event.label,
-                Event.retain_indefinitely == False,
+            expired_events = (
+                Event.select(
+                    Event.id,
+                    Event.camera,
+                )
+                .where(
+                    Event.camera == name,
+                    Event.start_time < expire_after,
+                    Event.label == event.label,
+                    Event.retain_indefinitely == False,
+                )
+                .namedtuples()
+                .iterator()
             )

             # delete the grabbed clips from disk
@@ -958,9 +958,10 @@ def events():
         .order_by(Event.start_time.desc())
         .limit(limit)
         .dicts()
+        .iterator()
     )

-    return jsonify([e for e in events])
+    return jsonify(list(events))


@bp.route("/events/<camera_name>/<label>/create", methods=["POST"])
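A side effect of adding `.iterator()` is that rows are no longer cached on the query object and can only be walked once, which is why the handler now materializes them explicitly with `list(events)` before passing them to `jsonify`. A small illustration of that behavior, reusing the hypothetical `Event` model from the sketch above:

# .iterator() streams rows without caching them, so the cursor is exhausted
# after one pass; materialize with list() if the data is needed more than once.
rows = Event.select(Event.id, Event.camera).dicts().iterator()
first_pass = list(rows)   # consumes the cursor
second_pass = list(rows)  # empty: nothing was cached and the query is not re-run
assert second_pass == []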
@@ -1490,6 +1491,7 @@ def recordings_summary(camera_name):
                ),
            ).desc()
        )
+        .namedtuples()
    )

    event_groups = (
@@ -1511,14 +1513,14 @@ def recordings_summary(camera_name):
                ),
            ),
        )
-        .objects()
+        .namedtuples()
    )

    event_map = {g.hour: g.count for g in event_groups}

    days = {}

-    for recording_group in recording_groups.objects():
+    for recording_group in recording_groups:
        parts = recording_group.hour.split()
        hour = parts[1]
        day = parts[0]
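The `recordings_summary` hunks also swap `.objects()` for `.namedtuples()`: in Peewee, `.objects()` yields full model instances while `.namedtuples()` yields plain namedtuples, and both support the attribute access (`g.hour`, `g.count`) that the surrounding code relies on, so only the row type changes. A short sketch of an aggregate query in that style, again using the hypothetical `Event` model above:

from peewee import fn

# Aggregate rows come back as namedtuples; attribute access such as
# {g.camera: g.count for g in groups} keeps working unchanged.
groups = (
    Event.select(Event.camera, fn.COUNT(Event.id).alias("count"))
    .group_by(Event.camera)
    .namedtuples()
)
camera_counts = {g.camera: g.count for g in groups}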
@@ -1562,9 +1564,11 @@ def recordings(camera_name):
            Recordings.start_time <= before,
        )
        .order_by(Recordings.start_time)
+        .dicts()
+        .iterator()
    )

-    return jsonify([e for e in recordings.dicts()])
+    return jsonify(list(recordings))


@bp.route("/<camera_name>/start/<int:start_ts>/end/<int:end_ts>/clip.mp4")
@@ -1669,6 +1673,7 @@ def vod_ts(camera_name, start_ts, end_ts):
        )
        .where(Recordings.camera == camera_name)
        .order_by(Recordings.start_time.asc())
+        .iterator()
    )

    clips = []
@@ -48,12 +48,17 @@ class RecordingCleanup(threading.Thread):
        expire_before = (
            datetime.datetime.now() - datetime.timedelta(days=expire_days)
        ).timestamp()
-        no_camera_recordings: Recordings = Recordings.select(
-            Recordings.id,
-            Recordings.path,
-        ).where(
-            Recordings.camera.not_in(list(self.config.cameras.keys())),
-            Recordings.end_time < expire_before,
+        no_camera_recordings: Recordings = (
+            Recordings.select(
+                Recordings.id,
+                Recordings.path,
+            )
+            .where(
+                Recordings.camera.not_in(list(self.config.cameras.keys())),
+                Recordings.end_time < expire_before,
+            )
+            .namedtuples()
+            .iterator()
        )

        deleted_recordings = set()
@@ -95,6 +100,8 @@ class RecordingCleanup(threading.Thread):
                Recordings.end_time < expire_date,
            )
            .order_by(Recordings.start_time)
+            .namedtuples()
+            .iterator()
        )

        # Get all the events to check against
@@ -111,14 +118,14 @@ class RecordingCleanup(threading.Thread):
                Event.has_clip,
            )
            .order_by(Event.start_time)
-            .objects()
+            .namedtuples()
        )

        # loop over recordings and see if they overlap with any non-expired events
        # TODO: expire segments based on segment stats according to config
        event_start = 0
        deleted_recordings = set()
-        for recording in recordings.objects().iterator():
+        for recording in recordings:
            keep = False
            # Now look for a reason to keep this recording segment
            for idx in range(event_start, len(events)):
@@ -163,6 +163,8 @@ class RecordingMaintainer(threading.Thread):
                Event.has_clip,
            )
            .order_by(Event.start_time)
+            .namedtuples()
+            .iterator()
        )

        tasks.extend(
@@ -99,13 +99,19 @@ class StorageMaintainer(threading.Thread):
            [b["bandwidth"] for b in self.camera_storage_stats.values()]
        )

-        recordings: Recordings = Recordings.select(
-            Recordings.id,
-            Recordings.start_time,
-            Recordings.end_time,
-            Recordings.segment_size,
-            Recordings.path,
-        ).order_by(Recordings.start_time.asc())
+        recordings: Recordings = (
+            Recordings.select(
+                Recordings.id,
+                Recordings.start_time,
+                Recordings.end_time,
+                Recordings.segment_size,
+                Recordings.path,
+            )
+            .order_by(Recordings.start_time.asc())
+            .namedtuples()
+            .iterator()
+        )

        retained_events: Event = (
            Event.select(
                Event.start_time,
@@ -116,12 +122,12 @@ class StorageMaintainer(threading.Thread):
                Event.has_clip,
            )
            .order_by(Event.start_time.asc())
-            .objects()
+            .namedtuples()
        )

        event_start = 0
        deleted_recordings = set()
-        for recording in recordings.objects().iterator():
+        for recording in recordings:
            # check if 1 hour of storage has been reclaimed
            if deleted_segments_size > hourly_bandwidth:
                break
@@ -162,13 +168,18 @@ class StorageMaintainer(threading.Thread):
            logger.error(
                f"Could not clear {hourly_bandwidth} MB, currently {deleted_segments_size} MB have been cleared. Retained recordings must be deleted."
            )
-            recordings = Recordings.select(
-                Recordings.id,
-                Recordings.path,
-                Recordings.segment_size,
-            ).order_by(Recordings.start_time.asc())
+            recordings = (
+                Recordings.select(
+                    Recordings.id,
+                    Recordings.path,
+                    Recordings.segment_size,
+                )
+                .order_by(Recordings.start_time.asc())
+                .namedtuples()
+                .iterator()
+            )

-            for recording in recordings.objects().iterator():
+            for recording in recordings:
                if deleted_segments_size > hourly_bandwidth:
                    break
