Reduce database queries to necessary fields (#7751)

* Reduce database queries to necessary labels

* Set columns for other queries

* Skip creating model instances

---------

Co-authored-by: Blake Blackshear <blakeb@blakeshome.com>
Nicolas Mowen 2023-09-11 16:07:04 -06:00 committed by GitHub
parent 5658e5a4cc
commit a2206bb177
5 changed files with 62 additions and 23 deletions
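
Before the per-file diffs, it may help to see the core Peewee behavior this commit leans on: a bare Event.select() fetches every column of every matching row, while passing fields explicitly narrows the generated SQL to just those columns. A minimal sketch with a toy model (an assumption for illustration only, not Frigate's actual schema):

# Minimal sketch, not Frigate's real schema: a toy Event model bound to an
# in-memory SQLite database, used to show how naming fields in select()
# narrows the generated SQL. Requires peewee (pip install peewee).
from peewee import SqliteDatabase, Model, CharField, FloatField, TextField

db = SqliteDatabase(":memory:")

class Event(Model):
    camera = CharField()
    label = CharField()
    start_time = FloatField()
    end_time = FloatField(null=True)   # NULL while an event is still in progress
    thumbnail = TextField(null=True)   # large base64 blob in the real schema

    class Meta:
        database = db

db.create_tables([Event])

# select() with no arguments reads every column of every matching row,
# including the large thumbnail blob.
wide = Event.select().where(Event.start_time < 100.0)
print(wide.sql()[0])
# SELECT "t1"."id", "t1"."camera", "t1"."label", "t1"."start_time",
#        "t1"."end_time", "t1"."thumbnail" FROM "event" AS "t1" WHERE ...

# Naming the fields keeps the SELECT clause, and the data actually read and
# materialized in Python, down to just what the caller needs.
narrow = Event.select(Event.id, Event.camera).where(Event.start_time < 100.0)
print(narrow.sql()[0])
# SELECT "t1"."id", "t1"."camera" FROM "event" AS "t1" WHERE ...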

View File

@@ -83,7 +83,10 @@ class EventCleanup(threading.Thread):
datetime.datetime.now() - datetime.timedelta(days=expire_days)
).timestamp()
# grab all events after specific time
expired_events = Event.select().where(
expired_events = Event.select(
Event.id,
Event.camera,
).where(
Event.camera.not_in(self.camera_keys),
Event.start_time < expire_after,
Event.label == event.label,
@@ -133,7 +136,10 @@ class EventCleanup(threading.Thread):
datetime.datetime.now() - datetime.timedelta(days=expire_days)
).timestamp()
# grab all events after specific time
expired_events = Event.select().where(
expired_events = Event.select(
Event.id,
Event.camera,
).where(
Event.camera == name,
Event.start_time < expire_after,
Event.label == event.label,
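
Both cleanup hunks above narrow the expired-event query to Event.id and Event.camera, which only works because the rest of the expiration loop (not shown in the hunk) touches nothing else. A hedged sketch of that loop shape, reusing the toy Event model from the first sketch; remove_media() and the chunked delete are illustrative stand-ins, not Frigate's exact cleanup code:

# Sketch of the cleanup shape the narrowed query above enables.
# Reuses the toy Event model; remove_media() is a hypothetical stand-in.
CHUNK_SIZE = 50

def remove_media(camera: str, event_id: int) -> None:
    # Stand-in for deleting the event's snapshot/clip files on disk.
    pass

def expire_events(expire_after: float, camera_keys: list) -> None:
    expired_events = Event.select(Event.id, Event.camera).where(
        Event.camera.not_in(camera_keys),
        Event.start_time < expire_after,
    )

    ids_to_delete = []
    for event in expired_events.iterator():   # stream rows instead of caching
        remove_media(event.camera, event.id)  # only the two selected fields are read
        ids_to_delete.append(event.id)

    # Remove the database rows themselves in manageable chunks.
    for i in range(0, len(ids_to_delete), CHUNK_SIZE):
        chunk = ids_to_delete[i:i + CHUNK_SIZE]
        Event.delete().where(Event.id.in_(chunk)).execute()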

View File

@@ -590,9 +590,10 @@ def timeline():
.where(reduce(operator.and_, clauses))
.order_by(Timeline.timestamp.asc())
.limit(limit)
.dicts()
)
return jsonify([model_to_dict(t) for t in timeline])
return jsonify([t for t in timeline])
@bp.route("/<camera_name>/<label>/best.jpg")
@@ -673,14 +674,14 @@ def label_snapshot(camera_name, label):
label = unquote(label)
if label == "any":
event_query = (
Event.select()
Event.select(Event.id)
.where(Event.camera == camera_name)
.where(Event.has_snapshot == True)
.order_by(Event.start_time.desc())
)
else:
event_query = (
Event.select()
Event.select(Event.id)
.where(Event.camera == camera_name)
.where(Event.label == label)
.where(Event.has_snapshot == True)
@@ -774,7 +775,6 @@ def events():
favorites = request.args.get("favorites", type=int)
clauses = []
excluded_fields = []
selected_columns = [
Event.id,
@@ -859,9 +859,7 @@ def events():
if in_progress is not None:
clauses.append((Event.end_time.is_null(in_progress)))
if not include_thumbnails:
excluded_fields.append(Event.thumbnail)
else:
if include_thumbnails:
selected_columns.append(Event.thumbnail)
if favorites:
@@ -875,9 +873,10 @@ def events():
.where(reduce(operator.and_, clauses))
.order_by(Event.start_time.desc())
.limit(limit)
.dicts()
)
return jsonify([model_to_dict(e, exclude=excluded_fields) for e in events])
return jsonify([e for e in events])
@bp.route("/events/<camera_name>/<label>/create", methods=["POST"])
@@ -1242,7 +1241,10 @@ def get_snapshot_from_recording(camera_name: str, frame_time: str):
frame_time = float(frame_time)
recording_query = (
Recordings.select()
Recordings.select(
Recordings.path,
Recordings.start_time,
)
.where(
((frame_time > Recordings.start_time) & (frame_time < Recordings.end_time))
)
@@ -1425,7 +1427,11 @@ def recording_clip(camera_name, start_ts, end_ts):
download = request.args.get("download", type=bool)
recordings = (
Recordings.select()
Recordings.select(
Recordings.path,
Recordings.start_time,
Recordings.end_time,
)
.where(
(Recordings.start_time.between(start_ts, end_ts))
| (Recordings.end_time.between(start_ts, end_ts))
@@ -1501,7 +1507,7 @@ def recording_clip(camera_name, start_ts, end_ts):
@bp.route("/vod/<camera_name>/start/<float:start_ts>/end/<float:end_ts>")
def vod_ts(camera_name, start_ts, end_ts):
recordings = (
Recordings.select()
Recordings.select(Recordings.path, Recordings.duration, Recordings.end_time)
.where(
Recordings.start_time.between(start_ts, end_ts)
| Recordings.end_time.between(start_ts, end_ts)
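
Two Peewee details carry the API changes above: the column list for the events query is now built in plain Python, so the heavy Event.thumbnail column is only selected when the client asks for thumbnails, and .dicts() makes the query yield plain dictionaries, so results can go straight to jsonify() without constructing model instances or calling playhouse's model_to_dict(). A hedged sketch with the toy model, not the real endpoint:

# Sketch of the pattern in the events() endpoint above: build the column list
# in Python, then ask Peewee for dicts instead of model instances.
# Reuses the toy Event model; the real endpoint selects many more columns.
def query_events(include_thumbnails: bool, limit: int = 10) -> list:
    selected_columns = [Event.id, Event.camera, Event.label, Event.start_time]
    if include_thumbnails:
        # The large thumbnail blob is only read from the database on request.
        selected_columns.append(Event.thumbnail)

    events = (
        Event.select(*selected_columns)
        .order_by(Event.start_time.desc())
        .limit(limit)
        .dicts()   # iterate plain dicts, skipping model construction entirely
    )
    # Each row is already a JSON-friendly dict, so no model_to_dict() pass.
    return list(events)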

View File

@@ -48,7 +48,10 @@ class RecordingCleanup(threading.Thread):
expire_before = (
datetime.datetime.now() - datetime.timedelta(days=expire_days)
).timestamp()
no_camera_recordings: Recordings = Recordings.select().where(
no_camera_recordings: Recordings = Recordings.select(
Recordings.id,
Recordings.path,
).where(
Recordings.camera.not_in(list(self.config.cameras.keys())),
Recordings.end_time < expire_before,
)
@@ -79,7 +82,14 @@ class RecordingCleanup(threading.Thread):
# Get recordings to check for expiration
recordings: Recordings = (
Recordings.select()
Recordings.select(
Recordings.id,
Recordings.start_time,
Recordings.end_time,
Recordings.path,
Recordings.objects,
Recordings.motion,
)
.where(
Recordings.camera == camera,
Recordings.end_time < expire_date,
@@ -89,7 +99,10 @@ class RecordingCleanup(threading.Thread):
# Get all the events to check against
events: Event = (
Event.select()
Event.select(
Event.start_time,
Event.end_time,
)
.where(
Event.camera == camera,
# need to ensure segments for all events starting
@@ -109,7 +122,7 @@ class RecordingCleanup(threading.Thread):
keep = False
# Now look for a reason to keep this recording segment
for idx in range(event_start, len(events)):
event = events[idx]
event: Event = events[idx]
# if the event starts in the future, stop checking events
# and let this recording segment expire
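
The retention pass above only compares recording segments against event time ranges, which is why the events query can be cut down to Event.start_time and Event.end_time. A simplified sketch of that overlap check (the real RecordingCleanup also factors in retention config, motion, and detected objects):

# Simplified overlap check between one recording segment and the events list
# produced by the trimmed query above. Rows only need start_time/end_time.
def keep_segment(seg_start: float, seg_end: float, events) -> bool:
    for event in events:                      # events ordered by start_time
        if event.start_time > seg_end:
            break                             # later events cannot overlap
        # An in-progress event has a NULL end_time; treat it as still running.
        event_end = event.end_time if event.end_time is not None else float("inf")
        if event_end > seg_start:
            return True                       # segment overlaps an event, keep it
    return False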

View File

@@ -152,7 +152,10 @@ class RecordingMaintainer(threading.Thread):
# get all events with the end time after the start of the oldest cache file
# or with end_time None
events: Event = (
Event.select()
Event.select(
Event.start_time,
Event.end_time,
)
.where(
Event.camera == camera,
(Event.end_time == None)
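
The maintainer hunk above trims the event query to the two timestamps it compares against cached segments; per the comment in the context, the where clause matches events that are still in progress (end_time is NULL, written as Event.end_time == None) or that ended after the start of the oldest cache file. A hedged sketch of that query shape with the toy model; oldest_cache_start is an illustrative parameter:

# Sketch of the "still in progress, or ended after the oldest cache file"
# query shape used by the recording maintainer above (toy Event model again).
def events_overlapping_cache(camera: str, oldest_cache_start: float):
    return (
        Event.select(Event.start_time, Event.end_time)
        .where(
            Event.camera == camera,
            # Parentheses are required: | binds more tightly than == or >= in
            # Python, so each comparison must be wrapped before OR-ing them.
            (Event.end_time == None)              # in progress (NULL end_time)
            | (Event.end_time >= oldest_cache_start),
        )
        .order_by(Event.start_time)
    )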

View File

@@ -99,11 +99,18 @@ class StorageMaintainer(threading.Thread):
[b["bandwidth"] for b in self.camera_storage_stats.values()]
)
recordings: Recordings = Recordings.select().order_by(
Recordings.start_time.asc()
)
recordings: Recordings = Recordings.select(
Recordings.id,
Recordings.start_time,
Recordings.end_time,
Recordings.segment_size,
Recordings.path,
).order_by(Recordings.start_time.asc())
retained_events: Event = (
Event.select()
Event.select(
Event.start_time,
Event.end_time,
)
.where(
Event.retain_indefinitely == True,
Event.has_clip,
@@ -155,7 +162,11 @@ class StorageMaintainer(threading.Thread):
logger.error(
f"Could not clear {hourly_bandwidth} MB, currently {deleted_segments_size} MB have been cleared. Retained recordings must be deleted."
)
recordings = Recordings.select().order_by(Recordings.start_time.asc())
recordings = Recordings.select(
Recordings.id,
Recordings.path,
Recordings.segment_size,
).order_by(Recordings.start_time.asc())
for recording in recordings.objects().iterator():
if deleted_segments_size > hourly_bandwidth:
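
The storage maintainer walks recordings oldest-first and stops as soon as enough space has been cleared, so each row only needs its id, path, and segment_size; .objects().iterator() then streams those rows without caching the whole result set in memory. A hedged sketch of that reclaim loop (toy Recordings model bound to the same in-memory database as the earlier sketches, and a stand-in delete_segment(), not Frigate's actual storage code):

# Sketch of the oldest-first reclaim loop fed by the narrowed query above.
from pathlib import Path

class Recordings(Model):
    camera = CharField()
    path = CharField()
    start_time = FloatField()
    end_time = FloatField()
    segment_size = FloatField()   # segment size on disk (MB in this sketch)

    class Meta:
        database = db             # in-memory SQLite db from the first sketch

db.create_tables([Recordings])

def delete_segment(recording) -> None:
    # Stand-in for the real cleanup: remove the file, then the database row.
    Path(recording.path).unlink(missing_ok=True)
    Recordings.delete().where(Recordings.id == recording.id).execute()

def reclaim_storage(needed_mb: float) -> float:
    recordings = Recordings.select(
        Recordings.id,
        Recordings.path,
        Recordings.segment_size,
    ).order_by(Recordings.start_time.asc())

    deleted_mb = 0.0
    # .iterator() streams rows from the cursor instead of caching them, which
    # keeps memory flat even when the recordings table is very large.
    for recording in recordings.objects().iterator():
        if deleted_mb > needed_mb:
            break                              # enough space has been freed
        delete_segment(recording)
        deleted_mb += recording.segment_size
    return deleted_mb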