Mirror of https://github.com/blakeblackshear/frigate.git (synced 2025-08-04 13:47:37 +02:00)
Reduce database queries to necessary fields (#7751)
* Reduce database queries to necessary labels
* Set columns for other queries
* skip creating model instances

Co-authored-by: Blake Blackshear <blakeb@blakeshome.com>
commit a2206bb177
parent 5658e5a4cc
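Frigate's Event, Recordings, and Timeline models are peewee models, and every hunk below applies the same idea: list only the columns actually needed in select(), and where rows are merely serialized or scanned, skip building full model instances. A minimal sketch of the pattern, assuming a deliberately simplified stand-in for the Event model (fields, database, and sample row here are illustrative, not the real schema):

```python
# Minimal sketch of the pattern applied throughout this commit, using a
# simplified stand-in for Frigate's Event model (not the real schema).
from peewee import CharField, FloatField, Model, SqliteDatabase, TextField

db = SqliteDatabase(":memory:")


class Event(Model):
    id = CharField(primary_key=True)
    camera = CharField()
    label = CharField()
    start_time = FloatField()
    thumbnail = TextField(null=True)  # large column we want to avoid fetching

    class Meta:
        database = db


db.create_tables([Event])
Event.create(id="1", camera="front", label="person", start_time=1.0, thumbnail="...")

# Before: every column (including the thumbnail) is read and a full model
# instance is constructed for each row.
events = Event.select().where(Event.camera == "front")

# After: only the needed columns are read, and .dicts() returns plain
# dictionaries instead of model instances.
events = (
    Event.select(Event.id, Event.camera)
    .where(Event.camera == "front")
    .dicts()
)
print(list(events))  # [{'id': '1', 'camera': 'front'}]
```

Because `.dicts()` already yields JSON-friendly dictionaries, the `timeline()` and `events()` endpoints below can return `jsonify([e for e in events])` directly and drop the `model_to_dict()` conversion.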
@@ -83,7 +83,10 @@ class EventCleanup(threading.Thread):
                 datetime.datetime.now() - datetime.timedelta(days=expire_days)
             ).timestamp()
             # grab all events after specific time
-            expired_events = Event.select().where(
+            expired_events = Event.select(
+                Event.id,
+                Event.camera,
+            ).where(
                 Event.camera.not_in(self.camera_keys),
                 Event.start_time < expire_after,
                 Event.label == event.label,
@@ -133,7 +136,10 @@ class EventCleanup(threading.Thread):
                     datetime.datetime.now() - datetime.timedelta(days=expire_days)
                 ).timestamp()
                 # grab all events after specific time
-                expired_events = Event.select().where(
+                expired_events = Event.select(
+                    Event.id,
+                    Event.camera,
+                ).where(
                     Event.camera == name,
                     Event.start_time < expire_after,
                     Event.label == event.label,
@@ -590,9 +590,10 @@ def timeline():
         .where(reduce(operator.and_, clauses))
         .order_by(Timeline.timestamp.asc())
         .limit(limit)
+        .dicts()
     )
 
-    return jsonify([model_to_dict(t) for t in timeline])
+    return jsonify([t for t in timeline])
 
 
 @bp.route("/<camera_name>/<label>/best.jpg")
@@ -673,14 +674,14 @@ def label_snapshot(camera_name, label):
     label = unquote(label)
     if label == "any":
         event_query = (
-            Event.select()
+            Event.select(Event.id)
             .where(Event.camera == camera_name)
             .where(Event.has_snapshot == True)
             .order_by(Event.start_time.desc())
         )
     else:
         event_query = (
-            Event.select()
+            Event.select(Event.id)
             .where(Event.camera == camera_name)
             .where(Event.label == label)
             .where(Event.has_snapshot == True)
@@ -774,7 +775,6 @@ def events():
     favorites = request.args.get("favorites", type=int)
 
     clauses = []
-    excluded_fields = []
 
     selected_columns = [
         Event.id,
@@ -859,9 +859,7 @@ def events():
     if in_progress is not None:
         clauses.append((Event.end_time.is_null(in_progress)))
 
-    if not include_thumbnails:
-        excluded_fields.append(Event.thumbnail)
-    else:
+    if include_thumbnails:
         selected_columns.append(Event.thumbnail)
 
     if favorites:
@@ -875,9 +873,10 @@ def events():
         .where(reduce(operator.and_, clauses))
         .order_by(Event.start_time.desc())
         .limit(limit)
+        .dicts()
     )
 
-    return jsonify([model_to_dict(e, exclude=excluded_fields) for e in events])
+    return jsonify([e for e in events])
 
 
 @bp.route("/events/<camera_name>/<label>/create", methods=["POST"])
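The events() hunks above also replace the old exclude-at-serialization approach (an excluded_fields list passed to model_to_dict()) with a column list built before the query runs. A hedged sketch of that idea, again with a simplified stand-in model rather than Frigate's real one:

```python
# Sketch of building the column list conditionally, as the events() endpoint
# now does; Event is the same kind of simplified stand-in as above.
from peewee import CharField, FloatField, Model, SqliteDatabase, TextField

db = SqliteDatabase(":memory:")


class Event(Model):
    id = CharField(primary_key=True)
    camera = CharField()
    label = CharField()
    start_time = FloatField()
    thumbnail = TextField(null=True)

    class Meta:
        database = db


def query_events(include_thumbnails: bool):
    # Columns every caller needs...
    selected_columns = [Event.id, Event.camera, Event.label, Event.start_time]

    # ...plus the thumbnail column only when it was explicitly requested.
    if include_thumbnails:
        selected_columns.append(Event.thumbnail)

    return Event.select(*selected_columns).order_by(Event.start_time.desc()).dicts()


db.create_tables([Event])
Event.create(id="1", camera="front", label="person", start_time=1.0, thumbnail="...")
print(list(query_events(False)))  # result dicts carry no 'thumbnail' key
print(list(query_events(True)))   # thumbnail is included only here
```

Built this way, the thumbnail column is never read from the database when it isn't requested, instead of being fetched and then stripped during serialization.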
@@ -1242,7 +1241,10 @@ def get_snapshot_from_recording(camera_name: str, frame_time: str):
 
     frame_time = float(frame_time)
     recording_query = (
-        Recordings.select()
+        Recordings.select(
+            Recordings.path,
+            Recordings.start_time,
+        )
         .where(
             ((frame_time > Recordings.start_time) & (frame_time < Recordings.end_time))
         )
@@ -1425,7 +1427,11 @@ def recording_clip(camera_name, start_ts, end_ts):
     download = request.args.get("download", type=bool)
 
     recordings = (
-        Recordings.select()
+        Recordings.select(
+            Recordings.path,
+            Recordings.start_time,
+            Recordings.end_time,
+        )
         .where(
             (Recordings.start_time.between(start_ts, end_ts))
             | (Recordings.end_time.between(start_ts, end_ts))
@@ -1501,7 +1507,7 @@ def recording_clip(camera_name, start_ts, end_ts):
 @bp.route("/vod/<camera_name>/start/<float:start_ts>/end/<float:end_ts>")
 def vod_ts(camera_name, start_ts, end_ts):
     recordings = (
-        Recordings.select()
+        Recordings.select(Recordings.path, Recordings.duration, Recordings.end_time)
         .where(
             Recordings.start_time.between(start_ts, end_ts)
             | Recordings.end_time.between(start_ts, end_ts)
@@ -48,7 +48,10 @@ class RecordingCleanup(threading.Thread):
         expire_before = (
             datetime.datetime.now() - datetime.timedelta(days=expire_days)
         ).timestamp()
-        no_camera_recordings: Recordings = Recordings.select().where(
+        no_camera_recordings: Recordings = Recordings.select(
+            Recordings.id,
+            Recordings.path,
+        ).where(
             Recordings.camera.not_in(list(self.config.cameras.keys())),
             Recordings.end_time < expire_before,
         )
@@ -79,7 +82,14 @@ class RecordingCleanup(threading.Thread):
 
             # Get recordings to check for expiration
             recordings: Recordings = (
-                Recordings.select()
+                Recordings.select(
+                    Recordings.id,
+                    Recordings.start_time,
+                    Recordings.end_time,
+                    Recordings.path,
+                    Recordings.objects,
+                    Recordings.motion,
+                )
                 .where(
                     Recordings.camera == camera,
                     Recordings.end_time < expire_date,
@@ -89,7 +99,10 @@ class RecordingCleanup(threading.Thread):
 
             # Get all the events to check against
             events: Event = (
-                Event.select()
+                Event.select(
+                    Event.start_time,
+                    Event.end_time,
+                )
                 .where(
                     Event.camera == camera,
                     # need to ensure segments for all events starting
@@ -109,7 +122,7 @@ class RecordingCleanup(threading.Thread):
                 keep = False
                 # Now look for a reason to keep this recording segment
                 for idx in range(event_start, len(events)):
-                    event = events[idx]
+                    event: Event = events[idx]
 
                     # if the event starts in the future, stop checking events
                     # and let this recording segment expire
@@ -152,7 +152,10 @@ class RecordingMaintainer(threading.Thread):
             # get all events with the end time after the start of the oldest cache file
             # or with end_time None
             events: Event = (
-                Event.select()
+                Event.select(
+                    Event.start_time,
+                    Event.end_time,
+                )
                 .where(
                     Event.camera == camera,
                     (Event.end_time == None)
@@ -99,11 +99,18 @@ class StorageMaintainer(threading.Thread):
             [b["bandwidth"] for b in self.camera_storage_stats.values()]
         )
 
-        recordings: Recordings = Recordings.select().order_by(
-            Recordings.start_time.asc()
-        )
+        recordings: Recordings = Recordings.select(
+            Recordings.id,
+            Recordings.start_time,
+            Recordings.end_time,
+            Recordings.segment_size,
+            Recordings.path,
+        ).order_by(Recordings.start_time.asc())
         retained_events: Event = (
-            Event.select()
+            Event.select(
+                Event.start_time,
+                Event.end_time,
+            )
             .where(
                 Event.retain_indefinitely == True,
                 Event.has_clip,
@@ -155,7 +162,11 @@ class StorageMaintainer(threading.Thread):
             logger.error(
                 f"Could not clear {hourly_bandwidth} MB, currently {deleted_segments_size} MB have been cleared. Retained recordings must be deleted."
             )
-            recordings = Recordings.select().order_by(Recordings.start_time.asc())
+            recordings = Recordings.select(
+                Recordings.id,
+                Recordings.path,
+                Recordings.segment_size,
+            ).order_by(Recordings.start_time.asc())
 
             for recording in recordings.objects().iterator():
                 if deleted_segments_size > hourly_bandwidth:
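Unlike the HTTP endpoints, the cleanup and storage threads above still want attribute access (recording.path, recording.segment_size), so they keep iterating with .objects().iterator() on top of the narrowed select. A rough sketch of that combination, with an illustrative stand-in for the Recordings model (not the real schema):

```python
# Sketch of pairing a narrowed select() with .objects().iterator(), as the
# recording/storage maintenance code does; Recordings is an illustrative
# stand-in, not Frigate's real model.
from peewee import CharField, FloatField, Model, SqliteDatabase

db = SqliteDatabase(":memory:")


class Recordings(Model):
    id = CharField(primary_key=True)
    camera = CharField()
    path = CharField()
    start_time = FloatField()
    segment_size = FloatField(default=0)

    class Meta:
        database = db


db.create_tables([Recordings])
Recordings.create(
    id="r1", camera="front", path="/media/r1.mp4", start_time=1.0, segment_size=2.5
)

query = Recordings.select(
    Recordings.id,
    Recordings.path,
    Recordings.segment_size,
).order_by(Recordings.start_time.asc())

# .objects() keeps the rows as plain model objects (attribute access works,
# no join-graph reconstruction), and .iterator() streams them instead of
# caching the whole result set in memory.
for recording in query.objects().iterator():
    print(recording.path, recording.segment_size)
```

Streaming matters here because a busy camera can accumulate a very large number of recording segments; the narrowed columns keep each row small and the iterator keeps memory usage flat.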