batch deletes for massive cleanups

Blake Blackshear 2022-04-27 06:49:59 -05:00
parent 0bd3cff13b
commit d343a658ec


@@ -12,6 +12,7 @@ import threading
 import time
 from collections import defaultdict
 from pathlib import Path
+from numpy import delete
 import psutil
 from peewee import JOIN, DoesNotExist
@@ -459,7 +460,13 @@ class RecordingCleanup(threading.Thread):
                     deleted_recordings.add(recording.id)

             logger.debug(f"Expiring {len(deleted_recordings)} recordings")
-            Recordings.delete().where(Recordings.id << deleted_recordings).execute()
+            # delete up to 100,000 at a time
+            max_deletes = 100000
+            deleted_recordings_list = list(deleted_recordings)
+            for i in range(0, len(deleted_recordings_list), max_deletes):
+                Recordings.delete().where(
+                    Recordings.id << deleted_recordings_list[i : i + max_deletes]
+                ).execute()

             logger.debug(f"End camera: {camera}.")
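A note on the list(deleted_recordings) conversion in the hunk above: deleted_recordings is populated with .add(), so it is presumably a set, and Python sets cannot be sliced into chunks directly. A minimal illustration (the variable names are only for demonstration):

ids = {"a", "b", "c"}
# ids[0:2] would raise TypeError: 'set' object is not subscriptable
ids_list = list(ids)
print(ids_list[0:2])  # a two-item chunk; set order is not guaranteed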
@@ -534,7 +541,12 @@
         logger.debug(
             f"Deleting {len(recordings_to_delete)} recordings with missing files"
         )
-        Recordings.delete().where(Recordings.id << recordings_to_delete).execute()
+        # delete up to 100,000 at a time
+        max_deletes = 100000
+        for i in range(0, len(recordings_to_delete), max_deletes):
+            Recordings.delete().where(
+                Recordings.id << recordings_to_delete[i : i + max_deletes]
+            ).execute()

         logger.debug("End sync recordings.")
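Both hunks apply the same pattern: materialize the ids, then issue the DELETE in slices of at most 100,000 so a single IN clause never has to carry an unbounded number of values (SQLite caps bound parameters per statement, with commonly cited defaults of 999 before version 3.32.0 and 32766 after). A minimal sketch of how the pattern could be factored into a reusable helper, assuming a peewee model with an implicit id primary key; chunked_delete and chunk_size are illustrative names, not part of this commit:

from typing import Iterable, Type

from peewee import Model


def chunked_delete(model: Type[Model], ids: Iterable, chunk_size: int = 100_000) -> int:
    """Delete rows whose primary key is in ids, at most chunk_size per statement."""
    id_list = list(ids)  # accept sets or generators; slicing needs a sequence
    deleted = 0
    for i in range(0, len(id_list), chunk_size):
        chunk = id_list[i : i + chunk_size]
        # peewee's << operator builds an IN clause over the chunk
        deleted += model.delete().where(model.id << chunk).execute()
    return deleted

Under those assumptions, the two call sites above would reduce to chunked_delete(Recordings, deleted_recordings) and chunked_delete(Recordings, recordings_to_delete).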