Add bulkInsertEntities to db to handle migrating large collections

advplyr 2022-04-23 06:25:16 -05:00
parent 399e0ea0bc
commit 7b3f9a1e0c
2 changed files with 35 additions and 2 deletions


@@ -316,6 +316,39 @@ class Db {
     })
   }
+
+  async bulkInsertEntities(entityName, entities, batchSize = 500) {
+    // Group entities in batches of size batchSize
+    var entityBatches = []
+    var batch = []
+    var index = 0
+    entities.forEach((ent) => {
+      batch.push(ent)
+      index++
+      if (index >= batchSize) {
+        entityBatches.push(batch)
+        index = 0
+        batch = []
+      }
+    })
+    if (batch.length) entityBatches.push(batch)
+    Logger.info(`[Db] bulkInsertEntities: ${entities.length} ${entityName} to ${entityBatches.length} batches of max size ${batchSize}`)
+    // Start inserting batches
+    var batchIndex = 1
+    for (const entityBatch of entityBatches) {
+      Logger.info(`[Db] bulkInsertEntities: Start inserting batch ${batchIndex} of ${entityBatch.length} for ${entityName}`)
+      var success = await this.insertEntities(entityName, entityBatch)
+      if (success) {
+        Logger.info(`[Db] bulkInsertEntities: Success inserting batch ${batchIndex} for ${entityName}`)
+      } else {
+        Logger.info(`[Db] bulkInsertEntities: Failed inserting batch ${batchIndex} for ${entityName}`)
+      }
+      batchIndex++
+    }
+    return true
+  }
+
   updateEntities(entityName, entities) {
     var entityDb = this.getEntityDb(entityName)
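For comparison, here is a minimal standalone sketch of the same chunk-then-insert pattern, not part of this commit, using slice-based chunking instead of a manual counter. The insertBatch callback is a hypothetical stand-in for this repo's Db.insertEntities.

// Sketch (assumption, not from this commit): split items into fixed-size
// chunks with slice, then await one insert per chunk, logging each result.
function chunkArray(items, batchSize) {
  const batches = []
  for (let i = 0; i < items.length; i += batchSize) {
    batches.push(items.slice(i, i + batchSize))
  }
  return batches
}

async function bulkInsert(items, insertBatch, batchSize = 500) {
  const batches = chunkArray(items, batchSize)
  for (let i = 0; i < batches.length; i++) {
    // insertBatch is a hypothetical async callback returning true/false
    const success = await insertBatch(batches[i])
    console.log(`Batch ${i + 1}/${batches.length}: ${success ? 'inserted' : 'failed'}`)
  }
}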


@@ -242,10 +242,10 @@ async function migrateLibraryItems(db) {
   var libraryItems = audiobooks.map((ab) => makeLibraryItemFromOldAb(ab))
   Logger.info(`>>> ${libraryItems.length} Library Items made`)
-  await db.insertEntities('libraryItem', libraryItems)
+  await db.bulkInsertEntities('libraryItem', libraryItems)
   if (authorsToAdd.length) {
     Logger.info(`>>> ${authorsToAdd.length} Authors made`)
-    await db.insertEntities('author', authorsToAdd)
+    await db.bulkInsertEntities('author', authorsToAdd)
   }
   if (seriesToAdd.length) {
     Logger.info(`>>> ${seriesToAdd.length} Series made`)
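Since bulkInsertEntities exposes batchSize as an optional third parameter defaulting to 500, a migration on a memory-constrained host could shrink the batches. A hypothetical call, not present in this commit:

// Hypothetical: override the default batch size of 500 during migration
await db.bulkInsertEntities('libraryItem', libraryItems, 100)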