mirror of
https://github.com/advplyr/audiobookshelf.git
synced 2025-11-24 20:05:41 +01:00
Merge 3a751f711a into f1c39e8587
This commit is contained in:
commit
894a143cbb
@ -125,6 +125,8 @@ export default {
|
||||
skipMatchingMediaWithAsin: false,
|
||||
skipMatchingMediaWithIsbn: false,
|
||||
autoScanCronExpression: null,
|
||||
matchAfterScan: false,
|
||||
matchMinConfidence: 0,
|
||||
hideSingleBookSeries: false,
|
||||
onlyShowLaterBooksInContinueSeries: false,
|
||||
metadataPrecedence: ['folderStructure', 'audioMetatags', 'nfoFile', 'txtFiles', 'opfFile', 'absMetadata'],
|
||||
|
||||
@ -4,7 +4,16 @@
|
||||
<p class="text-base md:text-xl font-semibold">{{ $strings.HeaderScheduleLibraryScans }}</p>
|
||||
<ui-checkbox v-model="enableAutoScan" @input="toggleEnableAutoScan" :label="$strings.LabelEnable" medium checkbox-bg="bg" label-class="pl-2 text-base md:text-lg" />
|
||||
</div>
|
||||
<widgets-cron-expression-builder ref="cronExpressionBuilder" v-if="enableAutoScan" v-model="cronExpression" @input="updatedCron" />
|
||||
<div v-if="enableAutoScan">
|
||||
<widgets-cron-expression-builder ref="cronExpressionBuilder" v-model="cronExpression" @input="updatedCron" />
|
||||
<div class="mt-4">
|
||||
<ui-checkbox v-model="matchAfterScan" @input="updateMatchAfterScan" :label="$strings.LabelMatchAfterScan" medium checkbox-bg="bg" label-class="pl-2 text-base" />
|
||||
</div>
|
||||
<div class="mt-4" v-if="matchAfterScan">
|
||||
<label class="px-1 text-sm font-semibold">{{ $strings.LabelMatchMinConfidence }}</label>
|
||||
<ui-range-input v-model.number="matchMinConfidencePercentage" :min="0" :max="100" :step="1" />
|
||||
</div>
|
||||
</div>
|
||||
<div v-else>
|
||||
<p class="text-yellow-400 text-base">{{ $strings.MessageScheduleLibraryScanNote }}</p>
|
||||
</div>
|
||||
@ -23,10 +32,22 @@ export default {
|
||||
data() {
|
||||
return {
|
||||
cronExpression: null,
|
||||
enableAutoScan: false
|
||||
enableAutoScan: false,
|
||||
matchAfterScan: false,
|
||||
matchMinConfidence: 0
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
matchMinConfidencePercentage: {
|
||||
get() {
|
||||
return this.matchMinConfidence * 100
|
||||
},
|
||||
set(val) {
|
||||
this.matchMinConfidence = val / 100
|
||||
this.updateMatchMinConfidence()
|
||||
}
|
||||
}
|
||||
},
|
||||
computed: {},
|
||||
methods: {
|
||||
checkBlurExpressionInput() {
|
||||
// returns true if advanced cron input is focused
|
||||
@ -47,9 +68,25 @@ export default {
|
||||
}
|
||||
})
|
||||
},
|
||||
updateMatchAfterScan(value) {
|
||||
this.$emit('update', {
|
||||
settings: {
|
||||
matchAfterScan: value
|
||||
}
|
||||
})
|
||||
},
|
||||
updateMatchMinConfidence() {
|
||||
this.$emit('update', {
|
||||
settings: {
|
||||
matchMinConfidence: this.matchMinConfidence
|
||||
}
|
||||
})
|
||||
},
|
||||
init() {
|
||||
this.cronExpression = this.library.settings.autoScanCronExpression
|
||||
this.enableAutoScan = !!this.cronExpression
|
||||
this.matchAfterScan = this.library.settings.matchAfterScan
|
||||
this.matchMinConfidence = this.library.settings.matchMinConfidence || 0
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
|
||||
@ -448,6 +448,8 @@
|
||||
"LabelLookForNewEpisodesAfterDate": "Look for new episodes after this date",
|
||||
"LabelLowestPriority": "Lowest Priority",
|
||||
"LabelMatchConfidence": "Confidence",
|
||||
"LabelMatchAfterScan": "Run 'Match Books' after scan",
|
||||
"LabelMatchMinConfidence": "Minimum match confidence (0-100)",
|
||||
"LabelMatchExistingUsersBy": "Match existing users by",
|
||||
"LabelMatchExistingUsersByDescription": "Used for connecting existing users. Once connected, users will be matched by a unique id from your SSO provider",
|
||||
"LabelMaxEpisodesToDownload": "Max # of episodes to download. Use 0 for unlimited.",
|
||||
|
||||
@ -354,6 +354,28 @@ class LibraryController {
|
||||
updatedSettings[key] = req.body.settings[key] === null ? null : Number(req.body.settings[key])
|
||||
Logger.debug(`[LibraryController] Library "${req.library.name}" updating setting "${key}" to "${updatedSettings[key]}"`)
|
||||
}
|
||||
} else if (key === 'matchMinConfidence') {
|
||||
if (req.body.settings[key] !== null && isNaN(req.body.settings[key])) {
|
||||
Logger.error(`[LibraryController] Invalid request. Setting "${key}" must be a number`)
|
||||
return res.status(400).send(`Invalid request. Setting "${key}" must be a number`)
|
||||
} else if (req.body.settings[key] !== null && (Number(req.body.settings[key]) < 0 || Number(req.body.settings[key]) > 1)) {
|
||||
Logger.error(`[LibraryController] Invalid request. Setting "${key}" must be between 0 and 1`)
|
||||
return res.status(400).send(`Invalid request. Setting "${key}" must be between 0 and 1`)
|
||||
}
|
||||
if (req.body.settings[key] !== updatedSettings[key]) {
|
||||
hasUpdates = true
|
||||
updatedSettings[key] = req.body.settings[key] === null ? null : Number(req.body.settings[key])
|
||||
Logger.debug(`[LibraryController] Library "${req.library.name}" updating setting "${key}" to "${updatedSettings[key]}"`)
|
||||
}
|
||||
} else if (key === 'matchAfterScan') {
|
||||
if (typeof req.body.settings[key] !== 'boolean') {
|
||||
return res.status(400).send('Invalid request. Setting "matchAfterScan" must be a boolean')
|
||||
}
|
||||
if (req.body.settings[key] !== updatedSettings[key]) {
|
||||
hasUpdates = true
|
||||
updatedSettings[key] = req.body.settings[key]
|
||||
Logger.debug(`[LibraryController] Library "${req.library.name}" updating setting "${key}" to "${updatedSettings[key]}"`)
|
||||
}
|
||||
} else {
|
||||
if (typeof req.body.settings[key] !== typeof updatedSettings[key]) {
|
||||
Logger.error(`[LibraryController] Invalid request. Setting "${key}" must be of type ${typeof updatedSettings[key]}`)
|
||||
|
||||
@ -3,6 +3,8 @@ const cron = require('../libs/nodeCron')
|
||||
const Logger = require('../Logger')
|
||||
const Database = require('../Database')
|
||||
const LibraryScanner = require('../scanner/LibraryScanner')
|
||||
const Scanner = require('../scanner/Scanner')
|
||||
const { checkRemoveEmptySeries, checkRemoveAuthorsWithNoBooks } = require('../utils/cleanup')
|
||||
|
||||
const ShareManager = require('./ShareManager')
|
||||
|
||||
@ -74,7 +76,18 @@ class CronManager {
|
||||
Logger.error(`[CronManager] Library not found for scan cron ${_library.id}`)
|
||||
} else {
|
||||
Logger.debug(`[CronManager] Library scan cron executing for ${library.name}`)
|
||||
LibraryScanner.scan(library)
|
||||
await LibraryScanner.scan(library)
|
||||
|
||||
if (library.settings.matchAfterScan) {
|
||||
Logger.debug(`[CronManager] Library scan cron matching books for ${library.name}`)
|
||||
const apiRouterCtx = {
|
||||
checkRemoveEmptySeries,
|
||||
checkRemoveAuthorsWithNoBooks
|
||||
}
|
||||
Scanner.matchLibraryItems(apiRouterCtx, library, {
|
||||
minConfidence: library.settings.matchMinConfidence
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
this.libraryScanCrons.push({
|
||||
|
||||
@ -68,6 +68,8 @@ class Library extends Model {
|
||||
coverAspectRatio: 1, // Square
|
||||
disableWatcher: false,
|
||||
autoScanCronExpression: null,
|
||||
matchAfterScan: false,
|
||||
matchMinConfidence: 0,
|
||||
skipMatchingMediaWithAsin: false,
|
||||
skipMatchingMediaWithIsbn: false,
|
||||
audiobooksOnly: false,
|
||||
|
||||
@ -35,6 +35,7 @@ const MiscController = require('../controllers/MiscController')
|
||||
const ShareController = require('../controllers/ShareController')
|
||||
const StatsController = require('../controllers/StatsController')
|
||||
const ApiKeyController = require('../controllers/ApiKeyController')
|
||||
const { checkRemoveEmptySeries, checkRemoveAuthorsWithNoBooks } = require('../utils/cleanup')
|
||||
|
||||
class ApiRouter {
|
||||
constructor(Server) {
|
||||
@ -405,54 +406,7 @@ class ApiRouter {
|
||||
* @param {string[]} seriesIds
|
||||
*/
|
||||
async checkRemoveEmptySeries(seriesIds) {
|
||||
if (!seriesIds?.length) return
|
||||
|
||||
const transaction = await Database.sequelize.transaction()
|
||||
try {
|
||||
const seriesToRemove = (
|
||||
await Database.seriesModel.findAll({
|
||||
where: [
|
||||
{
|
||||
id: seriesIds
|
||||
},
|
||||
sequelize.where(sequelize.literal('(SELECT count(*) FROM bookSeries bs WHERE bs.seriesId = series.id)'), 0)
|
||||
],
|
||||
attributes: ['id', 'name', 'libraryId'],
|
||||
include: {
|
||||
model: Database.bookModel,
|
||||
attributes: ['id'],
|
||||
required: false // Ensure it includes series even if no books exist
|
||||
},
|
||||
transaction
|
||||
})
|
||||
).map((s) => ({ id: s.id, name: s.name, libraryId: s.libraryId }))
|
||||
|
||||
if (seriesToRemove.length) {
|
||||
await Database.seriesModel.destroy({
|
||||
where: {
|
||||
id: seriesToRemove.map((s) => s.id)
|
||||
},
|
||||
transaction
|
||||
})
|
||||
}
|
||||
|
||||
await transaction.commit()
|
||||
|
||||
seriesToRemove.forEach(({ id, name, libraryId }) => {
|
||||
Logger.info(`[ApiRouter] Series "${name}" is now empty. Removing series`)
|
||||
|
||||
// Remove series from library filter data
|
||||
Database.removeSeriesFromFilterData(libraryId, id)
|
||||
SocketAuthority.emitter('series_removed', { id: id, libraryId: libraryId })
|
||||
})
|
||||
// Close rss feeds - remove from db and emit socket event
|
||||
if (seriesToRemove.length) {
|
||||
await RssFeedManager.closeFeedsForEntityIds(seriesToRemove.map((s) => s.id))
|
||||
}
|
||||
} catch (error) {
|
||||
await transaction.rollback()
|
||||
Logger.error(`[ApiRouter] Error removing empty series: ${error.message}`)
|
||||
}
|
||||
return checkRemoveEmptySeries(seriesIds)
|
||||
}
|
||||
|
||||
/**
|
||||
@ -463,56 +417,7 @@ class ApiRouter {
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async checkRemoveAuthorsWithNoBooks(authorIds) {
|
||||
if (!authorIds?.length) return
|
||||
|
||||
const transaction = await Database.sequelize.transaction()
|
||||
try {
|
||||
// Select authors with locking to prevent concurrent updates
|
||||
const bookAuthorsToRemove = (
|
||||
await Database.authorModel.findAll({
|
||||
where: [
|
||||
{
|
||||
id: authorIds,
|
||||
asin: {
|
||||
[sequelize.Op.or]: [null, '']
|
||||
},
|
||||
description: {
|
||||
[sequelize.Op.or]: [null, '']
|
||||
},
|
||||
imagePath: {
|
||||
[sequelize.Op.or]: [null, '']
|
||||
}
|
||||
},
|
||||
sequelize.where(sequelize.literal('(SELECT count(*) FROM bookAuthors ba WHERE ba.authorId = author.id)'), 0)
|
||||
],
|
||||
attributes: ['id', 'name', 'libraryId'],
|
||||
raw: true,
|
||||
transaction
|
||||
})
|
||||
).map((au) => ({ id: au.id, name: au.name, libraryId: au.libraryId }))
|
||||
|
||||
if (bookAuthorsToRemove.length) {
|
||||
await Database.authorModel.destroy({
|
||||
where: {
|
||||
id: bookAuthorsToRemove.map((au) => au.id)
|
||||
},
|
||||
transaction
|
||||
})
|
||||
}
|
||||
|
||||
await transaction.commit()
|
||||
|
||||
// Remove all book authors after completing remove from database
|
||||
bookAuthorsToRemove.forEach(({ id, name, libraryId }) => {
|
||||
Database.removeAuthorFromFilterData(libraryId, id)
|
||||
// TODO: Clients were expecting full author in payload but its unnecessary
|
||||
SocketAuthority.emitter('author_removed', { id, libraryId })
|
||||
Logger.info(`[ApiRouter] Removed author "${name}" with no books`)
|
||||
})
|
||||
} catch (error) {
|
||||
await transaction.rollback()
|
||||
Logger.error(`[ApiRouter] Error removing authors: ${error.message}`)
|
||||
}
|
||||
return checkRemoveAuthorsWithNoBooks(authorIds)
|
||||
}
|
||||
|
||||
async getUserListeningSessionsHelper(userId) {
|
||||
|
||||
@ -60,6 +60,12 @@ class Scanner {
|
||||
}
|
||||
const matchData = results[0]
|
||||
|
||||
if (options.minConfidence && matchData.matchConfidence < options.minConfidence) {
|
||||
return {
|
||||
warning: `Match confidence ${matchData.matchConfidence} is below the minimum of ${options.minConfidence}`
|
||||
}
|
||||
}
|
||||
|
||||
// Update cover if not set OR overrideCover flag
|
||||
if (matchData.cover && (!libraryItem.media.coverPath || options.overrideCover)) {
|
||||
Logger.debug(`[Scanner] Updating cover "${matchData.cover}"`)
|
||||
@ -433,7 +439,7 @@ class Scanner {
|
||||
* @param {LibraryScan} libraryScan
|
||||
* @returns {Promise<boolean>} false if scan canceled
|
||||
*/
|
||||
async matchLibraryItemsChunk(apiRouterCtx, library, libraryItems, libraryScan) {
|
||||
async matchLibraryItemsChunk(apiRouterCtx, library, libraryItems, libraryScan, options = {}) {
|
||||
for (let i = 0; i < libraryItems.length; i++) {
|
||||
const libraryItem = libraryItems[i]
|
||||
|
||||
@ -448,7 +454,7 @@ class Scanner {
|
||||
}
|
||||
|
||||
Logger.debug(`[Scanner] matchLibraryItems: Quick matching "${libraryItem.media.title}" (${i + 1} of ${libraryItems.length})`)
|
||||
const result = await this.quickMatchLibraryItem(apiRouterCtx, libraryItem, { provider: library.provider })
|
||||
const result = await this.quickMatchLibraryItem(apiRouterCtx, libraryItem, { provider: library.provider, minConfidence: options.minConfidence })
|
||||
if (result.warning) {
|
||||
Logger.warn(`[Scanner] matchLibraryItems: Match warning ${result.warning} for library item "${libraryItem.media.title}"`)
|
||||
} else if (result.updated) {
|
||||
@ -470,7 +476,7 @@ class Scanner {
|
||||
* @param {import('../routers/ApiRouter')} apiRouterCtx
|
||||
* @param {import('../models/Library')} library
|
||||
*/
|
||||
async matchLibraryItems(apiRouterCtx, library) {
|
||||
async matchLibraryItems(apiRouterCtx, library, options = {}) {
|
||||
if (library.mediaType === 'podcast') {
|
||||
Logger.error(`[Scanner] matchLibraryItems: Match all not supported for podcasts yet`)
|
||||
return
|
||||
@ -509,7 +515,7 @@ class Scanner {
|
||||
offset += limit
|
||||
hasMoreChunks = libraryItems.length === limit
|
||||
|
||||
const shouldContinue = await this.matchLibraryItemsChunk(apiRouterCtx, library, libraryItems, libraryScan)
|
||||
const shouldContinue = await this.matchLibraryItemsChunk(apiRouterCtx, library, libraryItems, libraryScan, options)
|
||||
if (!shouldContinue) {
|
||||
isCanceled = true
|
||||
break
|
||||
|
||||
126
server/utils/cleanup.js
Normal file
126
server/utils/cleanup.js
Normal file
@ -0,0 +1,126 @@
|
||||
const sequelize = require('sequelize')
|
||||
const Logger = require('../Logger')
|
||||
const Database = require('../Database')
|
||||
const SocketAuthority = require('../SocketAuthority')
|
||||
const RssFeedManager = require('../managers/RssFeedManager')
|
||||
|
||||
/**
|
||||
* After deleting book(s), remove empty series
|
||||
*
|
||||
* @param {string[]} seriesIds
|
||||
*/
|
||||
/**
 * After deleting book(s), remove any series in `seriesIds` that no longer
 * contain any books.
 *
 * Runs the lookup and delete inside one transaction; socket events, filter-data
 * updates and RSS feed closing happen only after the commit succeeds. On error
 * the transaction is rolled back and the error is logged (best-effort cleanup,
 * intentionally not rethrown).
 *
 * @param {string[]} seriesIds - candidate series ids to check (no-op when empty/nullish)
 * @returns {Promise<void>}
 */
async function checkRemoveEmptySeries(seriesIds) {
  if (!seriesIds?.length) return

  const transaction = await Database.sequelize.transaction()
  try {
    const seriesToRemove = (
      await Database.seriesModel.findAll({
        where: [
          {
            id: seriesIds
          },
          // Only match series that have zero bookSeries rows, i.e. truly empty series
          sequelize.where(sequelize.literal('(SELECT count(*) FROM bookSeries bs WHERE bs.seriesId = series.id)'), 0)
        ],
        attributes: ['id', 'name', 'libraryId'],
        include: {
          model: Database.bookModel,
          attributes: ['id'],
          required: false // Ensure it includes series even if no books exist
        },
        transaction
      })
    ).map((s) => ({ id: s.id, name: s.name, libraryId: s.libraryId }))

    if (seriesToRemove.length) {
      await Database.seriesModel.destroy({
        where: {
          id: seriesToRemove.map((s) => s.id)
        },
        transaction
      })
    }

    await transaction.commit()

    seriesToRemove.forEach(({ id, name, libraryId }) => {
      // NOTE: log tag updated from "[ApiRouter]" — this function lives in utils/cleanup.js now
      Logger.info(`[cleanup] Series "${name}" is now empty. Removing series`)

      // Remove series from library filter data
      Database.removeSeriesFromFilterData(libraryId, id)
      SocketAuthority.emitter('series_removed', { id: id, libraryId: libraryId })
    })
    // Close rss feeds - remove from db and emit socket event
    if (seriesToRemove.length) {
      await RssFeedManager.closeFeedsForEntityIds(seriesToRemove.map((s) => s.id))
    }
  } catch (error) {
    await transaction.rollback()
    Logger.error(`[cleanup] Error removing empty series: ${error.message}`)
  }
}
|
||||
|
||||
/**
|
||||
* Remove authors with no books and unset asin, description and imagePath
|
||||
* Note: Other implementation is in BookScanner.checkAuthorsRemovedFromBooks (can be merged)
|
||||
*
|
||||
* @param {string[]} authorIds
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async function checkRemoveAuthorsWithNoBooks(authorIds) {
|
||||
if (!authorIds?.length) return
|
||||
|
||||
const transaction = await Database.sequelize.transaction()
|
||||
try {
|
||||
// Select authors with locking to prevent concurrent updates
|
||||
const bookAuthorsToRemove = (
|
||||
await Database.authorModel.findAll({
|
||||
where: [
|
||||
{
|
||||
id: authorIds,
|
||||
asin: {
|
||||
[sequelize.Op.or]: [null, '']
|
||||
},
|
||||
description: {
|
||||
[sequelize.Op.or]: [null, '']
|
||||
},
|
||||
imagePath: {
|
||||
[sequelize.Op.or]: [null, '']
|
||||
}
|
||||
},
|
||||
sequelize.where(sequelize.literal('(SELECT count(*) FROM bookAuthors ba WHERE ba.authorId = author.id)'), 0)
|
||||
],
|
||||
attributes: ['id', 'name', 'libraryId'],
|
||||
raw: true,
|
||||
transaction
|
||||
})
|
||||
).map((au) => ({ id: au.id, name: au.name, libraryId: au.libraryId }))
|
||||
|
||||
if (bookAuthorsToRemove.length) {
|
||||
await Database.authorModel.destroy({
|
||||
where: {
|
||||
id: bookAuthorsToRemove.map((au) => au.id)
|
||||
},
|
||||
transaction
|
||||
})
|
||||
}
|
||||
|
||||
await transaction.commit()
|
||||
|
||||
// Remove all book authors after completing remove from database
|
||||
bookAuthorsToRemove.forEach(({ id, name, libraryId }) => {
|
||||
Database.removeAuthorFromFilterData(libraryId, id)
|
||||
// TODO: Clients were expecting full author in payload but its unnecessary
|
||||
SocketAuthority.emitter('author_removed', { id, libraryId })
|
||||
Logger.info(`[ApiRouter] Removed author "${name}" with no books`)
|
||||
})
|
||||
} catch (error) {
|
||||
await transaction.rollback()
|
||||
Logger.error(`[ApiRouter] Error removing authors: ${error.message}`)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
checkRemoveEmptySeries,
|
||||
checkRemoveAuthorsWithNoBooks
|
||||
}
|
||||
Loading…
Reference in New Issue
Block a user