From 501dc938e6bad4e5342d375b9cdaa92c28faccaf Mon Sep 17 00:00:00 2001 From: mikiher Date: Sun, 29 Sep 2024 09:22:39 +0300 Subject: [PATCH 1/5] Add Nunicode sqlite extension integration --- Dockerfile | 34 +++- index.js | 1 + server/Database.js | 115 ++++++----- server/managers/BinaryManager.js | 180 ++++++++++++++---- server/utils/queries/authorFilters.js | 4 +- .../utils/queries/libraryItemsBookFilters.js | 14 +- .../queries/libraryItemsPodcastFilters.js | 9 +- test/server/managers/BinaryManager.test.js | 108 +++++++++++ 8 files changed, 366 insertions(+), 99 deletions(-) diff --git a/Dockerfile b/Dockerfile index 0d586710..e4676dfb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,20 +11,36 @@ FROM node:20-alpine ENV NODE_ENV=production RUN apk update && \ - apk add --no-cache --update \ - curl \ - tzdata \ - ffmpeg \ - make \ - gcompat \ - python3 \ - g++ \ - tini + apk add --no-cache --update \ + curl \ + tzdata \ + ffmpeg \ + make \ + gcompat \ + python3 \ + g++ \ + tini \ + unzip COPY --from=build /client/dist /client/dist COPY index.js package* / COPY server server +ARG TARGETPLATFORM + +ENV NUSQLITE3_DIR="/usr/local/lib/nusqlite3" +ENV NUSQLITE3_PATH="${NUSQLITE3_DIR}/libnusqlite3.so" + +RUN case "$TARGETPLATFORM" in \ + "linux/amd64") \ + curl -L -o /tmp/library.zip "https://github.com/mikiher/nunicode-sqlite/releases/download/v1.0/libnusqlite3-linux-x64.zip" ;; \ + "linux/arm64") \ + curl -L -o /tmp/library.zip "https://github.com/mikiher/nunicode-sqlite/releases/download/v1.0/libnusqlite3-linux-arm64.zip" ;; \ + *) echo "Unsupported platform: $TARGETPLATFORM" && exit 1 ;; \ + esac && \ + unzip /tmp/library.zip -d $NUSQLITE3_DIR && \ + rm /tmp/library.zip + RUN npm ci --only=production RUN apk del make python3 g++ diff --git a/index.js b/index.js index 141c5826..0ddd04ff 100644 --- a/index.js +++ b/index.js @@ -9,6 +9,7 @@ if (isDev) { if (devEnv.MetadataPath) process.env.METADATA_PATH = devEnv.MetadataPath if (devEnv.FFmpegPath) process.env.FFMPEG_PATH = devEnv.FFmpegPath if (devEnv.FFProbePath) process.env.FFPROBE_PATH = devEnv.FFProbePath + if (devEnv.NunicodePath) process.env.NUNICODE_PATH = devEnv.NunicodePath if (devEnv.SkipBinariesCheck) process.env.SKIP_BINARIES_CHECK = '1' if (devEnv.BackupPath) process.env.BACKUP_PATH = devEnv.BackupPath process.env.SOURCE = 'local' diff --git a/server/Database.js b/server/Database.js index e7bad49b..d71d5378 100644 --- a/server/Database.js +++ b/server/Database.js @@ -28,6 +28,9 @@ class Database { this.notificationSettings = null /** @type {import('./objects/settings/EmailSettings')} */ this.emailSettings = null + + this.supportsUnaccent = false + this.supportsUnicodeFoldings = false } get models() { @@ -223,6 +226,12 @@ class Database { try { await this.sequelize.authenticate() + if (process.env.NUSQLITE3_PATH) { + await this.loadExtension(process.env.NUSQLITE3_PATH) + Logger.info(`[Database] Db supports unaccent and unicode foldings`) + this.supportsUnaccent = true + this.supportsUnicodeFoldings = true + } Logger.info(`[Database] Db connection was successful`) return true } catch (error) { @@ -232,10 +241,9 @@ class Database { } /** - * TODO: Temporarily disabled - * @param {string[]} extensions paths to extension binaries + * @param {string} extension paths to extension binary */ - async loadExtensions(extensions) { + async loadExtension(extension) { // This is a hack to get the db connection for loading extensions. 
// The proper way would be to use the 'afterConnect' hook, but that hook is never called for sqlite due to a bug in sequelize. // See https://github.com/sequelize/sequelize/issues/12487 @@ -243,20 +251,18 @@ class Database { const db = await this.sequelize.dialect.connectionManager.getConnection() if (typeof db?.loadExtension !== 'function') throw new Error('Failed to get db connection for loading extensions') - for (const ext of extensions) { - Logger.info(`[Database] Loading extension ${ext}`) - await new Promise((resolve, reject) => { - db.loadExtension(ext, (err) => { - if (err) { - Logger.error(`[Database] Failed to load extension ${ext}`, err) - reject(err) - return - } - Logger.info(`[Database] Successfully loaded extension ${ext}`) - resolve() - }) + Logger.info(`[Database] Loading extension ${extension}`) + await new Promise((resolve, reject) => { + db.loadExtension(extension, (err) => { + if (err) { + Logger.error(`[Database] Failed to load extension ${extension}`, err) + reject(err) + return + } + Logger.info(`[Database] Successfully loaded extension ${extension}`) + resolve() }) - } + }) } /** @@ -745,37 +751,58 @@ class Database { } } - /** - * TODO: Temporarily unused - * @param {string} value - * @returns {string} - */ - normalize(value) { - return `lower(unaccent(${value}))` + async createTextSearchQuery(query) { + const textQuery = new this.TextSearchQuery(this.sequelize, this.supportsUnaccent, query) + await textQuery.init() + return textQuery } - /** - * TODO: Temporarily unused - * @param {string} query - * @returns {Promise} - */ - async getNormalizedQuery(query) { - const escapedQuery = this.sequelize.escape(query) - const normalizedQuery = this.normalize(escapedQuery) - const normalizedQueryResult = await this.sequelize.query(`SELECT ${normalizedQuery} as normalized_query`) - return normalizedQueryResult[0][0].normalized_query - } + TextSearchQuery = class { + constructor(sequelize, supportsUnaccent, query) { + this.sequelize = sequelize + this.supportsUnaccent = supportsUnaccent + this.query = query + this.hasAccents = false + } - /** - * - * @param {string} column - * @param {string} normalizedQuery - * @returns {string} - */ - matchExpression(column, normalizedQuery) { - const normalizedPattern = this.sequelize.escape(`%${normalizedQuery}%`) - const normalizedColumn = column - return `${normalizedColumn} LIKE ${normalizedPattern}` + /** + * Returns a normalized (accents-removed) expression for the specified value. + * + * @param {string} value + * @returns {string} + */ + normalize(value) { + return `unaccent(${value})` + } + + /** + * Initialize the text query. + * + */ + async init() { + if (!this.supportsUnaccent) return + const escapedQuery = this.sequelize.escape(this.query) + const normalizedQueryExpression = this.normalize(escapedQuery) + const normalizedQueryResult = await this.sequelize.query(`SELECT ${normalizedQueryExpression} as normalized_query`) + const normalizedQuery = normalizedQueryResult[0][0].normalized_query + this.hasAccents = escapedQuery !== this.sequelize.escape(normalizedQuery) + Logger.debug(`[TextSearchQuery] hasAccents: ${this.hasAccents}`) + } + + /** + * Get match expression for the specified column. + * If the query contains accents, match against the column as-is (case-insensitive exact match). + * otherwise match against a normalized column (case-insensitive match with accents removed). 
+ * + * @param {string} column + * @returns {string} + */ + matchExpression(column) { + const pattern = this.sequelize.escape(`%${this.query}%`) + if (!this.supportsUnaccent) return `${column} LIKE ${pattern}` + const normalizedColumn = this.hasAccents ? column : this.normalize(column) + return `${normalizedColumn} LIKE ${pattern}` + } } } diff --git a/server/managers/BinaryManager.js b/server/managers/BinaryManager.js index 823a4c0e..a5202354 100644 --- a/server/managers/BinaryManager.js +++ b/server/managers/BinaryManager.js @@ -76,18 +76,27 @@ class ZippedAssetDownloader { async extractFiles(zipPath, filesToExtract, destDir) { const zip = new StreamZip.async({ file: zipPath }) - for (const file of filesToExtract) { - const outputPath = path.join(destDir, file.outputFileName) - await zip.extract(file.pathInsideZip, outputPath) - Logger.debug(`[ZippedAssetDownloader] Extracted file ${file.pathInsideZip} to ${outputPath}`) + try { + for (const file of filesToExtract) { + const outputPath = path.join(destDir, file.outputFileName) + if (!(await zip.entry(file.pathInsideZip))) { + Logger.error(`[ZippedAssetDownloader] File ${file.pathInsideZip} not found in zip file ${zipPath}`) + continue + } + await zip.extract(file.pathInsideZip, outputPath) + Logger.debug(`[ZippedAssetDownloader] Extracted file ${file.pathInsideZip} to ${outputPath}`) - // Set executable permission for Linux - if (process.platform !== 'win32') { - await fs.chmod(outputPath, 0o755) + // Set executable permission for Linux + if (process.platform !== 'win32') { + await fs.chmod(outputPath, 0o755) + } } + } catch (error) { + Logger.error('[ZippedAssetDownloader] Error extracting files:', error) + throw error + } finally { + await zip.close() } - - await zip.close() } async downloadAndExtractFiles(releaseTag, assetName, filesToExtract, destDir) { @@ -99,7 +108,6 @@ class ZippedAssetDownloader { await this.extractFiles(zipPath, filesToExtract, destDir) } catch (error) { Logger.error(`[ZippedAssetDownloader] Error downloading or extracting files: ${error.message}`) - throw error } finally { if (zipPath) await fs.remove(zipPath) } @@ -164,14 +172,67 @@ class FFBinariesDownloader extends ZippedAssetDownloader { } } +class NunicodeDownloader extends ZippedAssetDownloader { + constructor() { + super() + this.platformSuffix = this.getPlatformSuffix() + } + + getPlatformSuffix() { + const platform = process.platform + const arch = process.arch + + if (platform === 'win32' && arch === 'x64') { + return 'win-x64' + } else if (platform === 'darwin' && (arch === 'x64' || arch === 'arm64')) { + return 'osx-arm64' + } else if (platform === 'linux' && arch === 'x64') { + return 'linux-x64' + } else if (platform === 'linux' && arch === 'arm64') { + return 'linux-arm64' + } + + return null + } + + async getAssetUrl(releaseTag, assetName) { + return `https://github.com/mikiher/nunicode-sqlite/releases/download/v${releaseTag}/${assetName}` + } + + getAssetName(binaryName, releaseTag) { + if (!this.platformSuffix) { + throw new Error(`[NunicodeDownloader] Platform ${process.platform}-${process.arch} not supported`) + } + return `${binaryName}-${this.platformSuffix}.zip` + } + + getAssetFileName(binaryName) { + if (process.platform === 'win32') { + return `${binaryName}.dll` + } else if (process.platform === 'darwin') { + return `${binaryName}.dylib` + } else if (process.platform === 'linux') { + return `${binaryName}.so` + } + + throw new Error(`[NunicodeDownloader] Platform ${process.platform} not supported`) + } +} + class Binary { - 
constructor(name, type, envVariable, validVersions, source) { + constructor(name, type, envVariable, validVersions, source, required = true) { + if (!name) throw new Error('Binary name is required') this.name = name + if (!type) throw new Error('Binary type is required') this.type = type + if (!envVariable) throw new Error('Binary environment variable name is required') this.envVariable = envVariable + if (!validVersions || !validVersions.length) throw new Error(`No valid versions specified for ${type} ${name}. At least one version is required.`) this.validVersions = validVersions + if (!source || !(source instanceof ZippedAssetDownloader)) throw new Error('Binary source is required, and must be an instance of ZippedAssetDownloader') this.source = source this.fileName = this.getFileName() + this.required = required this.exec = exec } @@ -205,37 +266,65 @@ class Binary { } } - async isGood(binaryPath) { - if (!binaryPath || !(await fs.pathExists(binaryPath))) return false - if (!this.validVersions.length) return true - if (this.type === 'library') return true + async isLibraryVersionValid(libraryPath) { try { - const { stdout } = await this.exec('"' + binaryPath + '"' + ' -version') + const versionFilePath = libraryPath + '.ver' + if (!(await fs.pathExists(versionFilePath))) return false + const version = (await fs.readFile(versionFilePath, 'utf8')).trim() + return this.validVersions.some((validVersion) => version.startsWith(validVersion)) + } catch (err) { + Logger.error(`[Binary] Failed to check version of ${libraryPath}`, err) + return false + } + } + + async isExecutableVersionValid(executablePath) { + try { + const { stdout } = await this.exec('"' + executablePath + '"' + ' -version') const version = stdout.match(/version\s([\d\.]+)/)?.[1] if (!version) return false return this.validVersions.some((validVersion) => version.startsWith(validVersion)) } catch (err) { - Logger.error(`[Binary] Failed to check version of ${binaryPath}`) + Logger.error(`[Binary] Failed to check version of ${executablePath}`, err) + return false + } + } + + async isGood(binaryPath) { + try { + if (!binaryPath || !(await fs.pathExists(binaryPath))) return false + if (this.type === 'library') return await this.isLibraryVersionValid(binaryPath) + else if (this.type === 'executable') return await this.isExecutableVersionValid(binaryPath) + else return true + } catch (err) { + Logger.error(`[Binary] Failed to check ${this.type} ${this.name} at ${binaryPath}`, err) return false } } async download(destination) { - await this.source.downloadBinary(this.name, this.validVersions[0], destination) + const version = this.validVersions[0] + try { + await this.source.downloadBinary(this.name, version, destination) + // if it's a library, write the version string to a file + if (this.type === 'library') { + const libraryPath = path.join(destination, this.fileName) + await fs.writeFile(libraryPath + '.ver', version) + } + } catch (err) { + Logger.error(`[Binary] Failed to download ${this.type} ${this.name} version ${version} to ${destination}`, err) + } } } const ffbinaries = new FFBinariesDownloader() -module.exports.ffbinaries = ffbinaries // for testing -//const sqlean = new SQLeanDownloader() -//module.exports.sqlean = sqlean // for testing +const nunicode = new NunicodeDownloader() class BinaryManager { defaultRequiredBinaries = [ new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries), // ffmpeg executable - new Binary('ffprobe', 'executable', 'FFPROBE_PATH', ['5.1'], ffbinaries) // ffprobe executable - // 
TODO: Temporarily disabled due to db corruption issues - // new Binary('unicode', 'library', 'SQLEAN_UNICODE_PATH', ['0.24.2'], sqlean) // sqlean unicode extension + new Binary('ffprobe', 'executable', 'FFPROBE_PATH', ['5.1'], ffbinaries), // ffprobe executable + new Binary('libnusqlite3', 'library', 'NUSQLITE3_PATH', ['1.0'], nunicode, false) // nunicode sqlite3 extension ] constructor(requiredBinaries = this.defaultRequiredBinaries) { @@ -249,7 +338,7 @@ class BinaryManager { // Optional skip binaries check if (process.env.SKIP_BINARIES_CHECK === '1') { for (const binary of this.requiredBinaries) { - if (!process.env[binary.envVariable]) { + if (!process.env[binary.envVariable] && binary.required) { await Logger.fatal(`[BinaryManager] Environment variable ${binary.envVariable} must be set`) process.exit(1) } @@ -265,21 +354,37 @@ class BinaryManager { await this.removeOldBinaries(missingBinaries) await this.install(missingBinaries) const missingBinariesAfterInstall = await this.findRequiredBinaries() - if (missingBinariesAfterInstall.length) { - Logger.error(`[BinaryManager] Failed to find or install required binaries: ${missingBinariesAfterInstall.join(', ')}`) + const missingRequiredBinryNames = missingBinariesAfterInstall.filter((binary) => binary.required).map((binary) => binary.name) + if (missingRequiredBinryNames.length) { + Logger.error(`[BinaryManager] Failed to find or install required binaries: ${missingRequiredBinryNames.join(', ')}`) process.exit(1) } this.initialized = true } + /** + * Remove binary + * + * @param {string} destination + * @param {Binary} binary + */ async removeBinary(destination, binary) { - const binaryPath = path.join(destination, binary.fileName) - if (await fs.pathExists(binaryPath)) { - Logger.debug(`[BinaryManager] Removing binary: ${binaryPath}`) - await fs.remove(binaryPath) + try { + const binaryPath = path.join(destination, binary.fileName) + if (await fs.pathExists(binaryPath)) { + Logger.debug(`[BinaryManager] Removing binary: ${binaryPath}`) + await fs.remove(binaryPath) + } + } catch (err) { + Logger.error(`[BinaryManager] Error removing binary: ${binaryPath}`) } } + /** + * Remove old binaries + * + * @param {Binary[]} binaries + */ async removeOldBinaries(binaries) { for (const binary of binaries) { await this.removeBinary(this.mainInstallDir, binary) @@ -290,26 +395,31 @@ class BinaryManager { /** * Find required binaries and return array of binary names that are missing * - * @returns {Promise} + * @returns {Promise} Array of missing binaries */ async findRequiredBinaries() { const missingBinaries = [] for (const binary of this.requiredBinaries) { const binaryPath = await binary.find(this.mainInstallDir, this.altInstallDir) if (binaryPath) { - Logger.info(`[BinaryManager] Found valid binary ${binary.name} at ${binaryPath}`) + Logger.info(`[BinaryManager] Found valid ${binary.type} ${binary.name} at ${binaryPath}`) if (process.env[binary.envVariable] !== binaryPath) { Logger.info(`[BinaryManager] Updating process.env.${binary.envVariable}`) process.env[binary.envVariable] = binaryPath } } else { - Logger.info(`[BinaryManager] ${binary.name} not found or version too old`) + Logger.info(`[BinaryManager] ${binary.name} not found or not a valid version`) missingBinaries.push(binary) } } return missingBinaries } + /** + * Install missing binaries + * + * @param {Binary[]} binaries + */ async install(binaries) { if (!binaries.length) return Logger.info(`[BinaryManager] Installing binaries: ${binaries.map((binary) => binary.name).join(', ')}`) @@ 
-323,3 +433,5 @@ class BinaryManager { module.exports = BinaryManager module.exports.Binary = Binary // for testing +module.exports.ffbinaries = ffbinaries // for testing +module.exports.nunicode = nunicode // for testing diff --git a/server/utils/queries/authorFilters.js b/server/utils/queries/authorFilters.js index 67591535..3d6bc7bd 100644 --- a/server/utils/queries/authorFilters.js +++ b/server/utils/queries/authorFilters.js @@ -54,13 +54,13 @@ module.exports = { * Search authors * * @param {string} libraryId - * @param {string} query + * @param {Database.TextQuery} query * @param {number} limit * @param {number} offset * @returns {Promise} oldAuthor with numBooks */ async search(libraryId, query, limit, offset) { - const matchAuthor = Database.matchExpression('name', query) + const matchAuthor = query.matchExpression('name') const authors = await Database.authorModel.findAll({ where: { [Sequelize.Op.and]: [Sequelize.literal(matchAuthor), { libraryId }] diff --git a/server/utils/queries/libraryItemsBookFilters.js b/server/utils/queries/libraryItemsBookFilters.js index ae1ccc03..2b9ce756 100644 --- a/server/utils/queries/libraryItemsBookFilters.js +++ b/server/utils/queries/libraryItemsBookFilters.js @@ -975,10 +975,12 @@ module.exports = { async search(user, library, query, limit, offset) { const userPermissionBookWhere = this.getUserPermissionBookWhereQuery(user) - const normalizedQuery = query + const textSearchQuery = await Database.createTextSearchQuery(query) - const matchTitle = Database.matchExpression('title', normalizedQuery) - const matchSubtitle = Database.matchExpression('subtitle', normalizedQuery) + const matchTitle = textSearchQuery.matchExpression('title') + const matchSubtitle = textSearchQuery.matchExpression('subtitle') + Logger.debug(`[libraryItemsBookFilters] matchTitle: ${matchTitle}`) + Logger.debug(`[libraryItemsBookFilters] matchSubtitle: ${matchSubtitle}`) // Search title, subtitle, asin, isbn const books = await Database.bookModel.findAll({ @@ -1041,7 +1043,7 @@ module.exports = { }) } - const matchJsonValue = Database.matchExpression('json_each.value', normalizedQuery) + const matchJsonValue = textSearchQuery.matchExpression('json_each.value') // Search narrators const narratorMatches = [] @@ -1095,7 +1097,7 @@ module.exports = { } // Search series - const matchName = Database.matchExpression('name', normalizedQuery) + const matchName = textSearchQuery.matchExpression('name') const allSeries = await Database.seriesModel.findAll({ where: { [Sequelize.Op.and]: [ @@ -1136,7 +1138,7 @@ module.exports = { } // Search authors - const authorMatches = await authorFilters.search(library.id, normalizedQuery, limit, offset) + const authorMatches = await authorFilters.search(library.id, textSearchQuery, limit, offset) return { book: itemMatches, diff --git a/server/utils/queries/libraryItemsPodcastFilters.js b/server/utils/queries/libraryItemsPodcastFilters.js index 50163edf..2f259efc 100644 --- a/server/utils/queries/libraryItemsPodcastFilters.js +++ b/server/utils/queries/libraryItemsPodcastFilters.js @@ -315,9 +315,10 @@ module.exports = { async search(user, library, query, limit, offset) { const userPermissionPodcastWhere = this.getUserPermissionPodcastWhereQuery(user) - const normalizedQuery = query - const matchTitle = Database.matchExpression('title', normalizedQuery) - const matchAuthor = Database.matchExpression('author', normalizedQuery) + const textSearchQuery = await Database.createTextSearchQuery(query) + + const matchTitle = 
textSearchQuery.matchExpression('title') + const matchAuthor = textSearchQuery.matchExpression('author') // Search title, author, itunesId, itunesArtistId const podcasts = await Database.podcastModel.findAll({ @@ -366,7 +367,7 @@ module.exports = { }) } - const matchJsonValue = Database.matchExpression('json_each.value', normalizedQuery) + const matchJsonValue = textSearchQuery.matchExpression('json_each.value') // Search tags const tagMatches = [] diff --git a/test/server/managers/BinaryManager.test.js b/test/server/managers/BinaryManager.test.js index 365fdff9..5ec38c4b 100644 --- a/test/server/managers/BinaryManager.test.js +++ b/test/server/managers/BinaryManager.test.js @@ -85,6 +85,25 @@ describe('BinaryManager', () => { expect(exitStub.calledOnce).to.be.true expect(exitStub.calledWith(1)).to.be.true }) + + it('should not exit if binaries are not found but not required', async () => { + const ffmpegBinary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries) + const ffprobeBinary = new Binary('ffprobe', 'executable', 'FFPROBE_PATH', ['5.1'], ffbinaries, false) + const requiredBinaries = [ffmpegBinary] + const missingBinaries = [ffprobeBinary] + const missingBinariesAfterInstall = [ffprobeBinary] + findStub.onFirstCall().resolves(missingBinaries) + findStub.onSecondCall().resolves(missingBinariesAfterInstall) + binaryManager.requiredBinaries = requiredBinaries + + await binaryManager.init() + + expect(findStub.calledTwice).to.be.true + expect(installStub.calledOnce).to.be.true + expect(removeOldBinariesStub.calledOnce).to.be.true + expect(errorStub.called).to.be.false + expect(exitStub.called).to.be.false + }) }) describe('findRequiredBinaries', () => { @@ -296,6 +315,7 @@ describe('Binary', () => { describe('isGood', () => { let binary let fsPathExistsStub + let fsReadFileStub let execStub const binaryPath = '/path/to/binary' @@ -305,11 +325,13 @@ describe('Binary', () => { beforeEach(() => { binary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', goodVersions, ffbinaries) fsPathExistsStub = sinon.stub(fs, 'pathExists') + fsReadFileStub = sinon.stub(fs, 'readFile') execStub = sinon.stub(binary, 'exec') }) afterEach(() => { fsPathExistsStub.restore() + fsReadFileStub.restore() execStub.restore() }) @@ -388,6 +410,53 @@ describe('Binary', () => { expect(execStub.calledOnce).to.be.true expect(execStub.calledWith(execCommand)).to.be.true }) + + it('should check library version file', async () => { + const binary = new Binary('libavcodec', 'library', 'FFMPEG_PATH', ['5.1'], ffbinaries) + fsReadFileStub.resolves('5.1.2 ') + fsPathExistsStub.onFirstCall().resolves(true) + fsPathExistsStub.onSecondCall().resolves(true) + + const result = await binary.isGood(binaryPath) + + expect(result).to.be.true + expect(fsPathExistsStub.calledTwice).to.be.true + expect(fsPathExistsStub.firstCall.args[0]).to.be.equal(binaryPath) + expect(fsPathExistsStub.secondCall.args[0]).to.be.equal(binaryPath + '.ver') + expect(fsReadFileStub.calledOnce).to.be.true + expect(fsReadFileStub.calledWith(binaryPath + '.ver'), 'utf8').to.be.true + }) + + it('should return false if library version file does not exist', async () => { + const binary = new Binary('libavcodec', 'library', 'FFMPEG_PATH', ['5.1'], ffbinaries) + fsReadFileStub.resolves('5.1.2 ') + fsPathExistsStub.onFirstCall().resolves(true) + fsPathExistsStub.onSecondCall().resolves(false) + + const result = await binary.isGood(binaryPath) + + expect(result).to.be.false + expect(fsPathExistsStub.calledTwice).to.be.true + 
expect(fsPathExistsStub.firstCall.args[0]).to.be.equal(binaryPath) + expect(fsPathExistsStub.secondCall.args[0]).to.be.equal(binaryPath + '.ver') + expect(fsReadFileStub.called).to.be.false + }) + + it('should return false if library version does not match a valid version', async () => { + const binary = new Binary('libavcodec', 'library', 'FFMPEG_PATH', ['5.1'], ffbinaries) + fsReadFileStub.resolves('5.2.1 ') + fsPathExistsStub.onFirstCall().resolves(true) + fsPathExistsStub.onSecondCall().resolves(true) + + const result = await binary.isGood(binaryPath) + + expect(result).to.be.false + expect(fsPathExistsStub.calledTwice).to.be.true + expect(fsPathExistsStub.firstCall.args[0]).to.be.equal(binaryPath) + expect(fsPathExistsStub.secondCall.args[0]).to.be.equal(binaryPath + '.ver') + expect(fsReadFileStub.calledOnce).to.be.true + expect(fsReadFileStub.calledWith(binaryPath + '.ver'), 'utf8').to.be.true + }) }) describe('getFileName', () => { @@ -452,4 +521,43 @@ describe('Binary', () => { expect(result).to.equal('ffmpeg') }) }) + + describe('download', () => { + let binary + let downloadBinaryStub + let fsWriteFileStub + + beforeEach(() => { + binary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries) + downloadBinaryStub = sinon.stub(binary.source, 'downloadBinary') + fsWriteFileStub = sinon.stub(fs, 'writeFile') + }) + + afterEach(() => { + downloadBinaryStub.restore() + fsWriteFileStub.restore() + }) + + it('should call downloadBinary with the correct parameters', async () => { + const destination = '/path/to/destination' + + await binary.download(destination) + + expect(downloadBinaryStub.calledOnce).to.be.true + expect(downloadBinaryStub.calledWith('ffmpeg', '5.1', destination)).to.be.true + }) + + it('should write a version file for libraries', async () => { + const binary = new Binary('libavcodec', 'library', 'FFMPEG_PATH', ['5.1'], ffbinaries) + const destination = '/path/to/destination' + const versionFilePath = path.join(destination, binary.fileName) + '.ver' + + await binary.download(destination) + + expect(downloadBinaryStub.calledOnce).to.be.true + expect(downloadBinaryStub.calledWith('libavcodec', '5.1', destination)).to.be.true + expect(fsWriteFileStub.calledOnce).to.be.true + expect(fsWriteFileStub.calledWith(versionFilePath, '5.1')).to.be.true + }) + }) }) From 37eae3406c9bfec59e4ba8a4c5e1c8f6f038380a Mon Sep 17 00:00:00 2001 From: mikiher Date: Sun, 29 Sep 2024 12:27:30 +0300 Subject: [PATCH 2/5] Remove debug messages --- server/Database.js | 1 - server/utils/queries/libraryItemsBookFilters.js | 2 -- 2 files changed, 3 deletions(-) diff --git a/server/Database.js b/server/Database.js index d71d5378..a936327d 100644 --- a/server/Database.js +++ b/server/Database.js @@ -786,7 +786,6 @@ class Database { const normalizedQueryResult = await this.sequelize.query(`SELECT ${normalizedQueryExpression} as normalized_query`) const normalizedQuery = normalizedQueryResult[0][0].normalized_query this.hasAccents = escapedQuery !== this.sequelize.escape(normalizedQuery) - Logger.debug(`[TextSearchQuery] hasAccents: ${this.hasAccents}`) } /** diff --git a/server/utils/queries/libraryItemsBookFilters.js b/server/utils/queries/libraryItemsBookFilters.js index 2b9ce756..6bafa225 100644 --- a/server/utils/queries/libraryItemsBookFilters.js +++ b/server/utils/queries/libraryItemsBookFilters.js @@ -979,8 +979,6 @@ module.exports = { const matchTitle = textSearchQuery.matchExpression('title') const matchSubtitle = textSearchQuery.matchExpression('subtitle') - 
Logger.debug(`[libraryItemsBookFilters] matchTitle: ${matchTitle}`) - Logger.debug(`[libraryItemsBookFilters] matchSubtitle: ${matchSubtitle}`) // Search title, subtitle, asin, isbn const books = await Database.bookModel.findAll({ From 7108501d242a70662d4807dbf266632b0d6b8c2a Mon Sep 17 00:00:00 2001 From: advplyr Date: Sun, 29 Sep 2024 11:37:13 -0500 Subject: [PATCH 3/5] Add libnusqlite3 to gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index ca3768ba..1eda8e1f 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,7 @@ /ffmpeg* /ffprobe* /unicode* +/libnusqlite3* sw.* .DS_STORE From 4a7ada28fb413366321fefed27aa2d7579217929 Mon Sep 17 00:00:00 2001 From: mikiher Date: Tue, 1 Oct 2024 16:47:40 +0300 Subject: [PATCH 4/5] Switch to nunicode-binaries v1.1 --- Dockerfile | 4 ++-- server/managers/BinaryManager.js | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index e4676dfb..7e9e416e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -33,9 +33,9 @@ ENV NUSQLITE3_PATH="${NUSQLITE3_DIR}/libnusqlite3.so" RUN case "$TARGETPLATFORM" in \ "linux/amd64") \ - curl -L -o /tmp/library.zip "https://github.com/mikiher/nunicode-sqlite/releases/download/v1.0/libnusqlite3-linux-x64.zip" ;; \ + curl -L -o /tmp/library.zip "https://github.com/mikiher/nunicode-sqlite/releases/download/v1.1/libnusqlite3-linux-x64.zip" ;; \ "linux/arm64") \ - curl -L -o /tmp/library.zip "https://github.com/mikiher/nunicode-sqlite/releases/download/v1.0/libnusqlite3-linux-arm64.zip" ;; \ + curl -L -o /tmp/library.zip "https://github.com/mikiher/nunicode-sqlite/releases/download/v1.1/libnusqlite3-linux-arm64.zip" ;; \ *) echo "Unsupported platform: $TARGETPLATFORM" && exit 1 ;; \ esac && \ unzip /tmp/library.zip -d $NUSQLITE3_DIR && \ diff --git a/server/managers/BinaryManager.js b/server/managers/BinaryManager.js index a5202354..71547420 100644 --- a/server/managers/BinaryManager.js +++ b/server/managers/BinaryManager.js @@ -324,7 +324,7 @@ class BinaryManager { defaultRequiredBinaries = [ new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries), // ffmpeg executable new Binary('ffprobe', 'executable', 'FFPROBE_PATH', ['5.1'], ffbinaries), // ffprobe executable - new Binary('libnusqlite3', 'library', 'NUSQLITE3_PATH', ['1.0'], nunicode, false) // nunicode sqlite3 extension + new Binary('libnusqlite3', 'library', 'NUSQLITE3_PATH', ['1.1'], nunicode, false) // nunicode sqlite3 extension ] constructor(requiredBinaries = this.defaultRequiredBinaries) { From 086532652eb20866617f1b6049bc182e15c525e8 Mon Sep 17 00:00:00 2001 From: mikiher Date: Tue, 1 Oct 2024 17:15:44 +0300 Subject: [PATCH 5/5] Fix to NUSQLITE3_PATH in index.js --- index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/index.js b/index.js index 0ddd04ff..de1ed5c3 100644 --- a/index.js +++ b/index.js @@ -9,7 +9,7 @@ if (isDev) { if (devEnv.MetadataPath) process.env.METADATA_PATH = devEnv.MetadataPath if (devEnv.FFmpegPath) process.env.FFMPEG_PATH = devEnv.FFmpegPath if (devEnv.FFProbePath) process.env.FFPROBE_PATH = devEnv.FFProbePath - if (devEnv.NunicodePath) process.env.NUNICODE_PATH = devEnv.NunicodePath + if (devEnv.NunicodePath) process.env.NUSQLITE3_PATH = devEnv.NunicodePath if (devEnv.SkipBinariesCheck) process.env.SKIP_BINARIES_CHECK = '1' if (devEnv.BackupPath) process.env.BACKUP_PATH = devEnv.BackupPath process.env.SOURCE = 'local'
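
A minimal usage sketch (illustrative only, not part of the patch series above): it shows how the loaded libnusqlite3 extension is expected to be used for accent-insensitive matching, assuming NUSQLITE3_PATH points at a libnusqlite3 build for the current platform, the node-sqlite3 driver is available, and the extension exposes the unaccent() SQL function that Database.TextSearchQuery.normalize() relies on. In the server itself the extension is loaded through the raw Sequelize connection in Database.loadExtension(); this standalone version only mirrors that flow.

    // Illustrative sketch (assumptions: NUSQLITE3_PATH is set, the sqlite3 npm
    // package is installed, and libnusqlite3 provides unaccent()).
    const sqlite3 = require('sqlite3')

    const db = new sqlite3.Database(':memory:')
    db.loadExtension(process.env.NUSQLITE3_PATH, (err) => {
      if (err) throw err
      // Mirrors TextSearchQuery.matchExpression('title') for a query without
      // accents: the column value is passed through unaccent() before the
      // case-insensitive LIKE comparison against the escaped %query% pattern.
      db.get(`SELECT unaccent('Crème Brûlée') LIKE '%creme%' AS matched`, (err, row) => {
        if (err) throw err
        console.log(row.matched) // expected 1 when accent folding works
      })
    })

For an accented query (e.g. 'crème'), TextSearchQuery.init() detects that unaccent() changes the escaped query and matchExpression() then compares against the column as-is, so accented input still finds accented titles.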