Mirror of https://github.com/advplyr/audiobookshelf.git
Synced 2024-12-20 19:06:06 +01:00

Merge pull request #3468 from mikiher/nunicode-intergration

Nunicode integration

Commit 1b1b71a9b6
.gitignore (vendored, 1 change):
@@ -16,6 +16,7 @@
 /ffmpeg*
 /ffprobe*
 /unicode*
+/libnusqlite3*
 
 sw.*
 .DS_STORE
Dockerfile (34 changes):
@@ -11,20 +11,36 @@ FROM node:20-alpine
 ENV NODE_ENV=production
 
 RUN apk update && \
   apk add --no-cache --update \
   curl \
   tzdata \
   ffmpeg \
   make \
   gcompat \
   python3 \
   g++ \
-  tini
+  tini \
+  unzip
 
 COPY --from=build /client/dist /client/dist
 COPY index.js package* /
 COPY server server
 
+ARG TARGETPLATFORM
+
+ENV NUSQLITE3_DIR="/usr/local/lib/nusqlite3"
+ENV NUSQLITE3_PATH="${NUSQLITE3_DIR}/libnusqlite3.so"
+
+RUN case "$TARGETPLATFORM" in \
+  "linux/amd64") \
+    curl -L -o /tmp/library.zip "https://github.com/mikiher/nunicode-sqlite/releases/download/v1.1/libnusqlite3-linux-x64.zip" ;; \
+  "linux/arm64") \
+    curl -L -o /tmp/library.zip "https://github.com/mikiher/nunicode-sqlite/releases/download/v1.1/libnusqlite3-linux-arm64.zip" ;; \
+  *) echo "Unsupported platform: $TARGETPLATFORM" && exit 1 ;; \
+  esac && \
+  unzip /tmp/library.zip -d $NUSQLITE3_DIR && \
+  rm /tmp/library.zip
+
 RUN npm ci --only=production
 
 RUN apk del make python3 g++
index.js (1 change):
@@ -9,6 +9,7 @@ if (isDev) {
   if (devEnv.MetadataPath) process.env.METADATA_PATH = devEnv.MetadataPath
   if (devEnv.FFmpegPath) process.env.FFMPEG_PATH = devEnv.FFmpegPath
   if (devEnv.FFProbePath) process.env.FFPROBE_PATH = devEnv.FFProbePath
+  if (devEnv.NunicodePath) process.env.NUSQLITE3_PATH = devEnv.NunicodePath
   if (devEnv.SkipBinariesCheck) process.env.SKIP_BINARIES_CHECK = '1'
   if (devEnv.BackupPath) process.env.BACKUP_PATH = devEnv.BackupPath
   process.env.SOURCE = 'local'
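Note: in development these process.env values come from the devEnv object read in the isDev block above rather than from real environment variables. A hypothetical dev config sketch showing how the new NunicodePath override would be supplied (the file shape and every path here are illustrative assumptions, not part of this commit):

// dev config (hypothetical), consumed as devEnv by the isDev block in index.js
module.exports.config = {
  FFmpegPath: '/opt/ffmpeg/ffmpeg',
  FFProbePath: '/opt/ffmpeg/ffprobe',
  NunicodePath: '/opt/nusqlite3/libnusqlite3.so', // surfaces as process.env.NUSQLITE3_PATH
  SkipBinariesCheck: false
}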
server/Database.js:

@@ -28,6 +28,9 @@ class Database {
     this.notificationSettings = null
     /** @type {import('./objects/settings/EmailSettings')} */
     this.emailSettings = null
+
+    this.supportsUnaccent = false
+    this.supportsUnicodeFoldings = false
   }
 
   get models() {
@@ -223,6 +226,12 @@ class Database {
 
     try {
       await this.sequelize.authenticate()
+      if (process.env.NUSQLITE3_PATH) {
+        await this.loadExtension(process.env.NUSQLITE3_PATH)
+        Logger.info(`[Database] Db supports unaccent and unicode foldings`)
+        this.supportsUnaccent = true
+        this.supportsUnicodeFoldings = true
+      }
       Logger.info(`[Database] Db connection was successful`)
       return true
     } catch (error) {
@@ -232,10 +241,9 @@
   }
 
   /**
-   * TODO: Temporarily disabled
-   * @param {string[]} extensions paths to extension binaries
+   * @param {string} extension paths to extension binary
    */
-  async loadExtensions(extensions) {
+  async loadExtension(extension) {
     // This is a hack to get the db connection for loading extensions.
     // The proper way would be to use the 'afterConnect' hook, but that hook is never called for sqlite due to a bug in sequelize.
     // See https://github.com/sequelize/sequelize/issues/12487
@@ -243,20 +251,18 @@
     const db = await this.sequelize.dialect.connectionManager.getConnection()
     if (typeof db?.loadExtension !== 'function') throw new Error('Failed to get db connection for loading extensions')
 
-    for (const ext of extensions) {
-      Logger.info(`[Database] Loading extension ${ext}`)
-      await new Promise((resolve, reject) => {
-        db.loadExtension(ext, (err) => {
-          if (err) {
-            Logger.error(`[Database] Failed to load extension ${ext}`, err)
-            reject(err)
-            return
-          }
-          Logger.info(`[Database] Successfully loaded extension ${ext}`)
-          resolve()
-        })
-      })
-    }
+    Logger.info(`[Database] Loading extension ${extension}`)
+    await new Promise((resolve, reject) => {
+      db.loadExtension(extension, (err) => {
+        if (err) {
+          Logger.error(`[Database] Failed to load extension ${extension}`, err)
+          reject(err)
+          return
+        }
+        Logger.info(`[Database] Successfully loaded extension ${extension}`)
+        resolve()
+      })
+    })
   }
 
   /**
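Note: the db.loadExtension call above is the sqlite3 driver's Database#loadExtension, reached through Sequelize's connection manager. A minimal standalone sketch of the same call, assuming the sqlite3 npm package and an illustrative path to a local libnusqlite3 build:

const sqlite3 = require('sqlite3')

const db = new sqlite3.Database(':memory:')
// Path is an assumption; in the server it comes from process.env.NUSQLITE3_PATH
db.loadExtension('/usr/local/lib/nusqlite3/libnusqlite3.so', (err) => {
  if (err) throw err
  // unaccent() is one of the SQL functions the nunicode extension registers
  db.get("SELECT unaccent('Crème Brûlée') AS plain", (err, row) => {
    if (err) throw err
    console.log(row.plain) // expected: Creme Brulee
  })
})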
@@ -745,37 +751,57 @@
     }
   }
 
-  /**
-   * TODO: Temporarily unused
-   * @param {string} value
-   * @returns {string}
-   */
-  normalize(value) {
-    return `lower(unaccent(${value}))`
-  }
-
-  /**
-   * TODO: Temporarily unused
-   * @param {string} query
-   * @returns {Promise<string>}
-   */
-  async getNormalizedQuery(query) {
-    const escapedQuery = this.sequelize.escape(query)
-    const normalizedQuery = this.normalize(escapedQuery)
-    const normalizedQueryResult = await this.sequelize.query(`SELECT ${normalizedQuery} as normalized_query`)
-    return normalizedQueryResult[0][0].normalized_query
-  }
-
-  /**
-   *
-   * @param {string} column
-   * @param {string} normalizedQuery
-   * @returns {string}
-   */
-  matchExpression(column, normalizedQuery) {
-    const normalizedPattern = this.sequelize.escape(`%${normalizedQuery}%`)
-    const normalizedColumn = column
-    return `${normalizedColumn} LIKE ${normalizedPattern}`
-  }
+  async createTextSearchQuery(query) {
+    const textQuery = new this.TextSearchQuery(this.sequelize, this.supportsUnaccent, query)
+    await textQuery.init()
+    return textQuery
+  }
+
+  TextSearchQuery = class {
+    constructor(sequelize, supportsUnaccent, query) {
+      this.sequelize = sequelize
+      this.supportsUnaccent = supportsUnaccent
+      this.query = query
+      this.hasAccents = false
+    }
+
+    /**
+     * Returns a normalized (accents-removed) expression for the specified value.
+     *
+     * @param {string} value
+     * @returns {string}
+     */
+    normalize(value) {
+      return `unaccent(${value})`
+    }
+
+    /**
+     * Initialize the text query.
+     *
+     */
+    async init() {
+      if (!this.supportsUnaccent) return
+      const escapedQuery = this.sequelize.escape(this.query)
+      const normalizedQueryExpression = this.normalize(escapedQuery)
+      const normalizedQueryResult = await this.sequelize.query(`SELECT ${normalizedQueryExpression} as normalized_query`)
+      const normalizedQuery = normalizedQueryResult[0][0].normalized_query
+      this.hasAccents = escapedQuery !== this.sequelize.escape(normalizedQuery)
+    }
+
+    /**
+     * Get match expression for the specified column.
+     * If the query contains accents, match against the column as-is (case-insensitive exact match).
+     * otherwise match against a normalized column (case-insensitive match with accents removed).
+     *
+     * @param {string} column
+     * @returns {string}
+     */
+    matchExpression(column) {
+      const pattern = this.sequelize.escape(`%${this.query}%`)
+      if (!this.supportsUnaccent) return `${column} LIKE ${pattern}`
+      const normalizedColumn = this.hasAccents ? column : this.normalize(column)
+      return `${normalizedColumn} LIKE ${pattern}`
+    }
   }
 }
 
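Note: illustratively, matchExpression produces one of three SQL shapes. With the extension loaded and an accent-free query such as "creme", matchExpression('title') yields "unaccent(title) LIKE '%creme%'"; with an accented query such as "crème" it yields "title LIKE '%crème%'" (the column is matched as-is); and when supportsUnaccent is false it always falls back to a plain "title LIKE '%crème%'". (Example values, not output captured from this commit.)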
server/managers/BinaryManager.js:

@@ -76,18 +76,27 @@ class ZippedAssetDownloader {
   async extractFiles(zipPath, filesToExtract, destDir) {
     const zip = new StreamZip.async({ file: zipPath })
 
-    for (const file of filesToExtract) {
-      const outputPath = path.join(destDir, file.outputFileName)
-      await zip.extract(file.pathInsideZip, outputPath)
-      Logger.debug(`[ZippedAssetDownloader] Extracted file ${file.pathInsideZip} to ${outputPath}`)
+    try {
+      for (const file of filesToExtract) {
+        const outputPath = path.join(destDir, file.outputFileName)
+        if (!(await zip.entry(file.pathInsideZip))) {
+          Logger.error(`[ZippedAssetDownloader] File ${file.pathInsideZip} not found in zip file ${zipPath}`)
+          continue
+        }
+        await zip.extract(file.pathInsideZip, outputPath)
+        Logger.debug(`[ZippedAssetDownloader] Extracted file ${file.pathInsideZip} to ${outputPath}`)
 
-      // Set executable permission for Linux
-      if (process.platform !== 'win32') {
-        await fs.chmod(outputPath, 0o755)
+        // Set executable permission for Linux
+        if (process.platform !== 'win32') {
+          await fs.chmod(outputPath, 0o755)
+        }
       }
+    } catch (error) {
+      Logger.error('[ZippedAssetDownloader] Error extracting files:', error)
+      throw error
+    } finally {
+      await zip.close()
     }
-
-    await zip.close()
   }
 
   async downloadAndExtractFiles(releaseTag, assetName, filesToExtract, destDir) {
@@ -99,7 +108,6 @@ class ZippedAssetDownloader {
       await this.extractFiles(zipPath, filesToExtract, destDir)
     } catch (error) {
       Logger.error(`[ZippedAssetDownloader] Error downloading or extracting files: ${error.message}`)
-      throw error
     } finally {
       if (zipPath) await fs.remove(zipPath)
     }
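Note: zip.entry, zip.extract and zip.close above are node-stream-zip's promise API. A self-contained sketch of the same pattern (the helper name and paths are illustrative):

const StreamZip = require('node-stream-zip')

// Extract a single named entry, tolerating a missing entry and always closing the handle.
async function extractEntry(zipPath, entryName, outputPath) {
  const zip = new StreamZip.async({ file: zipPath })
  try {
    if (!(await zip.entry(entryName))) return false // entry() resolves undefined when absent
    await zip.extract(entryName, outputPath)
    return true
  } finally {
    await zip.close()
  }
}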
@@ -164,14 +172,67 @@ class FFBinariesDownloader extends ZippedAssetDownloader {
   }
 }
 
+class NunicodeDownloader extends ZippedAssetDownloader {
+  constructor() {
+    super()
+    this.platformSuffix = this.getPlatformSuffix()
+  }
+
+  getPlatformSuffix() {
+    const platform = process.platform
+    const arch = process.arch
+
+    if (platform === 'win32' && arch === 'x64') {
+      return 'win-x64'
+    } else if (platform === 'darwin' && (arch === 'x64' || arch === 'arm64')) {
+      return 'osx-arm64'
+    } else if (platform === 'linux' && arch === 'x64') {
+      return 'linux-x64'
+    } else if (platform === 'linux' && arch === 'arm64') {
+      return 'linux-arm64'
+    }
+
+    return null
+  }
+
+  async getAssetUrl(releaseTag, assetName) {
+    return `https://github.com/mikiher/nunicode-sqlite/releases/download/v${releaseTag}/${assetName}`
+  }
+
+  getAssetName(binaryName, releaseTag) {
+    if (!this.platformSuffix) {
+      throw new Error(`[NunicodeDownloader] Platform ${process.platform}-${process.arch} not supported`)
+    }
+    return `${binaryName}-${this.platformSuffix}.zip`
+  }
+
+  getAssetFileName(binaryName) {
+    if (process.platform === 'win32') {
+      return `${binaryName}.dll`
+    } else if (process.platform === 'darwin') {
+      return `${binaryName}.dylib`
+    } else if (process.platform === 'linux') {
+      return `${binaryName}.so`
+    }
+
+    throw new Error(`[NunicodeDownloader] Platform ${process.platform} not supported`)
+  }
+}
+
 class Binary {
-  constructor(name, type, envVariable, validVersions, source) {
+  constructor(name, type, envVariable, validVersions, source, required = true) {
+    if (!name) throw new Error('Binary name is required')
     this.name = name
+    if (!type) throw new Error('Binary type is required')
     this.type = type
+    if (!envVariable) throw new Error('Binary environment variable name is required')
     this.envVariable = envVariable
+    if (!validVersions || !validVersions.length) throw new Error(`No valid versions specified for ${type} ${name}. At least one version is required.`)
     this.validVersions = validVersions
+    if (!source || !(source instanceof ZippedAssetDownloader)) throw new Error('Binary source is required, and must be an instance of ZippedAssetDownloader')
     this.source = source
     this.fileName = this.getFileName()
+    this.required = required
     this.exec = exec
   }
 
@@ -205,37 +266,65 @@ class Binary {
     }
   }
 
-  async isGood(binaryPath) {
-    if (!binaryPath || !(await fs.pathExists(binaryPath))) return false
-    if (!this.validVersions.length) return true
-    if (this.type === 'library') return true
-    try {
-      const { stdout } = await this.exec('"' + binaryPath + '"' + ' -version')
-      const version = stdout.match(/version\s([\d\.]+)/)?.[1]
-      if (!version) return false
-      return this.validVersions.some((validVersion) => version.startsWith(validVersion))
-    } catch (err) {
-      Logger.error(`[Binary] Failed to check version of ${binaryPath}`)
-      return false
-    }
-  }
+  async isLibraryVersionValid(libraryPath) {
+    try {
+      const versionFilePath = libraryPath + '.ver'
+      if (!(await fs.pathExists(versionFilePath))) return false
+      const version = (await fs.readFile(versionFilePath, 'utf8')).trim()
+      return this.validVersions.some((validVersion) => version.startsWith(validVersion))
+    } catch (err) {
+      Logger.error(`[Binary] Failed to check version of ${libraryPath}`, err)
+      return false
+    }
+  }
+
+  async isExecutableVersionValid(executablePath) {
+    try {
+      const { stdout } = await this.exec('"' + executablePath + '"' + ' -version')
+      const version = stdout.match(/version\s([\d\.]+)/)?.[1]
+      if (!version) return false
+      return this.validVersions.some((validVersion) => version.startsWith(validVersion))
+    } catch (err) {
+      Logger.error(`[Binary] Failed to check version of ${executablePath}`, err)
+      return false
+    }
+  }
+
+  async isGood(binaryPath) {
+    try {
+      if (!binaryPath || !(await fs.pathExists(binaryPath))) return false
+      if (this.type === 'library') return await this.isLibraryVersionValid(binaryPath)
+      else if (this.type === 'executable') return await this.isExecutableVersionValid(binaryPath)
+      else return true
+    } catch (err) {
+      Logger.error(`[Binary] Failed to check ${this.type} ${this.name} at ${binaryPath}`, err)
+      return false
+    }
+  }
 
   async download(destination) {
-    await this.source.downloadBinary(this.name, this.validVersions[0], destination)
+    const version = this.validVersions[0]
+    try {
+      await this.source.downloadBinary(this.name, version, destination)
+      // if it's a library, write the version string to a file
+      if (this.type === 'library') {
+        const libraryPath = path.join(destination, this.fileName)
+        await fs.writeFile(libraryPath + '.ver', version)
+      }
+    } catch (err) {
+      Logger.error(`[Binary] Failed to download ${this.type} ${this.name} version ${version} to ${destination}`, err)
+    }
   }
 }
 
 const ffbinaries = new FFBinariesDownloader()
-module.exports.ffbinaries = ffbinaries // for testing
-//const sqlean = new SQLeanDownloader()
-//module.exports.sqlean = sqlean // for testing
+const nunicode = new NunicodeDownloader()
 
 class BinaryManager {
   defaultRequiredBinaries = [
     new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries), // ffmpeg executable
-    new Binary('ffprobe', 'executable', 'FFPROBE_PATH', ['5.1'], ffbinaries) // ffprobe executable
-    // TODO: Temporarily disabled due to db corruption issues
-    // new Binary('unicode', 'library', 'SQLEAN_UNICODE_PATH', ['0.24.2'], sqlean) // sqlean unicode extension
+    new Binary('ffprobe', 'executable', 'FFPROBE_PATH', ['5.1'], ffbinaries), // ffprobe executable
+    new Binary('libnusqlite3', 'library', 'NUSQLITE3_PATH', ['1.1'], nunicode, false) // nunicode sqlite3 extension
   ]
 
   constructor(requiredBinaries = this.defaultRequiredBinaries) {
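Note: the '.ver' sidecar introduced above exists because a shared library cannot report its own version the way ffmpeg does with -version, so the downloaded version string is persisted next to the binary and re-read on startup. A condensed sketch of that round trip (helper names are illustrative; fs is fs-extra, as in the surrounding code):

const fs = require('fs-extra')
const path = require('path')

// After a library download: record which version was fetched.
async function writeVersionSidecar(destination, fileName, version) {
  await fs.writeFile(path.join(destination, fileName) + '.ver', version)
}

// On startup: accept the library only if the recorded version matches a valid prefix.
async function isSidecarVersionValid(libraryPath, validVersions) {
  const versionFilePath = libraryPath + '.ver'
  if (!(await fs.pathExists(versionFilePath))) return false
  const version = (await fs.readFile(versionFilePath, 'utf8')).trim()
  return validVersions.some((validVersion) => version.startsWith(validVersion))
}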
@@ -249,7 +338,7 @@ class BinaryManager {
     // Optional skip binaries check
     if (process.env.SKIP_BINARIES_CHECK === '1') {
       for (const binary of this.requiredBinaries) {
-        if (!process.env[binary.envVariable]) {
+        if (!process.env[binary.envVariable] && binary.required) {
          await Logger.fatal(`[BinaryManager] Environment variable ${binary.envVariable} must be set`)
           process.exit(1)
         }
@@ -265,21 +354,37 @@
     await this.removeOldBinaries(missingBinaries)
     await this.install(missingBinaries)
     const missingBinariesAfterInstall = await this.findRequiredBinaries()
-    if (missingBinariesAfterInstall.length) {
-      Logger.error(`[BinaryManager] Failed to find or install required binaries: ${missingBinariesAfterInstall.join(', ')}`)
+    const missingRequiredBinryNames = missingBinariesAfterInstall.filter((binary) => binary.required).map((binary) => binary.name)
+    if (missingRequiredBinryNames.length) {
+      Logger.error(`[BinaryManager] Failed to find or install required binaries: ${missingRequiredBinryNames.join(', ')}`)
       process.exit(1)
     }
     this.initialized = true
   }
 
+  /**
+   * Remove binary
+   *
+   * @param {string} destination
+   * @param {Binary} binary
+   */
   async removeBinary(destination, binary) {
-    const binaryPath = path.join(destination, binary.fileName)
-    if (await fs.pathExists(binaryPath)) {
-      Logger.debug(`[BinaryManager] Removing binary: ${binaryPath}`)
-      await fs.remove(binaryPath)
+    try {
+      const binaryPath = path.join(destination, binary.fileName)
+      if (await fs.pathExists(binaryPath)) {
+        Logger.debug(`[BinaryManager] Removing binary: ${binaryPath}`)
+        await fs.remove(binaryPath)
+      }
+    } catch (err) {
+      Logger.error(`[BinaryManager] Error removing binary: ${binaryPath}`)
     }
   }
 
+  /**
+   * Remove old binaries
+   *
+   * @param {Binary[]} binaries
+   */
   async removeOldBinaries(binaries) {
     for (const binary of binaries) {
       await this.removeBinary(this.mainInstallDir, binary)
@@ -290,26 +395,31 @@
   /**
    * Find required binaries and return array of binary names that are missing
    *
-   * @returns {Promise<string[]>}
+   * @returns {Promise<Binary[]>} Array of missing binaries
    */
   async findRequiredBinaries() {
     const missingBinaries = []
     for (const binary of this.requiredBinaries) {
       const binaryPath = await binary.find(this.mainInstallDir, this.altInstallDir)
       if (binaryPath) {
-        Logger.info(`[BinaryManager] Found valid binary ${binary.name} at ${binaryPath}`)
+        Logger.info(`[BinaryManager] Found valid ${binary.type} ${binary.name} at ${binaryPath}`)
         if (process.env[binary.envVariable] !== binaryPath) {
           Logger.info(`[BinaryManager] Updating process.env.${binary.envVariable}`)
           process.env[binary.envVariable] = binaryPath
         }
       } else {
-        Logger.info(`[BinaryManager] ${binary.name} not found or version too old`)
+        Logger.info(`[BinaryManager] ${binary.name} not found or not a valid version`)
         missingBinaries.push(binary)
       }
     }
     return missingBinaries
   }
 
+  /**
+   * Install missing binaries
+   *
+   * @param {Binary[]} binaries
+   */
   async install(binaries) {
     if (!binaries.length) return
     Logger.info(`[BinaryManager] Installing binaries: ${binaries.map((binary) => binary.name).join(', ')}`)
@@ -323,3 +433,5 @@
 
 module.exports = BinaryManager
 module.exports.Binary = Binary // for testing
+module.exports.ffbinaries = ffbinaries // for testing
+module.exports.nunicode = nunicode // for testing
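Note: as the test suite further below exercises, the whole flow hangs off BinaryManager's init. A usage sketch (the require path is an assumption):

const BinaryManager = require('./server/managers/BinaryManager')

async function start() {
  // Finds ffmpeg/ffprobe/libnusqlite3, installs whatever is missing, and exits
  // the process only when a binary marked required cannot be found or installed.
  await new BinaryManager().init()
}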
server/utils/queries/authorFilters.js:

@@ -54,13 +54,13 @@ module.exports = {
    * Search authors
    *
    * @param {string} libraryId
-   * @param {string} query
+   * @param {Database.TextQuery} query
    * @param {number} limit
    * @param {number} offset
    * @returns {Promise<Object[]>} oldAuthor with numBooks
    */
   async search(libraryId, query, limit, offset) {
-    const matchAuthor = Database.matchExpression('name', query)
+    const matchAuthor = query.matchExpression('name')
     const authors = await Database.authorModel.findAll({
       where: {
         [Sequelize.Op.and]: [Sequelize.literal(matchAuthor), { libraryId }]
server/utils/queries/libraryItemsBookFilters.js:

@@ -975,10 +975,10 @@ module.exports = {
   async search(user, library, query, limit, offset) {
     const userPermissionBookWhere = this.getUserPermissionBookWhereQuery(user)
 
-    const normalizedQuery = query
+    const textSearchQuery = await Database.createTextSearchQuery(query)
 
-    const matchTitle = Database.matchExpression('title', normalizedQuery)
-    const matchSubtitle = Database.matchExpression('subtitle', normalizedQuery)
+    const matchTitle = textSearchQuery.matchExpression('title')
+    const matchSubtitle = textSearchQuery.matchExpression('subtitle')
 
     // Search title, subtitle, asin, isbn
     const books = await Database.bookModel.findAll({
@@ -1041,7 +1041,7 @@ module.exports = {
       })
     }
 
-    const matchJsonValue = Database.matchExpression('json_each.value', normalizedQuery)
+    const matchJsonValue = textSearchQuery.matchExpression('json_each.value')
 
     // Search narrators
     const narratorMatches = []
@@ -1095,7 +1095,7 @@
     }
 
     // Search series
-    const matchName = Database.matchExpression('name', normalizedQuery)
+    const matchName = textSearchQuery.matchExpression('name')
     const allSeries = await Database.seriesModel.findAll({
       where: {
         [Sequelize.Op.and]: [
@@ -1136,7 +1136,7 @@
     }
 
     // Search authors
-    const authorMatches = await authorFilters.search(library.id, normalizedQuery, limit, offset)
+    const authorMatches = await authorFilters.search(library.id, textSearchQuery, limit, offset)
 
     return {
       book: itemMatches,
server/utils/queries/libraryItemsPodcastFilters.js:

@@ -315,9 +315,10 @@ module.exports = {
   async search(user, library, query, limit, offset) {
     const userPermissionPodcastWhere = this.getUserPermissionPodcastWhereQuery(user)
 
-    const normalizedQuery = query
-    const matchTitle = Database.matchExpression('title', normalizedQuery)
-    const matchAuthor = Database.matchExpression('author', normalizedQuery)
+    const textSearchQuery = await Database.createTextSearchQuery(query)
+
+    const matchTitle = textSearchQuery.matchExpression('title')
+    const matchAuthor = textSearchQuery.matchExpression('author')
 
     // Search title, author, itunesId, itunesArtistId
     const podcasts = await Database.podcastModel.findAll({
@@ -366,7 +367,7 @@
       })
     }
 
-    const matchJsonValue = Database.matchExpression('json_each.value', normalizedQuery)
+    const matchJsonValue = textSearchQuery.matchExpression('json_each.value')
 
     // Search tags
     const tagMatches = []
BinaryManager unit tests:

@@ -85,6 +85,25 @@ describe('BinaryManager', () => {
       expect(exitStub.calledOnce).to.be.true
       expect(exitStub.calledWith(1)).to.be.true
     })
+
+    it('should not exit if binaries are not found but not required', async () => {
+      const ffmpegBinary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries)
+      const ffprobeBinary = new Binary('ffprobe', 'executable', 'FFPROBE_PATH', ['5.1'], ffbinaries, false)
+      const requiredBinaries = [ffmpegBinary]
+      const missingBinaries = [ffprobeBinary]
+      const missingBinariesAfterInstall = [ffprobeBinary]
+      findStub.onFirstCall().resolves(missingBinaries)
+      findStub.onSecondCall().resolves(missingBinariesAfterInstall)
+      binaryManager.requiredBinaries = requiredBinaries
+
+      await binaryManager.init()
+
+      expect(findStub.calledTwice).to.be.true
+      expect(installStub.calledOnce).to.be.true
+      expect(removeOldBinariesStub.calledOnce).to.be.true
+      expect(errorStub.called).to.be.false
+      expect(exitStub.called).to.be.false
+    })
   })
 
   describe('findRequiredBinaries', () => {
@@ -296,6 +315,7 @@ describe('Binary', () => {
   describe('isGood', () => {
     let binary
     let fsPathExistsStub
+    let fsReadFileStub
     let execStub
 
     const binaryPath = '/path/to/binary'
@@ -305,11 +325,13 @@
     beforeEach(() => {
       binary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', goodVersions, ffbinaries)
       fsPathExistsStub = sinon.stub(fs, 'pathExists')
+      fsReadFileStub = sinon.stub(fs, 'readFile')
      execStub = sinon.stub(binary, 'exec')
     })
 
     afterEach(() => {
       fsPathExistsStub.restore()
+      fsReadFileStub.restore()
       execStub.restore()
     })
 
@@ -388,6 +410,53 @@
       expect(execStub.calledOnce).to.be.true
       expect(execStub.calledWith(execCommand)).to.be.true
     })
+
+    it('should check library version file', async () => {
+      const binary = new Binary('libavcodec', 'library', 'FFMPEG_PATH', ['5.1'], ffbinaries)
+      fsReadFileStub.resolves('5.1.2 ')
+      fsPathExistsStub.onFirstCall().resolves(true)
+      fsPathExistsStub.onSecondCall().resolves(true)
+
+      const result = await binary.isGood(binaryPath)
+
+      expect(result).to.be.true
+      expect(fsPathExistsStub.calledTwice).to.be.true
+      expect(fsPathExistsStub.firstCall.args[0]).to.be.equal(binaryPath)
+      expect(fsPathExistsStub.secondCall.args[0]).to.be.equal(binaryPath + '.ver')
+      expect(fsReadFileStub.calledOnce).to.be.true
+      expect(fsReadFileStub.calledWith(binaryPath + '.ver'), 'utf8').to.be.true
+    })
+
+    it('should return false if library version file does not exist', async () => {
+      const binary = new Binary('libavcodec', 'library', 'FFMPEG_PATH', ['5.1'], ffbinaries)
+      fsReadFileStub.resolves('5.1.2 ')
+      fsPathExistsStub.onFirstCall().resolves(true)
+      fsPathExistsStub.onSecondCall().resolves(false)
+
+      const result = await binary.isGood(binaryPath)
+
+      expect(result).to.be.false
+      expect(fsPathExistsStub.calledTwice).to.be.true
+      expect(fsPathExistsStub.firstCall.args[0]).to.be.equal(binaryPath)
+      expect(fsPathExistsStub.secondCall.args[0]).to.be.equal(binaryPath + '.ver')
+      expect(fsReadFileStub.called).to.be.false
+    })
+
+    it('should return false if library version does not match a valid version', async () => {
+      const binary = new Binary('libavcodec', 'library', 'FFMPEG_PATH', ['5.1'], ffbinaries)
+      fsReadFileStub.resolves('5.2.1 ')
+      fsPathExistsStub.onFirstCall().resolves(true)
+      fsPathExistsStub.onSecondCall().resolves(true)
+
+      const result = await binary.isGood(binaryPath)
+
+      expect(result).to.be.false
+      expect(fsPathExistsStub.calledTwice).to.be.true
+      expect(fsPathExistsStub.firstCall.args[0]).to.be.equal(binaryPath)
+      expect(fsPathExistsStub.secondCall.args[0]).to.be.equal(binaryPath + '.ver')
+      expect(fsReadFileStub.calledOnce).to.be.true
+      expect(fsReadFileStub.calledWith(binaryPath + '.ver'), 'utf8').to.be.true
+    })
   })
 
   describe('getFileName', () => {
@@ -452,4 +521,43 @@
       expect(result).to.equal('ffmpeg')
     })
   })
+
+  describe('download', () => {
+    let binary
+    let downloadBinaryStub
+    let fsWriteFileStub
+
+    beforeEach(() => {
+      binary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries)
+      downloadBinaryStub = sinon.stub(binary.source, 'downloadBinary')
+      fsWriteFileStub = sinon.stub(fs, 'writeFile')
+    })
+
+    afterEach(() => {
+      downloadBinaryStub.restore()
+      fsWriteFileStub.restore()
+    })
+
+    it('should call downloadBinary with the correct parameters', async () => {
+      const destination = '/path/to/destination'
+
+      await binary.download(destination)
+
+      expect(downloadBinaryStub.calledOnce).to.be.true
+      expect(downloadBinaryStub.calledWith('ffmpeg', '5.1', destination)).to.be.true
+    })
+
+    it('should write a version file for libraries', async () => {
+      const binary = new Binary('libavcodec', 'library', 'FFMPEG_PATH', ['5.1'], ffbinaries)
+      const destination = '/path/to/destination'
+      const versionFilePath = path.join(destination, binary.fileName) + '.ver'
+
+      await binary.download(destination)
+
+      expect(downloadBinaryStub.calledOnce).to.be.true
+      expect(downloadBinaryStub.calledWith('libavcodec', '5.1', destination)).to.be.true
+      expect(fsWriteFileStub.calledOnce).to.be.true
+      expect(fsWriteFileStub.calledWith(versionFilePath, '5.1')).to.be.true
+    })
+  })
 })