mirror of
https://github.com/advplyr/audiobookshelf.git
synced 2024-12-20 19:06:06 +01:00
Merge pull request #3199 from mikiher/unaccent
Support accent-insensitive search using SQLean unicode sqlite3 extension
This commit is contained in:
commit
6183001fca
3
.gitignore
vendored
3
.gitignore
vendored
@ -15,8 +15,9 @@
|
||||
/.nyc_output/
|
||||
/ffmpeg*
|
||||
/ffprobe*
|
||||
/unicode*
|
||||
|
||||
sw.*
|
||||
.DS_STORE
|
||||
.idea/*
|
||||
tailwind.compiled.css
|
||||
tailwind.compiled.css
|
||||
|
@ -2,7 +2,6 @@
|
||||
set -e
|
||||
set -o pipefail
|
||||
|
||||
FFMPEG_INSTALL_DIR="/usr/lib/audiobookshelf-ffmpeg"
|
||||
DEFAULT_DATA_DIR="/usr/share/audiobookshelf"
|
||||
CONFIG_PATH="/etc/default/audiobookshelf"
|
||||
DEFAULT_PORT=13378
|
||||
@ -46,25 +45,6 @@ add_group() {
|
||||
fi
|
||||
}
|
||||
|
||||
install_ffmpeg() {
|
||||
echo "Starting FFMPEG Install"
|
||||
|
||||
WGET="wget https://johnvansickle.com/ffmpeg/builds/ffmpeg-git-amd64-static.tar.xz --output-document=ffmpeg-git-amd64-static.tar.xz"
|
||||
|
||||
if ! cd "$FFMPEG_INSTALL_DIR"; then
|
||||
echo "Creating ffmpeg install dir at $FFMPEG_INSTALL_DIR"
|
||||
mkdir "$FFMPEG_INSTALL_DIR"
|
||||
chown -R 'audiobookshelf:audiobookshelf' "$FFMPEG_INSTALL_DIR"
|
||||
cd "$FFMPEG_INSTALL_DIR"
|
||||
fi
|
||||
|
||||
$WGET
|
||||
tar xvf ffmpeg-git-amd64-static.tar.xz --strip-components=1 --no-same-owner
|
||||
rm ffmpeg-git-amd64-static.tar.xz
|
||||
|
||||
echo "Good to go on Ffmpeg... hopefully"
|
||||
}
|
||||
|
||||
setup_config() {
|
||||
if [ -f "$CONFIG_PATH" ]; then
|
||||
echo "Existing config found."
|
||||
@ -83,8 +63,6 @@ setup_config() {
|
||||
|
||||
config_text="METADATA_PATH=$DEFAULT_DATA_DIR/metadata
|
||||
CONFIG_PATH=$DEFAULT_DATA_DIR/config
|
||||
FFMPEG_PATH=$FFMPEG_INSTALL_DIR/ffmpeg
|
||||
FFPROBE_PATH=$FFMPEG_INSTALL_DIR/ffprobe
|
||||
PORT=$DEFAULT_PORT
|
||||
HOST=$DEFAULT_HOST"
|
||||
|
||||
@ -101,5 +79,3 @@ add_group 'audiobookshelf' ''
|
||||
add_user 'audiobookshelf' '' 'audiobookshelf' 'audiobookshelf user-daemon' '/bin/false'
|
||||
|
||||
setup_config
|
||||
|
||||
install_ffmpeg
|
||||
|
@ -207,6 +207,7 @@ class Database {
|
||||
|
||||
try {
|
||||
await this.sequelize.authenticate()
|
||||
await this.loadExtensions([process.env.SQLEAN_UNICODE_PATH])
|
||||
Logger.info(`[Database] Db connection was successful`)
|
||||
return true
|
||||
} catch (error) {
|
||||
@ -215,6 +216,34 @@ class Database {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string[]} extensions paths to extension binaries
|
||||
*/
|
||||
async loadExtensions(extensions) {
|
||||
// This is a hack to get the db connection for loading extensions.
|
||||
// The proper way would be to use the 'afterConnect' hook, but that hook is never called for sqlite due to a bug in sequelize.
|
||||
// See https://github.com/sequelize/sequelize/issues/12487
|
||||
// This is not a public API and may break in the future.
|
||||
const db = await this.sequelize.dialect.connectionManager.getConnection()
|
||||
if (typeof db?.loadExtension !== 'function') throw new Error('Failed to get db connection for loading extensions')
|
||||
|
||||
for (const ext of extensions) {
|
||||
Logger.info(`[Database] Loading extension ${ext}`)
|
||||
await new Promise((resolve, reject) => {
|
||||
db.loadExtension(ext, (err) => {
|
||||
if (err) {
|
||||
Logger.error(`[Database] Failed to load extension ${ext}`, err)
|
||||
reject(err)
|
||||
return
|
||||
}
|
||||
Logger.info(`[Database] Successfully loaded extension ${ext}`)
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Disconnect from db
|
||||
*/
|
||||
@ -801,6 +830,39 @@ class Database {
|
||||
Logger.warn(`Removed ${badSessionsRemoved} sessions that were 3 seconds or less`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} value
|
||||
* @returns {string}
|
||||
*/
|
||||
normalize(value) {
|
||||
return `lower(unaccent(${value}))`
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} query
|
||||
* @returns {Promise<string>}
|
||||
*/
|
||||
async getNormalizedQuery(query) {
|
||||
const escapedQuery = this.sequelize.escape(query)
|
||||
const normalizedQuery = this.normalize(escapedQuery)
|
||||
const normalizedQueryResult = await this.sequelize.query(`SELECT ${normalizedQuery} as normalized_query`)
|
||||
return normalizedQueryResult[0][0].normalized_query
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} column
|
||||
* @param {string} normalizedQuery
|
||||
* @returns {string}
|
||||
*/
|
||||
matchExpression(column, normalizedQuery) {
|
||||
const normalizedPattern = this.sequelize.escape(`%${normalizedQuery}%`)
|
||||
const normalizedColumn = this.normalize(column)
|
||||
return `${normalizedColumn} LIKE ${normalizedPattern}`
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new Database()
|
||||
|
@ -108,6 +108,8 @@ class Server {
|
||||
|
||||
await this.playbackSessionManager.removeOrphanStreams()
|
||||
|
||||
await this.binaryManager.init()
|
||||
|
||||
await Database.init(false)
|
||||
|
||||
await Logger.logManager.init()
|
||||
@ -128,11 +130,6 @@ class Server {
|
||||
await this.cronManager.init(libraries)
|
||||
this.apiCacheManager.init()
|
||||
|
||||
// Download ffmpeg & ffprobe if not found (Currently only in use for Windows installs)
|
||||
if (global.isWin || Logger.isDev) {
|
||||
await this.binaryManager.init()
|
||||
}
|
||||
|
||||
if (Database.serverSettings.scannerDisableWatcher) {
|
||||
Logger.info(`[Server] Watcher is disabled`)
|
||||
this.watcher.disabled = true
|
||||
|
@ -1,315 +0,0 @@
|
||||
const os = require('os')
|
||||
const path = require('path')
|
||||
const axios = require('axios')
|
||||
const fse = require('../fsExtra')
|
||||
const async = require('../async')
|
||||
const StreamZip = require('../nodeStreamZip')
|
||||
const { finished } = require('stream/promises')
|
||||
|
||||
var API_URL = 'https://ffbinaries.com/api/v1'
|
||||
|
||||
var RUNTIME_CACHE = {}
|
||||
var errorMsgs = {
|
||||
connectionIssues: 'Couldn\'t connect to ffbinaries.com API. Check your Internet connection.',
|
||||
parsingVersionData: 'Couldn\'t parse retrieved version data.',
|
||||
parsingVersionList: 'Couldn\'t parse the list of available versions.',
|
||||
notFound: 'Requested data not found.',
|
||||
incorrectVersionParam: '"version" parameter must be a string.'
|
||||
}
|
||||
|
||||
function ensureDirSync(dir) {
|
||||
try {
|
||||
fse.accessSync(dir)
|
||||
} catch (e) {
|
||||
fse.mkdirSync(dir)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the platform key based on input string
|
||||
*/
|
||||
function resolvePlatform(input) {
|
||||
var rtn = null
|
||||
|
||||
switch (input) {
|
||||
case 'mac':
|
||||
case 'osx':
|
||||
case 'mac-64':
|
||||
case 'osx-64':
|
||||
rtn = 'osx-64'
|
||||
break
|
||||
|
||||
case 'linux':
|
||||
case 'linux-32':
|
||||
rtn = 'linux-32'
|
||||
break
|
||||
|
||||
case 'linux-64':
|
||||
rtn = 'linux-64'
|
||||
break
|
||||
|
||||
case 'linux-arm':
|
||||
case 'linux-armel':
|
||||
rtn = 'linux-armel'
|
||||
break
|
||||
|
||||
case 'linux-armhf':
|
||||
rtn = 'linux-armhf'
|
||||
break
|
||||
|
||||
case 'win':
|
||||
case 'win-32':
|
||||
case 'windows':
|
||||
case 'windows-32':
|
||||
rtn = 'windows-32'
|
||||
break
|
||||
|
||||
case 'win-64':
|
||||
case 'windows-64':
|
||||
rtn = 'windows-64'
|
||||
break
|
||||
|
||||
default:
|
||||
rtn = null
|
||||
}
|
||||
|
||||
return rtn
|
||||
}
|
||||
/**
|
||||
* Detects the platform of the machine the script is executed on.
|
||||
* Object can be provided to detect platform from info derived elsewhere.
|
||||
*
|
||||
* @param {object} osinfo Contains "type" and "arch" properties
|
||||
*/
|
||||
function detectPlatform(osinfo) {
|
||||
var inputIsValid = typeof osinfo === 'object' && typeof osinfo.type === 'string' && typeof osinfo.arch === 'string'
|
||||
var type = (inputIsValid ? osinfo.type : os.type()).toLowerCase()
|
||||
var arch = (inputIsValid ? osinfo.arch : os.arch()).toLowerCase()
|
||||
|
||||
if (type === 'darwin') {
|
||||
return 'osx-64'
|
||||
}
|
||||
|
||||
if (type === 'windows_nt') {
|
||||
return arch === 'x64' ? 'windows-64' : 'windows-32'
|
||||
}
|
||||
|
||||
if (type === 'linux') {
|
||||
if (arch === 'arm' || arch === 'arm64') {
|
||||
return 'linux-armel'
|
||||
}
|
||||
return arch === 'x64' ? 'linux-64' : 'linux-32'
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
/**
|
||||
* Gets the binary filename (appends exe in Windows)
|
||||
*
|
||||
* @param {string} component "ffmpeg", "ffplay", "ffprobe" or "ffserver"
|
||||
* @param {platform} platform "ffmpeg", "ffplay", "ffprobe" or "ffserver"
|
||||
*/
|
||||
function getBinaryFilename(component, platform) {
|
||||
var platformCode = resolvePlatform(platform)
|
||||
if (platformCode === 'windows-32' || platformCode === 'windows-64') {
|
||||
return component + '.exe'
|
||||
}
|
||||
return component
|
||||
}
|
||||
|
||||
function listPlatforms() {
|
||||
return ['osx-64', 'linux-32', 'linux-64', 'linux-armel', 'linux-armhf', 'windows-32', 'windows-64']
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @returns {Promise<string[]>} array of version strings
|
||||
*/
|
||||
function listVersions() {
|
||||
if (RUNTIME_CACHE.versionsAll) {
|
||||
return RUNTIME_CACHE.versionsAll
|
||||
}
|
||||
return axios.get(API_URL).then((res) => {
|
||||
if (!res.data?.versions || !Object.keys(res.data.versions)?.length) {
|
||||
throw new Error(errorMsgs.parsingVersionList)
|
||||
}
|
||||
const versionKeys = Object.keys(res.data.versions)
|
||||
RUNTIME_CACHE.versionsAll = versionKeys
|
||||
return versionKeys
|
||||
})
|
||||
}
|
||||
/**
|
||||
* Gets full data set from ffbinaries.com
|
||||
*/
|
||||
function getVersionData(version) {
|
||||
if (RUNTIME_CACHE[version]) {
|
||||
return RUNTIME_CACHE[version]
|
||||
}
|
||||
|
||||
if (version && typeof version !== 'string') {
|
||||
throw new Error(errorMsgs.incorrectVersionParam)
|
||||
}
|
||||
|
||||
var url = version ? '/version/' + version : '/latest'
|
||||
|
||||
return axios.get(`${API_URL}${url}`).then((res) => {
|
||||
RUNTIME_CACHE[version] = res.data
|
||||
return res.data
|
||||
}).catch((error) => {
|
||||
if (error.response?.status == 404) {
|
||||
throw new Error(errorMsgs.notFound)
|
||||
} else {
|
||||
throw new Error(errorMsgs.connectionIssues)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Download file(s) and save them in the specified directory
|
||||
*/
|
||||
async function downloadUrls(components, urls, opts) {
|
||||
const destinationDir = opts.destination
|
||||
const results = []
|
||||
const remappedUrls = []
|
||||
|
||||
if (components && !Array.isArray(components)) {
|
||||
components = [components]
|
||||
} else if (!components || !Array.isArray(components)) {
|
||||
components = []
|
||||
}
|
||||
|
||||
// returns an array of objects like this: {component: 'ffmpeg', url: 'https://...'}
|
||||
if (typeof urls === 'object') {
|
||||
for (const key in urls) {
|
||||
if (components.includes(key) && urls[key]) {
|
||||
remappedUrls.push({
|
||||
component: key,
|
||||
url: urls[key]
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async function extractZipToDestination(zipFilename) {
|
||||
const oldpath = path.join(destinationDir, zipFilename)
|
||||
const zip = new StreamZip.async({ file: oldpath })
|
||||
const count = await zip.extract(null, destinationDir)
|
||||
await zip.close()
|
||||
}
|
||||
|
||||
|
||||
await async.each(remappedUrls, async function (urlObject) {
|
||||
try {
|
||||
const url = urlObject.url
|
||||
|
||||
const zipFilename = url.split('/').pop()
|
||||
const binFilenameBase = urlObject.component
|
||||
const binFilename = getBinaryFilename(binFilenameBase, opts.platform || detectPlatform())
|
||||
|
||||
let runningTotal = 0
|
||||
let totalFilesize
|
||||
let interval
|
||||
|
||||
|
||||
if (typeof opts.tickerFn === 'function') {
|
||||
opts.tickerInterval = parseInt(opts.tickerInterval, 10)
|
||||
const tickerInterval = (!Number.isNaN(opts.tickerInterval)) ? opts.tickerInterval : 1000
|
||||
const tickData = { filename: zipFilename, progress: 0 }
|
||||
|
||||
// Schedule next ticks
|
||||
interval = setInterval(function () {
|
||||
if (totalFilesize && runningTotal == totalFilesize) {
|
||||
return clearInterval(interval)
|
||||
}
|
||||
tickData.progress = totalFilesize > -1 ? runningTotal / totalFilesize : 0
|
||||
|
||||
opts.tickerFn(tickData)
|
||||
}, tickerInterval)
|
||||
}
|
||||
|
||||
|
||||
// Check if file already exists in target directory
|
||||
const binPath = path.join(destinationDir, binFilename)
|
||||
if (!opts.force && await fse.pathExists(binPath)) {
|
||||
// if the accessSync method doesn't throw we know the binary already exists
|
||||
results.push({
|
||||
filename: binFilename,
|
||||
path: destinationDir,
|
||||
status: 'File exists',
|
||||
code: 'FILE_EXISTS'
|
||||
})
|
||||
clearInterval(interval)
|
||||
return
|
||||
}
|
||||
|
||||
if (opts.quiet) clearInterval(interval)
|
||||
|
||||
const zipPath = path.join(destinationDir, zipFilename)
|
||||
const zipFileTempName = zipPath + '.part'
|
||||
const zipFileFinalName = zipPath
|
||||
|
||||
const response = await axios({
|
||||
url,
|
||||
method: 'GET',
|
||||
responseType: 'stream'
|
||||
})
|
||||
totalFilesize = response.headers?.['content-length'] || []
|
||||
|
||||
const writer = fse.createWriteStream(zipFileTempName)
|
||||
response.data.on('data', (chunk) => {
|
||||
runningTotal += chunk.length
|
||||
})
|
||||
response.data.pipe(writer)
|
||||
await finished(writer)
|
||||
await fse.rename(zipFileTempName, zipFileFinalName)
|
||||
await extractZipToDestination(zipFilename)
|
||||
await fse.remove(zipFileFinalName)
|
||||
|
||||
results.push({
|
||||
filename: binFilename,
|
||||
path: destinationDir,
|
||||
size: Math.floor(totalFilesize / 1024 / 1024 * 1000) / 1000 + 'MB',
|
||||
status: 'File extracted to destination (downloaded from "' + url + '")',
|
||||
code: 'DONE_CLEAN'
|
||||
})
|
||||
} catch (err) {
|
||||
console.error(`Failed to download or extract file for component: ${urlObject.component}`, err)
|
||||
}
|
||||
})
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets binaries for the platform
|
||||
* It will get the data from ffbinaries, pick the correct files
|
||||
* and save it to the specified directory
|
||||
*
|
||||
* @param {Array} components
|
||||
* @param {Object} [opts]
|
||||
*/
|
||||
async function downloadBinaries(components, opts = {}) {
|
||||
var platform = resolvePlatform(opts.platform) || detectPlatform()
|
||||
|
||||
opts.destination = path.resolve(opts.destination || '.')
|
||||
ensureDirSync(opts.destination)
|
||||
|
||||
const versionData = await getVersionData(opts.version)
|
||||
const urls = versionData?.bin?.[platform]
|
||||
if (!urls) {
|
||||
throw new Error('No URLs!')
|
||||
}
|
||||
|
||||
return await downloadUrls(components, urls, opts)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
downloadBinaries: downloadBinaries,
|
||||
getVersionData: getVersionData,
|
||||
listVersions: listVersions,
|
||||
listPlatforms: listPlatforms,
|
||||
detectPlatform: detectPlatform,
|
||||
resolvePlatform: resolvePlatform,
|
||||
getBinaryFilename: getBinaryFilename
|
||||
}
|
@ -2,25 +2,274 @@ const child_process = require('child_process')
|
||||
const { promisify } = require('util')
|
||||
const exec = promisify(child_process.exec)
|
||||
const path = require('path')
|
||||
const axios = require('axios')
|
||||
const which = require('../libs/which')
|
||||
const fs = require('../libs/fsExtra')
|
||||
const ffbinaries = require('../libs/ffbinaries')
|
||||
const Logger = require('../Logger')
|
||||
const fileUtils = require('../utils/fileUtils')
|
||||
const StreamZip = require('../libs/nodeStreamZip')
|
||||
|
||||
class GithubAssetDownloader {
|
||||
constructor(owner, repo) {
|
||||
this.owner = owner
|
||||
this.repo = repo
|
||||
this.assetCache = {}
|
||||
}
|
||||
|
||||
async getAssetUrl(releaseTag, assetName) {
|
||||
// Check if the assets information is already cached for the release tag
|
||||
if (this.assetCache[releaseTag]) {
|
||||
Logger.debug(`[GithubAssetDownloader] Repo ${this.repo} release ${releaseTag}: assets found in cache.`)
|
||||
} else {
|
||||
// Get the release information
|
||||
const releaseUrl = `https://api.github.com/repos/${this.owner}/${this.repo}/releases/tags/${releaseTag}`
|
||||
const releaseResponse = await axios.get(releaseUrl, {
|
||||
headers: {
|
||||
Accept: 'application/vnd.github.v3+json',
|
||||
'User-Agent': 'axios'
|
||||
}
|
||||
})
|
||||
|
||||
// Cache the assets information for the release tag
|
||||
this.assetCache[releaseTag] = releaseResponse.data.assets
|
||||
Logger.debug(`[GithubAssetDownloader] Repo ${this.repo} release ${releaseTag}: assets fetched from API.`)
|
||||
}
|
||||
|
||||
// Find the asset URL
|
||||
const assets = this.assetCache[releaseTag]
|
||||
const asset = assets.find((asset) => asset.name === assetName)
|
||||
if (!asset) {
|
||||
throw new Error(`[GithubAssetDownloader] Repo ${this.repo} release ${releaseTag}: asset ${assetName} not found`)
|
||||
}
|
||||
|
||||
return asset.browser_download_url
|
||||
}
|
||||
|
||||
async downloadAsset(assetUrl, destDir) {
|
||||
const zipPath = path.join(destDir, 'temp.zip')
|
||||
const writer = fs.createWriteStream(zipPath)
|
||||
|
||||
const assetResponse = await axios({
|
||||
url: assetUrl,
|
||||
method: 'GET',
|
||||
responseType: 'stream'
|
||||
})
|
||||
|
||||
assetResponse.data.pipe(writer)
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
writer.on('finish', () => {
|
||||
Logger.debug(`[GithubAssetDownloader] Downloaded asset ${assetUrl} to ${zipPath}`)
|
||||
resolve()
|
||||
})
|
||||
writer.on('error', (err) => {
|
||||
Logger.error(`[GithubAssetDownloader] Error downloading asset ${assetUrl}: ${err.message}`)
|
||||
reject(err)
|
||||
})
|
||||
})
|
||||
|
||||
return zipPath
|
||||
}
|
||||
|
||||
async extractFiles(zipPath, filesToExtract, destDir) {
|
||||
const zip = new StreamZip.async({ file: zipPath })
|
||||
|
||||
for (const file of filesToExtract) {
|
||||
const outputPath = path.join(destDir, file.outputFileName)
|
||||
await zip.extract(file.pathInsideZip, outputPath)
|
||||
Logger.debug(`[GithubAssetDownloader] Extracted file ${file.pathInsideZip} to ${outputPath}`)
|
||||
|
||||
// Set executable permission for Linux
|
||||
if (process.platform !== 'win32') {
|
||||
await fs.chmod(outputPath, 0o755)
|
||||
}
|
||||
}
|
||||
|
||||
await zip.close()
|
||||
}
|
||||
|
||||
async downloadAndExtractFiles(releaseTag, assetName, filesToExtract, destDir) {
|
||||
let zipPath
|
||||
try {
|
||||
await fs.ensureDir(destDir)
|
||||
const assetUrl = await this.getAssetUrl(releaseTag, assetName)
|
||||
zipPath = await this.downloadAsset(assetUrl, destDir)
|
||||
await this.extractFiles(zipPath, filesToExtract, destDir)
|
||||
} catch (error) {
|
||||
Logger.error(`[GithubAssetDownloader] Error downloading or extracting files: ${error.message}`)
|
||||
throw error
|
||||
} finally {
|
||||
if (zipPath) await fs.remove(zipPath)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class FFBinariesDownloader extends GithubAssetDownloader {
|
||||
constructor() {
|
||||
super('ffbinaries', 'ffbinaries-prebuilt')
|
||||
}
|
||||
|
||||
getPlatformSuffix() {
|
||||
const platform = process.platform
|
||||
const arch = process.arch
|
||||
|
||||
switch (platform) {
|
||||
case 'win32':
|
||||
return 'win-64'
|
||||
case 'darwin':
|
||||
return 'macos-64'
|
||||
case 'linux':
|
||||
switch (arch) {
|
||||
case 'x64':
|
||||
return 'linux-64'
|
||||
case 'x32':
|
||||
case 'ia32':
|
||||
return 'linux-32'
|
||||
case 'arm64':
|
||||
return 'linux-arm-64'
|
||||
case 'arm':
|
||||
return 'linux-armhf-32'
|
||||
default:
|
||||
throw new Error(`Unsupported architecture: ${arch}`)
|
||||
}
|
||||
default:
|
||||
throw new Error(`Unsupported platform: ${platform}`)
|
||||
}
|
||||
}
|
||||
|
||||
async downloadBinary(binaryName, releaseTag, destDir) {
|
||||
const platformSuffix = this.getPlatformSuffix()
|
||||
const assetName = `${binaryName}-${releaseTag}-${platformSuffix}.zip`
|
||||
const fileName = process.platform === 'win32' ? `${binaryName}.exe` : binaryName
|
||||
const filesToExtract = [{ pathInsideZip: fileName, outputFileName: fileName }]
|
||||
releaseTag = `v${releaseTag}`
|
||||
|
||||
await this.downloadAndExtractFiles(releaseTag, assetName, filesToExtract, destDir)
|
||||
}
|
||||
}
|
||||
|
||||
class SQLeanDownloader extends GithubAssetDownloader {
|
||||
constructor() {
|
||||
super('nalgeon', 'sqlean')
|
||||
}
|
||||
|
||||
getPlatformSuffix() {
|
||||
const platform = process.platform
|
||||
const arch = process.arch
|
||||
|
||||
switch (platform) {
|
||||
case 'win32':
|
||||
return arch === 'x64' ? 'win-x64' : 'win-x86'
|
||||
case 'darwin':
|
||||
return arch === 'arm64' ? 'macos-arm64' : 'macos-x86'
|
||||
case 'linux':
|
||||
return arch === 'arm64' ? 'linux-arm64' : 'linux-x86'
|
||||
default:
|
||||
throw new Error(`Unsupported platform or architecture: ${platform}, ${arch}`)
|
||||
}
|
||||
}
|
||||
|
||||
getLibraryName(binaryName) {
|
||||
const platform = process.platform
|
||||
|
||||
switch (platform) {
|
||||
case 'win32':
|
||||
return `${binaryName}.dll`
|
||||
case 'darwin':
|
||||
return `${binaryName}.dylib`
|
||||
case 'linux':
|
||||
return `${binaryName}.so`
|
||||
default:
|
||||
throw new Error(`Unsupported platform: ${platform}`)
|
||||
}
|
||||
}
|
||||
|
||||
async downloadBinary(binaryName, releaseTag, destDir) {
|
||||
const platformSuffix = this.getPlatformSuffix()
|
||||
const assetName = `sqlean-${platformSuffix}.zip`
|
||||
const fileName = this.getLibraryName(binaryName)
|
||||
const filesToExtract = [{ pathInsideZip: fileName, outputFileName: fileName }]
|
||||
|
||||
await this.downloadAndExtractFiles(releaseTag, assetName, filesToExtract, destDir)
|
||||
}
|
||||
}
|
||||
|
||||
class Binary {
|
||||
constructor(name, type, envVariable, validVersions, source) {
|
||||
this.name = name
|
||||
this.type = type
|
||||
this.envVariable = envVariable
|
||||
this.validVersions = validVersions
|
||||
this.source = source
|
||||
this.fileName = this.getFileName()
|
||||
this.exec = exec
|
||||
}
|
||||
|
||||
async find(mainInstallDir, altInstallDir) {
|
||||
// 1. check path specified in environment variable
|
||||
const defaultPath = process.env[this.envVariable]
|
||||
if (await this.isGood(defaultPath)) return defaultPath
|
||||
// 2. find the first instance of the binary in the PATH environment variable
|
||||
if (this.type === 'executable') {
|
||||
const whichPath = which.sync(this.fileName, { nothrow: true })
|
||||
if (await this.isGood(whichPath)) return whichPath
|
||||
}
|
||||
// 3. check main install path (binary root dir)
|
||||
const mainInstallPath = path.join(mainInstallDir, this.fileName)
|
||||
if (await this.isGood(mainInstallPath)) return mainInstallPath
|
||||
// 4. check alt install path (/config)
|
||||
const altInstallPath = path.join(altInstallDir, this.fileName)
|
||||
if (await this.isGood(altInstallPath)) return altInstallPath
|
||||
return null
|
||||
}
|
||||
|
||||
getFileName() {
|
||||
if (this.type === 'executable') {
|
||||
return this.name + (process.platform == 'win32' ? '.exe' : '')
|
||||
} else if (this.type === 'library') {
|
||||
return this.name + (process.platform == 'win32' ? '.dll' : '.so')
|
||||
} else {
|
||||
return this.name
|
||||
}
|
||||
}
|
||||
|
||||
async isGood(binaryPath) {
|
||||
if (!binaryPath || !(await fs.pathExists(binaryPath))) return false
|
||||
if (!this.validVersions.length) return true
|
||||
if (this.type === 'library') return true
|
||||
try {
|
||||
const { stdout } = await this.exec('"' + binaryPath + '"' + ' -version')
|
||||
const version = stdout.match(/version\s([\d\.]+)/)?.[1]
|
||||
if (!version) return false
|
||||
return this.validVersions.some((validVersion) => version.startsWith(validVersion))
|
||||
} catch (err) {
|
||||
Logger.error(`[Binary] Failed to check version of ${binaryPath}`)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
async download(destination) {
|
||||
await this.source.downloadBinary(this.name, this.validVersions[0], destination)
|
||||
}
|
||||
}
|
||||
|
||||
const ffbinaries = new FFBinariesDownloader()
|
||||
module.exports.ffbinaries = ffbinaries // for testing
|
||||
const sqlean = new SQLeanDownloader()
|
||||
module.exports.sqlean = sqlean // for testing
|
||||
|
||||
class BinaryManager {
|
||||
|
||||
defaultRequiredBinaries = [
|
||||
{ name: 'ffmpeg', envVariable: 'FFMPEG_PATH', validVersions: ['5.1'] },
|
||||
{ name: 'ffprobe', envVariable: 'FFPROBE_PATH', validVersions: ['5.1'] }
|
||||
new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries), // ffmpeg executable
|
||||
new Binary('ffprobe', 'executable', 'FFPROBE_PATH', ['5.1'], ffbinaries), // ffprobe executable
|
||||
new Binary('unicode', 'library', 'SQLEAN_UNICODE_PATH', ['0.24.2'], sqlean) // sqlean unicode extension
|
||||
]
|
||||
|
||||
constructor(requiredBinaries = this.defaultRequiredBinaries) {
|
||||
this.requiredBinaries = requiredBinaries
|
||||
this.mainInstallPath = process.pkg ? path.dirname(process.execPath) : global.appRoot
|
||||
this.altInstallPath = global.ConfigPath
|
||||
this.mainInstallDir = process.pkg ? path.dirname(process.execPath) : global.appRoot
|
||||
this.altInstallDir = global.ConfigPath
|
||||
this.initialized = false
|
||||
this.exec = exec
|
||||
}
|
||||
|
||||
async init() {
|
||||
@ -44,36 +293,30 @@ class BinaryManager {
|
||||
this.initialized = true
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove old/invalid binaries in main or alt install path
|
||||
*
|
||||
* @param {string[]} binaryNames
|
||||
*/
|
||||
async removeOldBinaries(binaryNames) {
|
||||
for (const binaryName of binaryNames) {
|
||||
const executable = this.getExecutableFileName(binaryName)
|
||||
const mainInstallPath = path.join(this.mainInstallPath, executable)
|
||||
if (await fs.pathExists(mainInstallPath)) {
|
||||
Logger.debug(`[BinaryManager] Removing old binary: ${mainInstallPath}`)
|
||||
await fs.remove(mainInstallPath)
|
||||
}
|
||||
const altInstallPath = path.join(this.altInstallPath, executable)
|
||||
if (await fs.pathExists(altInstallPath)) {
|
||||
Logger.debug(`[BinaryManager] Removing old binary: ${altInstallPath}`)
|
||||
await fs.remove(altInstallPath)
|
||||
}
|
||||
async removeBinary(destination, binary) {
|
||||
const binaryPath = path.join(destination, binary.fileName)
|
||||
if (await fs.pathExists(binaryPath)) {
|
||||
Logger.debug(`[BinaryManager] Removing binary: ${binaryPath}`)
|
||||
await fs.remove(binaryPath)
|
||||
}
|
||||
}
|
||||
|
||||
async removeOldBinaries(binaries) {
|
||||
for (const binary of binaries) {
|
||||
await this.removeBinary(this.mainInstallDir, binary)
|
||||
await this.removeBinary(this.altInstallDir, binary)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find required binaries and return array of binary names that are missing
|
||||
*
|
||||
*
|
||||
* @returns {Promise<string[]>}
|
||||
*/
|
||||
async findRequiredBinaries() {
|
||||
const missingBinaries = []
|
||||
for (const binary of this.requiredBinaries) {
|
||||
const binaryPath = await this.findBinary(binary.name, binary.envVariable, binary.validVersions)
|
||||
const binaryPath = await binary.find(this.mainInstallDir, this.altInstallDir)
|
||||
if (binaryPath) {
|
||||
Logger.info(`[BinaryManager] Found valid binary ${binary.name} at ${binaryPath}`)
|
||||
if (process.env[binary.envVariable] !== binaryPath) {
|
||||
@ -82,79 +325,22 @@ class BinaryManager {
|
||||
}
|
||||
} else {
|
||||
Logger.info(`[BinaryManager] ${binary.name} not found or version too old`)
|
||||
missingBinaries.push(binary.name)
|
||||
missingBinaries.push(binary)
|
||||
}
|
||||
}
|
||||
return missingBinaries
|
||||
}
|
||||
|
||||
/**
|
||||
* Find absolute path for binary
|
||||
*
|
||||
* @param {string} name
|
||||
* @param {string} envVariable
|
||||
* @param {string[]} [validVersions]
|
||||
* @returns {Promise<string>} Path to binary
|
||||
*/
|
||||
async findBinary(name, envVariable, validVersions = []) {
|
||||
const executable = this.getExecutableFileName(name)
|
||||
// 1. check path specified in environment variable
|
||||
const defaultPath = process.env[envVariable]
|
||||
if (await this.isBinaryGood(defaultPath, validVersions)) return defaultPath
|
||||
// 2. find the first instance of the binary in the PATH environment variable
|
||||
const whichPath = which.sync(executable, { nothrow: true })
|
||||
if (await this.isBinaryGood(whichPath, validVersions)) return whichPath
|
||||
// 3. check main install path (binary root dir)
|
||||
const mainInstallPath = path.join(this.mainInstallPath, executable)
|
||||
if (await this.isBinaryGood(mainInstallPath, validVersions)) return mainInstallPath
|
||||
// 4. check alt install path (/config)
|
||||
const altInstallPath = path.join(this.altInstallPath, executable)
|
||||
if (await this.isBinaryGood(altInstallPath, validVersions)) return altInstallPath
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Check binary path exists and optionally check version is valid
|
||||
*
|
||||
* @param {string} binaryPath
|
||||
* @param {string[]} [validVersions]
|
||||
* @returns {Promise<boolean>}
|
||||
*/
|
||||
async isBinaryGood(binaryPath, validVersions = []) {
|
||||
if (!binaryPath || !await fs.pathExists(binaryPath)) return false
|
||||
if (!validVersions.length) return true
|
||||
try {
|
||||
const { stdout } = await this.exec('"' + binaryPath + '"' + ' -version')
|
||||
const version = stdout.match(/version\s([\d\.]+)/)?.[1]
|
||||
if (!version) return false
|
||||
return validVersions.some(validVersion => version.startsWith(validVersion))
|
||||
} catch (err) {
|
||||
Logger.error(`[BinaryManager] Failed to check version of ${binaryPath}`)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string[]} binaries
|
||||
*/
|
||||
async install(binaries) {
|
||||
if (!binaries.length) return
|
||||
Logger.info(`[BinaryManager] Installing binaries: ${binaries.join(', ')}`)
|
||||
let destination = await fileUtils.isWritable(this.mainInstallPath) ? this.mainInstallPath : this.altInstallPath
|
||||
await ffbinaries.downloadBinaries(binaries, { destination, version: '5.1', force: true })
|
||||
Logger.info(`[BinaryManager] Installing binaries: ${binaries.map((binary) => binary.name).join(', ')}`)
|
||||
let destination = (await fileUtils.isWritable(this.mainInstallDir)) ? this.mainInstallDir : this.altInstallDir
|
||||
for (const binary of binaries) {
|
||||
await binary.download(destination)
|
||||
}
|
||||
Logger.info(`[BinaryManager] Binaries installed to ${destination}`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Append .exe to binary name for Windows
|
||||
*
|
||||
* @param {string} name
|
||||
* @returns {string}
|
||||
*/
|
||||
getExecutableFileName(name) {
|
||||
return name + (process.platform == 'win32' ? '.exe' : '')
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = BinaryManager
|
||||
module.exports = BinaryManager
|
||||
module.exports.Binary = Binary // for testing
|
||||
|
@ -60,12 +60,10 @@ module.exports = {
|
||||
* @returns {Promise<Object[]>} oldAuthor with numBooks
|
||||
*/
|
||||
async search(libraryId, query, limit, offset) {
|
||||
const matchAuthor = Database.matchExpression('name', query)
|
||||
const authors = await Database.authorModel.findAll({
|
||||
where: {
|
||||
name: {
|
||||
[Sequelize.Op.substring]: query
|
||||
},
|
||||
libraryId
|
||||
[Sequelize.Op.and]: [Sequelize.literal(matchAuthor), { libraryId }]
|
||||
},
|
||||
attributes: {
|
||||
include: [[Sequelize.literal('(SELECT count(*) FROM bookAuthors ba WHERE ba.authorId = author.id)'), 'numBooks']]
|
||||
|
@ -975,21 +975,18 @@ module.exports = {
|
||||
async search(oldUser, oldLibrary, query, limit, offset) {
|
||||
const userPermissionBookWhere = this.getUserPermissionBookWhereQuery(oldUser)
|
||||
|
||||
const normalizedQuery = await Database.getNormalizedQuery(query)
|
||||
|
||||
const matchTitle = Database.matchExpression('title', normalizedQuery)
|
||||
const matchSubtitle = Database.matchExpression('subtitle', normalizedQuery)
|
||||
|
||||
// Search title, subtitle, asin, isbn
|
||||
const books = await Database.bookModel.findAll({
|
||||
where: [
|
||||
{
|
||||
[Sequelize.Op.or]: [
|
||||
{
|
||||
title: {
|
||||
[Sequelize.Op.substring]: query
|
||||
}
|
||||
},
|
||||
{
|
||||
subtitle: {
|
||||
[Sequelize.Op.substring]: query
|
||||
}
|
||||
},
|
||||
Sequelize.literal(matchTitle),
|
||||
Sequelize.literal(matchSubtitle),
|
||||
{
|
||||
asin: {
|
||||
[Sequelize.Op.substring]: query
|
||||
@ -1044,11 +1041,12 @@ module.exports = {
|
||||
})
|
||||
}
|
||||
|
||||
const matchJsonValue = Database.matchExpression('json_each.value', normalizedQuery)
|
||||
|
||||
// Search narrators
|
||||
const narratorMatches = []
|
||||
const [narratorResults] = await Database.sequelize.query(`SELECT value, count(*) AS numBooks FROM books b, libraryItems li, json_each(b.narrators) WHERE json_valid(b.narrators) AND json_each.value LIKE :query AND b.id = li.mediaId AND li.libraryId = :libraryId GROUP BY value LIMIT :limit OFFSET :offset;`, {
|
||||
const [narratorResults] = await Database.sequelize.query(`SELECT value, count(*) AS numBooks FROM books b, libraryItems li, json_each(b.narrators) WHERE json_valid(b.narrators) AND ${matchJsonValue} AND b.id = li.mediaId AND li.libraryId = :libraryId GROUP BY value LIMIT :limit OFFSET :offset;`, {
|
||||
replacements: {
|
||||
query: `%${query}%`,
|
||||
libraryId: oldLibrary.id,
|
||||
limit,
|
||||
offset
|
||||
@ -1064,9 +1062,8 @@ module.exports = {
|
||||
|
||||
// Search tags
|
||||
const tagMatches = []
|
||||
const [tagResults] = await Database.sequelize.query(`SELECT value, count(*) AS numItems FROM books b, libraryItems li, json_each(b.tags) WHERE json_valid(b.tags) AND json_each.value LIKE :query AND b.id = li.mediaId AND li.libraryId = :libraryId GROUP BY value ORDER BY numItems DESC LIMIT :limit OFFSET :offset;`, {
|
||||
const [tagResults] = await Database.sequelize.query(`SELECT value, count(*) AS numItems FROM books b, libraryItems li, json_each(b.tags) WHERE json_valid(b.tags) AND ${matchJsonValue} AND b.id = li.mediaId AND li.libraryId = :libraryId GROUP BY value ORDER BY numItems DESC LIMIT :limit OFFSET :offset;`, {
|
||||
replacements: {
|
||||
query: `%${query}%`,
|
||||
libraryId: oldLibrary.id,
|
||||
limit,
|
||||
offset
|
||||
@ -1082,9 +1079,8 @@ module.exports = {
|
||||
|
||||
// Search genres
|
||||
const genreMatches = []
|
||||
const [genreResults] = await Database.sequelize.query(`SELECT value, count(*) AS numItems FROM books b, libraryItems li, json_each(b.genres) WHERE json_valid(b.genres) AND json_each.value LIKE :query AND b.id = li.mediaId AND li.libraryId = :libraryId GROUP BY value ORDER BY numItems DESC LIMIT :limit OFFSET :offset;`, {
|
||||
const [genreResults] = await Database.sequelize.query(`SELECT value, count(*) AS numItems FROM books b, libraryItems li, json_each(b.genres) WHERE json_valid(b.genres) AND ${matchJsonValue} AND b.id = li.mediaId AND li.libraryId = :libraryId GROUP BY value ORDER BY numItems DESC LIMIT :limit OFFSET :offset;`, {
|
||||
replacements: {
|
||||
query: `%${query}%`,
|
||||
libraryId: oldLibrary.id,
|
||||
limit,
|
||||
offset
|
||||
@ -1099,12 +1095,15 @@ module.exports = {
|
||||
}
|
||||
|
||||
// Search series
|
||||
const matchName = Database.matchExpression('name', normalizedQuery)
|
||||
const allSeries = await Database.seriesModel.findAll({
|
||||
where: {
|
||||
name: {
|
||||
[Sequelize.Op.substring]: query
|
||||
},
|
||||
libraryId: oldLibrary.id
|
||||
[Sequelize.Op.and]: [
|
||||
Sequelize.literal(matchName),
|
||||
{
|
||||
libraryId: oldLibrary.id
|
||||
}
|
||||
]
|
||||
},
|
||||
replacements: userPermissionBookWhere.replacements,
|
||||
include: {
|
||||
@ -1137,7 +1136,7 @@ module.exports = {
|
||||
}
|
||||
|
||||
// Search authors
|
||||
const authorMatches = await authorFilters.search(oldLibrary.id, query, limit, offset)
|
||||
const authorMatches = await authorFilters.search(oldLibrary.id, normalizedQuery, limit, offset)
|
||||
|
||||
return {
|
||||
book: itemMatches,
|
||||
|
@ -313,21 +313,18 @@ module.exports = {
|
||||
*/
|
||||
async search(oldUser, oldLibrary, query, limit, offset) {
|
||||
const userPermissionPodcastWhere = this.getUserPermissionPodcastWhereQuery(oldUser)
|
||||
|
||||
const normalizedQuery = await Database.getNormalizedQuery(query)
|
||||
const matchTitle = Database.matchExpression('title', normalizedQuery)
|
||||
const matchAuthor = Database.matchExpression('author', normalizedQuery)
|
||||
|
||||
// Search title, author, itunesId, itunesArtistId
|
||||
const podcasts = await Database.podcastModel.findAll({
|
||||
where: [
|
||||
{
|
||||
[Sequelize.Op.or]: [
|
||||
{
|
||||
title: {
|
||||
[Sequelize.Op.substring]: query
|
||||
}
|
||||
},
|
||||
{
|
||||
author: {
|
||||
[Sequelize.Op.substring]: query
|
||||
}
|
||||
},
|
||||
Sequelize.literal(matchTitle),
|
||||
Sequelize.literal(matchAuthor),
|
||||
{
|
||||
itunesId: {
|
||||
[Sequelize.Op.substring]: query
|
||||
@ -368,11 +365,12 @@ module.exports = {
|
||||
})
|
||||
}
|
||||
|
||||
const matchJsonValue = Database.matchExpression('json_each.value', normalizedQuery)
|
||||
|
||||
// Search tags
|
||||
const tagMatches = []
|
||||
const [tagResults] = await Database.sequelize.query(`SELECT value, count(*) AS numItems FROM podcasts p, libraryItems li, json_each(p.tags) WHERE json_valid(p.tags) AND json_each.value LIKE :query AND p.id = li.mediaId AND li.libraryId = :libraryId GROUP BY value ORDER BY numItems DESC LIMIT :limit OFFSET :offset;`, {
|
||||
const [tagResults] = await Database.sequelize.query(`SELECT value, count(*) AS numItems FROM podcasts p, libraryItems li, json_each(p.tags) WHERE json_valid(p.tags) AND ${matchJsonValue} AND p.id = li.mediaId AND li.libraryId = :libraryId GROUP BY value ORDER BY numItems DESC LIMIT :limit OFFSET :offset;`, {
|
||||
replacements: {
|
||||
query: `%${query}%`,
|
||||
libraryId: oldLibrary.id,
|
||||
limit,
|
||||
offset
|
||||
@ -388,9 +386,8 @@ module.exports = {
|
||||
|
||||
// Search genres
|
||||
const genreMatches = []
|
||||
const [genreResults] = await Database.sequelize.query(`SELECT value, count(*) AS numItems FROM podcasts p, libraryItems li, json_each(p.genres) WHERE json_valid(p.genres) AND json_each.value LIKE :query AND p.id = li.mediaId AND li.libraryId = :libraryId GROUP BY value ORDER BY numItems DESC LIMIT :limit OFFSET :offset;`, {
|
||||
const [genreResults] = await Database.sequelize.query(`SELECT value, count(*) AS numItems FROM podcasts p, libraryItems li, json_each(p.genres) WHERE json_valid(p.genres) AND ${matchJsonValue} AND p.id = li.mediaId AND li.libraryId = :libraryId GROUP BY value ORDER BY numItems DESC LIMIT :limit OFFSET :offset;`, {
|
||||
replacements: {
|
||||
query: `%${query}%`,
|
||||
libraryId: oldLibrary.id,
|
||||
limit,
|
||||
offset
|
||||
|
@ -3,9 +3,9 @@ const sinon = require('sinon')
|
||||
const fs = require('../../../server/libs/fsExtra')
|
||||
const fileUtils = require('../../../server/utils/fileUtils')
|
||||
const which = require('../../../server/libs/which')
|
||||
const ffbinaries = require('../../../server/libs/ffbinaries')
|
||||
const path = require('path')
|
||||
const BinaryManager = require('../../../server/managers/BinaryManager')
|
||||
const { Binary, ffbinaries } = require('../../../server/managers/BinaryManager')
|
||||
|
||||
const expect = chai.expect
|
||||
|
||||
@ -38,7 +38,7 @@ describe('BinaryManager', () => {
|
||||
|
||||
it('should not install binaries if they are already found', async () => {
|
||||
findStub.resolves([])
|
||||
|
||||
|
||||
await binaryManager.init()
|
||||
|
||||
expect(installStub.called).to.be.false
|
||||
@ -49,10 +49,14 @@ describe('BinaryManager', () => {
|
||||
})
|
||||
|
||||
it('should install missing binaries', async () => {
|
||||
const missingBinaries = ['ffmpeg', 'ffprobe']
|
||||
const ffmpegBinary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries)
|
||||
const ffprobeBinary = new Binary('ffprobe', 'executable', 'FFPROBE_PATH', ['5.1'], ffbinaries)
|
||||
const requiredBinaries = [ffmpegBinary, ffprobeBinary]
|
||||
const missingBinaries = [ffprobeBinary]
|
||||
const missingBinariesAfterInstall = []
|
||||
findStub.onFirstCall().resolves(missingBinaries)
|
||||
findStub.onSecondCall().resolves(missingBinariesAfterInstall)
|
||||
binaryManager.requiredBinaries = requiredBinaries
|
||||
|
||||
await binaryManager.init()
|
||||
|
||||
@ -64,8 +68,11 @@ describe('BinaryManager', () => {
|
||||
})
|
||||
|
||||
it('exit if binaries are not found after installation', async () => {
|
||||
const missingBinaries = ['ffmpeg', 'ffprobe']
|
||||
const missingBinariesAfterInstall = ['ffmpeg', 'ffprobe']
|
||||
const ffmpegBinary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries)
|
||||
const ffprobeBinary = new Binary('ffprobe', 'executable', 'FFPROBE_PATH', ['5.1'], ffbinaries)
|
||||
const requiredBinaries = [ffmpegBinary, ffprobeBinary]
|
||||
const missingBinaries = [ffprobeBinary]
|
||||
const missingBinariesAfterInstall = [ffprobeBinary]
|
||||
findStub.onFirstCall().resolves(missingBinaries)
|
||||
findStub.onSecondCall().resolves(missingBinariesAfterInstall)
|
||||
|
||||
@ -80,14 +87,15 @@ describe('BinaryManager', () => {
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
describe('findRequiredBinaries', () => {
|
||||
let findBinaryStub
|
||||
let ffmpegBinary
|
||||
|
||||
beforeEach(() => {
|
||||
const requiredBinaries = [{ name: 'ffmpeg', envVariable: 'FFMPEG_PATH' }]
|
||||
ffmpegBinary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries)
|
||||
const requiredBinaries = [ffmpegBinary]
|
||||
binaryManager = new BinaryManager(requiredBinaries)
|
||||
findBinaryStub = sinon.stub(binaryManager, 'findBinary')
|
||||
findBinaryStub = sinon.stub(ffmpegBinary, 'find')
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
@ -108,8 +116,8 @@ describe('BinaryManager', () => {
|
||||
})
|
||||
|
||||
it('should add missing binaries to result', async () => {
|
||||
const missingBinaries = ['ffmpeg']
|
||||
delete process.env.FFMPEG_PATH
|
||||
const missingBinaries = [ffmpegBinary]
|
||||
delete process.env.FFMPEG_PATH
|
||||
findBinaryStub.resolves(null)
|
||||
|
||||
const result = await binaryManager.findRequiredBinaries()
|
||||
@ -119,22 +127,25 @@ describe('BinaryManager', () => {
|
||||
expect(process.env.FFMPEG_PATH).to.be.undefined
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
describe('install', () => {
|
||||
let isWritableStub
|
||||
let downloadBinariesStub
|
||||
let downloadBinaryStub
|
||||
let ffmpegBinary
|
||||
|
||||
beforeEach(() => {
|
||||
binaryManager = new BinaryManager()
|
||||
ffmpegBinary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries)
|
||||
const requiredBinaries = [ffmpegBinary]
|
||||
binaryManager = new BinaryManager(requiredBinaries)
|
||||
isWritableStub = sinon.stub(fileUtils, 'isWritable')
|
||||
downloadBinariesStub = sinon.stub(ffbinaries, 'downloadBinaries')
|
||||
binaryManager.mainInstallPath = '/path/to/main/install'
|
||||
binaryManager.altInstallPath = '/path/to/alt/install'
|
||||
downloadBinaryStub = sinon.stub(ffmpegBinary, 'download')
|
||||
binaryManager.mainInstallDir = '/path/to/main/install'
|
||||
binaryManager.altInstallDir = '/path/to/alt/install'
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
isWritableStub.restore()
|
||||
downloadBinariesStub.restore()
|
||||
downloadBinaryStub.restore()
|
||||
})
|
||||
|
||||
it('should not install binaries if no binaries are passed', async () => {
|
||||
@ -143,240 +154,302 @@ describe('BinaryManager', () => {
|
||||
await binaryManager.install(binaries)
|
||||
|
||||
expect(isWritableStub.called).to.be.false
|
||||
expect(downloadBinariesStub.called).to.be.false
|
||||
expect(downloadBinaryStub.called).to.be.false
|
||||
})
|
||||
|
||||
it('should install binaries in main install path if has access', async () => {
|
||||
const binaries = ['ffmpeg']
|
||||
const destination = binaryManager.mainInstallPath
|
||||
const binaries = [ffmpegBinary]
|
||||
const destination = binaryManager.mainInstallDir
|
||||
isWritableStub.withArgs(destination).resolves(true)
|
||||
downloadBinariesStub.resolves()
|
||||
|
||||
downloadBinaryStub.resolves()
|
||||
|
||||
await binaryManager.install(binaries)
|
||||
|
||||
expect(isWritableStub.calledOnce).to.be.true
|
||||
expect(downloadBinariesStub.calledOnce).to.be.true
|
||||
expect(downloadBinariesStub.calledWith(binaries, sinon.match({ destination: destination }))).to.be.true
|
||||
expect(downloadBinaryStub.calledOnce).to.be.true
|
||||
expect(downloadBinaryStub.calledWith(destination)).to.be.true
|
||||
})
|
||||
|
||||
it('should install binaries in alt install path if has no access to main', async () => {
|
||||
const binaries = ['ffmpeg']
|
||||
const mainDestination = binaryManager.mainInstallPath
|
||||
const destination = binaryManager.altInstallPath
|
||||
const binaries = [ffmpegBinary]
|
||||
const mainDestination = binaryManager.mainInstallDir
|
||||
const destination = binaryManager.altInstallDir
|
||||
isWritableStub.withArgs(mainDestination).resolves(false)
|
||||
downloadBinariesStub.resolves()
|
||||
|
||||
downloadBinaryStub.resolves()
|
||||
|
||||
await binaryManager.install(binaries)
|
||||
|
||||
expect(isWritableStub.calledOnce).to.be.true
|
||||
expect(downloadBinariesStub.calledOnce).to.be.true
|
||||
expect(downloadBinariesStub.calledWith(binaries, sinon.match({ destination: destination }))).to.be.true
|
||||
expect(downloadBinaryStub.calledOnce).to.be.true
|
||||
expect(downloadBinaryStub.calledWith(destination)).to.be.true
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('findBinary', () => {
|
||||
let binaryManager
|
||||
let isBinaryGoodStub
|
||||
let whichSyncStub
|
||||
let mainInstallPath
|
||||
let altInstallPath
|
||||
describe('Binary', () => {
|
||||
describe('find', () => {
|
||||
let binary
|
||||
let isGoodStub
|
||||
let whichSyncStub
|
||||
let mainInstallPath
|
||||
let altInstallPath
|
||||
|
||||
const name = 'ffmpeg'
|
||||
const envVariable = 'FFMPEG_PATH'
|
||||
const defaultPath = '/path/to/ffmpeg'
|
||||
const executable = name + (process.platform == 'win32' ? '.exe' : '')
|
||||
const whichPath = '/usr/bin/ffmpeg'
|
||||
const name = 'ffmpeg'
|
||||
const envVariable = 'FFMPEG_PATH'
|
||||
const defaultPath = '/path/to/ffmpeg'
|
||||
const executable = name + (process.platform == 'win32' ? '.exe' : '')
|
||||
const whichPath = '/usr/bin/ffmpeg'
|
||||
|
||||
beforeEach(() => {
|
||||
binary = new Binary(name, 'executable', envVariable, ['5.1'], ffbinaries)
|
||||
isGoodStub = sinon.stub(binary, 'isGood')
|
||||
whichSyncStub = sinon.stub(which, 'sync')
|
||||
binary.mainInstallDir = '/path/to/main/install'
|
||||
mainInstallPath = path.join(binary.mainInstallDir, executable)
|
||||
binary.altInstallDir = '/path/to/alt/install'
|
||||
altInstallPath = path.join(binary.altInstallDir, executable)
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
binaryManager = new BinaryManager()
|
||||
isBinaryGoodStub = sinon.stub(binaryManager, 'isBinaryGood')
|
||||
whichSyncStub = sinon.stub(which, 'sync')
|
||||
binaryManager.mainInstallPath = '/path/to/main/install'
|
||||
mainInstallPath = path.join(binaryManager.mainInstallPath, executable)
|
||||
binaryManager.altInstallPath = '/path/to/alt/install'
|
||||
altInstallPath = path.join(binaryManager.altInstallPath, executable)
|
||||
afterEach(() => {
|
||||
isGoodStub.restore()
|
||||
whichSyncStub.restore()
|
||||
})
|
||||
|
||||
it('should return the defaultPath if it exists and is a good binary', async () => {
|
||||
process.env[envVariable] = defaultPath
|
||||
isGoodStub.withArgs(defaultPath).resolves(true)
|
||||
|
||||
const result = await binary.find(binary.mainInstallDir, binary.altInstallDir)
|
||||
|
||||
expect(result).to.equal(defaultPath)
|
||||
expect(isGoodStub.calledOnce).to.be.true
|
||||
expect(isGoodStub.calledWith(defaultPath)).to.be.true
|
||||
})
|
||||
|
||||
it('should return the whichPath if it exists and is a good binary', async () => {
|
||||
delete process.env[envVariable]
|
||||
isGoodStub.withArgs(undefined).resolves(false)
|
||||
whichSyncStub.returns(whichPath)
|
||||
isGoodStub.withArgs(whichPath).resolves(true)
|
||||
|
||||
const result = await binary.find(binary.mainInstallDir, binary.altInstallDir)
|
||||
|
||||
expect(result).to.equal(whichPath)
|
||||
expect(isGoodStub.calledTwice).to.be.true
|
||||
expect(isGoodStub.calledWith(undefined)).to.be.true
|
||||
expect(isGoodStub.calledWith(whichPath)).to.be.true
|
||||
})
|
||||
|
||||
it('should return the mainInstallPath if it exists and is a good binary', async () => {
|
||||
delete process.env[envVariable]
|
||||
isGoodStub.withArgs(undefined).resolves(false)
|
||||
whichSyncStub.returns(null)
|
||||
isGoodStub.withArgs(null).resolves(false)
|
||||
isGoodStub.withArgs(mainInstallPath).resolves(true)
|
||||
|
||||
const result = await binary.find(binary.mainInstallDir, binary.altInstallDir)
|
||||
|
||||
expect(result).to.equal(mainInstallPath)
|
||||
expect(isGoodStub.callCount).to.be.equal(3)
|
||||
expect(isGoodStub.calledWith(undefined)).to.be.true
|
||||
expect(isGoodStub.calledWith(null)).to.be.true
|
||||
expect(isGoodStub.calledWith(mainInstallPath)).to.be.true
|
||||
})
|
||||
|
||||
it('should return the altInstallPath if it exists and is a good binary', async () => {
|
||||
delete process.env[envVariable]
|
||||
isGoodStub.withArgs(undefined).resolves(false)
|
||||
whichSyncStub.returns(null)
|
||||
isGoodStub.withArgs(null).resolves(false)
|
||||
isGoodStub.withArgs(mainInstallPath).resolves(false)
|
||||
isGoodStub.withArgs(altInstallPath).resolves(true)
|
||||
|
||||
const result = await binary.find(binary.mainInstallDir, binary.altInstallDir)
|
||||
|
||||
expect(result).to.equal(altInstallPath)
|
||||
expect(isGoodStub.callCount).to.be.equal(4)
|
||||
expect(isGoodStub.calledWith(undefined)).to.be.true
|
||||
expect(isGoodStub.calledWith(null)).to.be.true
|
||||
expect(isGoodStub.calledWith(mainInstallPath)).to.be.true
|
||||
expect(isGoodStub.calledWith(altInstallPath)).to.be.true
|
||||
})
|
||||
|
||||
it('should return null if no good binary is found', async () => {
|
||||
delete process.env[envVariable]
|
||||
isGoodStub.withArgs(undefined).resolves(false)
|
||||
whichSyncStub.returns(null)
|
||||
isGoodStub.withArgs(null).resolves(false)
|
||||
isGoodStub.withArgs(mainInstallPath).resolves(false)
|
||||
isGoodStub.withArgs(altInstallPath).resolves(false)
|
||||
|
||||
const result = await binary.find(binary.mainInstallDir, binary.altInstallDir)
|
||||
|
||||
expect(result).to.be.null
|
||||
expect(isGoodStub.callCount).to.be.equal(4)
|
||||
expect(isGoodStub.calledWith(undefined)).to.be.true
|
||||
expect(isGoodStub.calledWith(null)).to.be.true
|
||||
expect(isGoodStub.calledWith(mainInstallPath)).to.be.true
|
||||
expect(isGoodStub.calledWith(altInstallPath)).to.be.true
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
isBinaryGoodStub.restore()
|
||||
whichSyncStub.restore()
|
||||
describe('isGood', () => {
|
||||
let binary
|
||||
let fsPathExistsStub
|
||||
let execStub
|
||||
|
||||
const binaryPath = '/path/to/binary'
|
||||
const execCommand = '"' + binaryPath + '"' + ' -version'
|
||||
const goodVersions = ['5.1', '6']
|
||||
|
||||
beforeEach(() => {
|
||||
binary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', goodVersions, ffbinaries)
|
||||
fsPathExistsStub = sinon.stub(fs, 'pathExists')
|
||||
execStub = sinon.stub(binary, 'exec')
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
fsPathExistsStub.restore()
|
||||
execStub.restore()
|
||||
})
|
||||
|
||||
it('should return false if binaryPath is falsy', async () => {
|
||||
fsPathExistsStub.resolves(true)
|
||||
|
||||
const result = await binary.isGood(null)
|
||||
|
||||
expect(result).to.be.false
|
||||
expect(fsPathExistsStub.called).to.be.false
|
||||
expect(execStub.called).to.be.false
|
||||
})
|
||||
|
||||
it('should return false if binaryPath does not exist', async () => {
|
||||
fsPathExistsStub.resolves(false)
|
||||
|
||||
const result = await binary.isGood(binaryPath)
|
||||
|
||||
expect(result).to.be.false
|
||||
expect(fsPathExistsStub.calledOnce).to.be.true
|
||||
expect(fsPathExistsStub.calledWith(binaryPath)).to.be.true
|
||||
expect(execStub.called).to.be.false
|
||||
})
|
||||
|
||||
it('should return false if failed to check version of binary', async () => {
|
||||
fsPathExistsStub.resolves(true)
|
||||
execStub.rejects(new Error('Failed to execute command'))
|
||||
|
||||
const result = await binary.isGood(binaryPath)
|
||||
|
||||
expect(result).to.be.false
|
||||
expect(fsPathExistsStub.calledOnce).to.be.true
|
||||
expect(fsPathExistsStub.calledWith(binaryPath)).to.be.true
|
||||
expect(execStub.calledOnce).to.be.true
|
||||
expect(execStub.calledWith(execCommand)).to.be.true
|
||||
})
|
||||
|
||||
it('should return false if version is not found', async () => {
|
||||
const stdout = 'Some output without version'
|
||||
fsPathExistsStub.resolves(true)
|
||||
execStub.resolves({ stdout })
|
||||
|
||||
const result = await binary.isGood(binaryPath)
|
||||
|
||||
expect(result).to.be.false
|
||||
expect(fsPathExistsStub.calledOnce).to.be.true
|
||||
expect(fsPathExistsStub.calledWith(binaryPath)).to.be.true
|
||||
expect(execStub.calledOnce).to.be.true
|
||||
expect(execStub.calledWith(execCommand)).to.be.true
|
||||
})
|
||||
|
||||
it('should return false if version is found but does not match a good version', async () => {
|
||||
const stdout = 'version 1.2.3'
|
||||
fsPathExistsStub.resolves(true)
|
||||
execStub.resolves({ stdout })
|
||||
|
||||
const result = await binary.isGood(binaryPath)
|
||||
|
||||
expect(result).to.be.false
|
||||
expect(fsPathExistsStub.calledOnce).to.be.true
|
||||
expect(fsPathExistsStub.calledWith(binaryPath)).to.be.true
|
||||
expect(execStub.calledOnce).to.be.true
|
||||
expect(execStub.calledWith(execCommand)).to.be.true
|
||||
})
|
||||
|
||||
it('should return true if version is found and matches a good version', async () => {
|
||||
const stdout = 'version 6.1.2'
|
||||
fsPathExistsStub.resolves(true)
|
||||
execStub.resolves({ stdout })
|
||||
|
||||
const result = await binary.isGood(binaryPath)
|
||||
|
||||
expect(result).to.be.true
|
||||
expect(fsPathExistsStub.calledOnce).to.be.true
|
||||
expect(fsPathExistsStub.calledWith(binaryPath)).to.be.true
|
||||
expect(execStub.calledOnce).to.be.true
|
||||
expect(execStub.calledWith(execCommand)).to.be.true
|
||||
})
|
||||
})
|
||||
|
||||
it('should return the defaultPath if it exists and is a good binary', async () => {
|
||||
process.env[envVariable] = defaultPath
|
||||
isBinaryGoodStub.withArgs(defaultPath).resolves(true)
|
||||
|
||||
const result = await binaryManager.findBinary(name, envVariable)
|
||||
|
||||
expect(result).to.equal(defaultPath)
|
||||
expect(isBinaryGoodStub.calledOnce).to.be.true
|
||||
expect(isBinaryGoodStub.calledWith(defaultPath)).to.be.true
|
||||
|
||||
describe('getFileName', () => {
  let originalPlatform

  const mockPlatform = (platform) => {
    Object.defineProperty(process, 'platform', { value: platform })
  }

  beforeEach(() => {
    // Save the original process.platform descriptor
    originalPlatform = Object.getOwnPropertyDescriptor(process, 'platform')
  })

  afterEach(() => {
    // Restore the original process.platform descriptor
    Object.defineProperty(process, 'platform', originalPlatform)
  })

  it('should return the executable file name with .exe extension on Windows', () => {
    const binary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries)
    mockPlatform('win32')

    const result = binary.getFileName()

    expect(result).to.equal('ffmpeg.exe')
  })

  it('should return the executable file name without extension on linux', () => {
    const binary = new Binary('ffmpeg', 'executable', 'FFMPEG_PATH', ['5.1'], ffbinaries)
    mockPlatform('linux')

    const result = binary.getFileName()

    expect(result).to.equal('ffmpeg')
  })

  it('should return the library file name with .dll extension on Windows', () => {
    const binary = new Binary('ffmpeg', 'library', 'FFMPEG_PATH', ['5.1'], ffbinaries)
    mockPlatform('win32')

    const result = binary.getFileName()

    expect(result).to.equal('ffmpeg.dll')
  })

  it('should return the library file name with .so extension on linux', () => {
    const binary = new Binary('ffmpeg', 'library', 'FFMPEG_PATH', ['5.1'], ffbinaries)
    mockPlatform('linux')

    const result = binary.getFileName()

    expect(result).to.equal('ffmpeg.so')
  })

  it('should return the file name without extension for other types', () => {
    const binary = new Binary('ffmpeg', 'other', 'FFMPEG_PATH', ['5.1'], ffbinaries)
    mockPlatform('win32')

    const result = binary.getFileName()

    expect(result).to.equal('ffmpeg')
  })
})

  it('should return the whichPath if it exists and is a good binary', async () => {
    delete process.env[envVariable]
    isBinaryGoodStub.withArgs(undefined).resolves(false)
    isBinaryGoodStub.withArgs(whichPath).resolves(true)
    whichSyncStub.returns(whichPath)

    const result = await binaryManager.findBinary(name, envVariable)

    expect(result).to.equal(whichPath)
    expect(isBinaryGoodStub.calledTwice).to.be.true
    expect(isBinaryGoodStub.calledWith(undefined)).to.be.true
    expect(isBinaryGoodStub.calledWith(whichPath)).to.be.true
  })

  it('should return the mainInstallPath if it exists and is a good binary', async () => {
    delete process.env[envVariable]
    isBinaryGoodStub.withArgs(undefined).resolves(false)
    isBinaryGoodStub.withArgs(null).resolves(false)
    isBinaryGoodStub.withArgs(mainInstallPath).resolves(true)
    whichSyncStub.returns(null)

    const result = await binaryManager.findBinary(name, envVariable)

    expect(result).to.equal(mainInstallPath)
    expect(isBinaryGoodStub.callCount).to.be.equal(3)
    expect(isBinaryGoodStub.calledWith(undefined)).to.be.true
    expect(isBinaryGoodStub.calledWith(null)).to.be.true
    expect(isBinaryGoodStub.calledWith(mainInstallPath)).to.be.true
  })

  it('should return the altInstallPath if it exists and is a good binary', async () => {
    delete process.env[envVariable]
    isBinaryGoodStub.withArgs(undefined).resolves(false)
    isBinaryGoodStub.withArgs(null).resolves(false)
    isBinaryGoodStub.withArgs(mainInstallPath).resolves(false)
    isBinaryGoodStub.withArgs(altInstallPath).resolves(true)
    whichSyncStub.returns(null)

    const result = await binaryManager.findBinary(name, envVariable)

    expect(result).to.equal(altInstallPath)
    expect(isBinaryGoodStub.callCount).to.be.equal(4)
    expect(isBinaryGoodStub.calledWith(undefined)).to.be.true
    expect(isBinaryGoodStub.calledWith(null)).to.be.true
    expect(isBinaryGoodStub.calledWith(mainInstallPath)).to.be.true
    expect(isBinaryGoodStub.calledWith(altInstallPath)).to.be.true
  })

  it('should return null if no good binary is found', async () => {
    delete process.env[envVariable]
    isBinaryGoodStub.withArgs(undefined).resolves(false)
    isBinaryGoodStub.withArgs(null).resolves(false)
    isBinaryGoodStub.withArgs(mainInstallPath).resolves(false)
    isBinaryGoodStub.withArgs(altInstallPath).resolves(false)
    whichSyncStub.returns(null)

    const result = await binaryManager.findBinary(name, envVariable)

    expect(result).to.be.null
    expect(isBinaryGoodStub.callCount).to.be.equal(4)
    expect(isBinaryGoodStub.calledWith(undefined)).to.be.true
    expect(isBinaryGoodStub.calledWith(null)).to.be.true
    expect(isBinaryGoodStub.calledWith(mainInstallPath)).to.be.true
    expect(isBinaryGoodStub.calledWith(altInstallPath)).to.be.true
  })
})
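
// These tests verify that isBinaryGood requires the binary to exist, runs it with
// '-version', parses the reported version from stdout, and matches it against the
// list of known-good versions.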
describe('isBinaryGood', () => {
  let binaryManager
  let fsPathExistsStub
  let execStub
  let loggerInfoStub
  let loggerErrorStub

  const binaryPath = '/path/to/binary'
  const execCommand = '"' + binaryPath + '"' + ' -version'
  const goodVersions = ['5.1', '6']

  beforeEach(() => {
    binaryManager = new BinaryManager()
    fsPathExistsStub = sinon.stub(fs, 'pathExists')
    execStub = sinon.stub(binaryManager, 'exec')
  })

  afterEach(() => {
    fsPathExistsStub.restore()
    execStub.restore()
  })

  it('should return false if binaryPath is falsy', async () => {
    fsPathExistsStub.resolves(true)

    const result = await binaryManager.isBinaryGood(null, goodVersions)

    expect(result).to.be.false
    expect(fsPathExistsStub.called).to.be.false
    expect(execStub.called).to.be.false
  })

  it('should return false if binaryPath does not exist', async () => {
    fsPathExistsStub.resolves(false)

    const result = await binaryManager.isBinaryGood(binaryPath, goodVersions)

    expect(result).to.be.false
    expect(fsPathExistsStub.calledOnce).to.be.true
    expect(fsPathExistsStub.calledWith(binaryPath)).to.be.true
    expect(execStub.called).to.be.false
  })

  it('should return false if failed to check version of binary', async () => {
    fsPathExistsStub.resolves(true)
    execStub.rejects(new Error('Failed to execute command'))

    const result = await binaryManager.isBinaryGood(binaryPath, goodVersions)

    expect(result).to.be.false
    expect(fsPathExistsStub.calledOnce).to.be.true
    expect(fsPathExistsStub.calledWith(binaryPath)).to.be.true
    expect(execStub.calledOnce).to.be.true
    expect(execStub.calledWith(execCommand)).to.be.true
  })

  it('should return false if version is not found', async () => {
    const stdout = 'Some output without version'
    fsPathExistsStub.resolves(true)
    execStub.resolves({ stdout })

    const result = await binaryManager.isBinaryGood(binaryPath, goodVersions)

    expect(result).to.be.false
    expect(fsPathExistsStub.calledOnce).to.be.true
    expect(fsPathExistsStub.calledWith(binaryPath)).to.be.true
    expect(execStub.calledOnce).to.be.true
    expect(execStub.calledWith(execCommand)).to.be.true
  })

  it('should return false if version is found but does not match a good version', async () => {
    const stdout = 'version 1.2.3'
    fsPathExistsStub.resolves(true)
    execStub.resolves({ stdout })

    const result = await binaryManager.isBinaryGood(binaryPath, goodVersions)

    expect(result).to.be.false
    expect(fsPathExistsStub.calledOnce).to.be.true
    expect(fsPathExistsStub.calledWith(binaryPath)).to.be.true
    expect(execStub.calledOnce).to.be.true
    expect(execStub.calledWith(execCommand)).to.be.true
  })

  it('should return true if version is found and matches a good version', async () => {
    const stdout = 'version 6.1.2'
    fsPathExistsStub.resolves(true)
    execStub.resolves({ stdout })

    const result = await binaryManager.isBinaryGood(binaryPath, goodVersions)

    expect(result).to.be.true
    expect(fsPathExistsStub.calledOnce).to.be.true
    expect(fsPathExistsStub.calledWith(binaryPath)).to.be.true
    expect(execStub.calledOnce).to.be.true
    expect(execStub.calledWith(execCommand)).to.be.true
  })
})