mirror of
https://github.com/advplyr/audiobookshelf.git
synced 2025-08-09 13:50:42 +02:00
Merge 2ae4824f8a
into 9c0c7b6b08
This commit is contained in:
commit
d40eebe263
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "audiobookshelf",
|
||||
"version": "2.25.1",
|
||||
"version": "2.25.2",
|
||||
"buildNumber": 1,
|
||||
"description": "Self-hosted audiobook and podcast server",
|
||||
"main": "index.js",
|
||||
|
140
server/controllers/KOReaderController.js
Normal file
140
server/controllers/KOReaderController.js
Normal file
@ -0,0 +1,140 @@
|
||||
const Logger = require('../Logger')
|
||||
const Database = require('../Database')
|
||||
const { Op } = require('sequelize')
|
||||
|
||||
class KOReaderController {
  // In-memory store of users registered through the KOReader endpoints.
  // NOTE(review): not persisted — contents are lost on restart; confirm whether
  // these should be backed by the users table before release.
  tmpUsers = []

  /**
   * GET: /public/users/auth
   * Authenticate user for KOReader.
   *
   * NOTE(review): currently authorizes every request without checking
   * credentials — real validation still needs to be implemented.
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async authenticateUser(req, res) {
    Logger.debug('[KOReaderController] authenticateUser called', req.body)

    // TODO: validate the KOReader-supplied credentials before authorizing
    return res.status(200).json({
      authorized: 'OK'
    })
  }

  /**
   * POST: /public/users/create
   * Register a new KOReader sync user.
   *
   * PublicRouter binds this handler, but the method was missing from the
   * controller, which made `KOReaderController.createUser.bind(...)` throw at
   * server startup. Minimal in-memory implementation until persistence is
   * decided.
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async createUser(req, res) {
    const { username, password } = req.body || {}
    if (!username || !password) {
      return res.status(400).json({ error: 'Invalid fields' })
    }
    if (this.tmpUsers.some((user) => user.username === username)) {
      return res.status(409).json({ error: 'Username is already registered' })
    }
    this.tmpUsers.push({ username, password })
    return res.status(201).json({ username })
  }

  /**
   * PUT: /public/syncs/progress
   * Receive a reading-progress update pushed by KOReader.
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async updateProgress(req, res) {
    try {
      const doc = req.body.document
      if (!doc) {
        return res.status(400).json({ error: 'Document field is missing' })
      }

      const percentage = Number(req.body.percentage)
      const progress = req.body.progress
      const device = req.body.device
      const device_id = req.body.device_id
      const timestamp = Math.floor(Date.now() / 1000)

      // Number.isFinite instead of truthiness so a legitimate 0% update is
      // accepted while NaN (missing/garbage percentage) is still rejected
      if (Number.isFinite(percentage) && progress && device) {
        Logger.debug('[KOReaderController] updateProgress', { document: doc, percentage, progress, device, device_id, timestamp })

        // TODO: persist the progress (needs user/document mapping first)

        return res.status(200).json({
          document: doc,
          timestamp
        })
      } else {
        return res.status(400).json({ error: 'Invalid fields' })
      }
    } catch (error) {
      // Log before responding — the previous version swallowed the error
      Logger.error('[KOReaderController] updateProgress failed:', error)
      return res.status(500).json({ error: 'Internal server error' })
    }
  }

  /**
   * GET: /public/syncs/progress/:documentHash
   * Get reading progress for a document by its hash.
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async getProgress(req, res) {
    const { documentHash } = req.params

    if (!documentHash) {
      return res.status(400).json({ error: 'Document hash is required' })
    }

    try {
      const book = await Database.bookModel.findOne({
        where: {
          [Op.or]: [{ md5FileHash: documentHash }, { md5FilenameHash: documentHash }]
        },
        include: [
          {
            model: Database.mediaProgressModel,
            where: {
              // TODO: map the requesting KOReader user to an ABS user.
              // Hard-coded user id is a development placeholder only.
              userId: '50a15f71-8504-4046-be75-8cf38212d7d1',
              mediaItemType: 'book'
            },
            required: false
          }
        ]
      })

      if (!book) {
        return res.status(404).json({ error: 'Book not found for the provided hash' })
      }

      const mediaProgress = book.mediaProgresses?.[0]

      if (!mediaProgress) {
        // No progress yet — report a zeroed state so KOReader starts fresh
        return res.json({
          percentage: 0,
          progress: 0,
          device: null,
          device_id: null,
          timestamp: Math.floor(Date.now() / 1000)
        })
      }

      // Convert progress to KOReader format.
      // Guard against a missing ebookLocation (e.g. audio-only progress rows),
      // which previously crashed with a TypeError on .replace.
      const progressPercentage = mediaProgress.ebookProgress || 0
      // NOTE(review): stripping the epubcfi() wrapper reportedly does not
      // restore the position in KOReader yet — the format mapping needs work
      const progressValue = (mediaProgress.ebookLocation || '').replace('epubcfi(', '').replace(/\)$/, '')

      const payload = {
        percentage: progressPercentage,
        progress: progressValue,
        device: '1',
        device_id: '1',
        timestamp: Math.floor(mediaProgress.updatedAt.getTime() / 1000)
      }
      Logger.debug(`[KOReaderController] Progress for hash "${documentHash}":`, payload)

      return res.status(200).json(payload)
    } catch (error) {
      Logger.error(`[KOReaderController] Failed to get progress for hash "${documentHash}":`, error)
      return res.status(500).json({ error: 'Internal server error' })
    }
  }
}
// Export a shared singleton instance; PublicRouter binds its route handlers
// directly to this object.
module.exports = new KOReaderController()
68
server/migrations/v2.25.2-add-koreader-hashes.js
Normal file
68
server/migrations/v2.25.2-add-koreader-hashes.js
Normal file
@ -0,0 +1,68 @@
|
||||
const { DataTypes } = require('sequelize')
|
||||
|
||||
/**
|
||||
* @typedef MigrationContext
|
||||
 * @property {import('sequelize').QueryInterface} queryInterface - a Sequelize QueryInterface object.
|
||||
* @property {import('../Logger')} logger - a Logger object.
|
||||
*
|
||||
* @typedef MigrationOptions
|
||||
* @property {MigrationContext} context - an object containing the migration context.
|
||||
*/
|
||||
|
||||
// NOTE: must match the version in the migration filename
// (v2.25.2-add-koreader-hashes.js); a mismatched version string makes the
// migration run at the wrong point in the upgrade sequence. It previously
// (and incorrectly) said '2.21.0'.
const migrationVersion = '2.25.2'
const migrationName = `${migrationVersion}-add-koreader-hashes`
const loggerPrefix = `[${migrationVersion} migration]`

/**
 * This migration adds MD5 hash fields to the books table for KOReader sync support
 *
 * @param {MigrationOptions} options - an object containing the migration context.
 * @returns {Promise<void>} - A promise that resolves when the migration is complete.
 */
async function up({ context: { queryInterface, logger } }) {
  logger.info(`${loggerPrefix} UPGRADE BEGIN: ${migrationName}`)

  // Add MD5 hash columns for KOReader sync (nullable: existing rows have no hash yet)
  await queryInterface.addColumn('books', 'md5FileHash', {
    type: DataTypes.STRING,
    allowNull: true
  })

  await queryInterface.addColumn('books', 'md5FilenameHash', {
    type: DataTypes.STRING,
    allowNull: true
  })

  // Add indexes for efficient lookup by hash
  await queryInterface.addIndex('books', ['md5FileHash'], {
    name: 'books_md5_file_hash_index'
  })

  await queryInterface.addIndex('books', ['md5FilenameHash'], {
    name: 'books_md5_filename_hash_index'
  })

  logger.info(`${loggerPrefix} UPGRADE END: ${migrationName}`)
}

/**
 * This migration removes MD5 hash fields from the books table
 *
 * @param {MigrationOptions} options - an object containing the migration context.
 * @returns {Promise<void>} - A promise that resolves when the migration is complete.
 */
async function down({ context: { queryInterface, logger } }) {
  logger.info(`${loggerPrefix} DOWNGRADE BEGIN: ${migrationName}`)

  // Remove indexes first — they depend on the columns
  await queryInterface.removeIndex('books', 'books_md5_file_hash_index')
  await queryInterface.removeIndex('books', 'books_md5_filename_hash_index')

  // Remove columns
  await queryInterface.removeColumn('books', 'md5FileHash')
  await queryInterface.removeColumn('books', 'md5FilenameHash')

  logger.info(`${loggerPrefix} DOWNGRADE END: ${migrationName}`)
}
|
// Standard up/down migration interface — presumably consumed by the server's
// migration runner; verify the expected export shape against other migrations.
module.exports = { up, down }
@ -119,6 +119,10 @@ class Book extends Model {
|
||||
this.tags
|
||||
/** @type {string[]} */
|
||||
this.genres
|
||||
/** @type {string} */
|
||||
this.md5FileHash
|
||||
/** @type {string} */
|
||||
this.md5FilenameHash
|
||||
/** @type {Date} */
|
||||
this.updatedAt
|
||||
/** @type {Date} */
|
||||
@ -164,7 +168,9 @@ class Book extends Model {
|
||||
ebookFile: DataTypes.JSON,
|
||||
chapters: DataTypes.JSON,
|
||||
tags: DataTypes.JSON,
|
||||
genres: DataTypes.JSON
|
||||
genres: DataTypes.JSON,
|
||||
md5FileHash: DataTypes.STRING,
|
||||
md5FilenameHash: DataTypes.STRING
|
||||
},
|
||||
{
|
||||
sequelize,
|
||||
@ -632,7 +638,11 @@ class Book extends Model {
|
||||
tags: [...(this.tags || [])],
|
||||
audioFiles: structuredClone(this.audioFiles),
|
||||
chapters: structuredClone(this.chapters),
|
||||
ebookFile: structuredClone(this.ebookFile)
|
||||
ebookFile: structuredClone(this.ebookFile),
|
||||
md5: {
|
||||
file: this.md5FileHash,
|
||||
filename: this.md5FilenameHash
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -680,7 +690,11 @@ class Book extends Model {
|
||||
ebookFile: structuredClone(this.ebookFile),
|
||||
duration: this.duration,
|
||||
size: this.size,
|
||||
tracks: this.getTracklist(libraryItemId)
|
||||
tracks: this.getTracklist(libraryItemId),
|
||||
md5: {
|
||||
file: this.md5FileHash,
|
||||
filename: this.md5FilenameHash
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,6 +1,7 @@
|
||||
const express = require('express')
|
||||
const ShareController = require('../controllers/ShareController')
|
||||
const SessionController = require('../controllers/SessionController')
|
||||
const KOReaderController = require('../controllers/KOReaderController')
|
||||
|
||||
class PublicRouter {
|
||||
constructor(playbackSessionManager) {
|
||||
@ -19,6 +20,14 @@ class PublicRouter {
|
||||
this.router.get('/share/:slug/download', ShareController.downloadMediaItemShare.bind(this))
|
||||
this.router.patch('/share/:slug/progress', ShareController.updateMediaItemShareProgress.bind(this))
|
||||
this.router.get('/session/:id/track/:index', SessionController.getTrack.bind(this))
|
||||
|
||||
//
|
||||
// KOReader Routes
|
||||
//
|
||||
this.router.get('/users/auth', KOReaderController.authenticateUser.bind(this))
|
||||
this.router.post('/users/create', KOReaderController.createUser.bind(this))
|
||||
this.router.put('/syncs/progress', KOReaderController.updateProgress.bind(this))
|
||||
this.router.get('/syncs/progress/:documentHash', KOReaderController.getProgress.bind(this))
|
||||
}
|
||||
}
|
||||
module.exports = PublicRouter
|
||||
|
@ -19,6 +19,7 @@ const LibraryFile = require('../objects/files/LibraryFile')
|
||||
|
||||
const RssFeedManager = require('../managers/RssFeedManager')
|
||||
const CoverManager = require('../managers/CoverManager')
|
||||
const { generateBookHashes } = require('../utils/hashUtils')
|
||||
|
||||
const LibraryScan = require('./LibraryScan')
|
||||
const OpfFileScanner = require('./OpfFileScanner')
|
||||
@ -207,6 +208,25 @@ class BookScanner {
|
||||
|
||||
const ebookFileScanData = await parseEbookMetadata.parse(media.ebookFile)
|
||||
|
||||
// Generate/update MD5 hashes for KOReader sync if ebook file exists
|
||||
if (media.ebookFile) {
|
||||
try {
|
||||
const hashes = await generateBookHashes(media.ebookFile.metadata.path)
|
||||
if (media.md5FileHash !== hashes.fileHash || media.md5FilenameHash !== hashes.filenameHash) {
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Updating KOReader hashes for book "${media.title}": file=${hashes.fileHash}, filename=${hashes.filenameHash}`)
|
||||
media.md5FileHash = hashes.fileHash
|
||||
media.md5FilenameHash = hashes.filenameHash
|
||||
hasMediaChanges = true
|
||||
}
|
||||
} catch (error) {
|
||||
libraryScan.addLog(LogLevel.WARN, `Failed to generate KOReader hashes for book "${media.title}": ${error.message}`)
|
||||
}
|
||||
} else if (media.md5FileHash || media.md5FilenameHash) {
|
||||
media.md5FileHash = null
|
||||
media.md5FilenameHash = null
|
||||
hasMediaChanges = true
|
||||
}
|
||||
|
||||
const bookMetadata = await this.getBookMetadataFromScanData(media.audioFiles, ebookFileScanData, libraryItemData, libraryScan, librarySettings, existingLibraryItem.id)
|
||||
let authorsUpdated = false
|
||||
const bookAuthorsRemoved = []
|
||||
@ -467,11 +487,28 @@ class BookScanner {
|
||||
|
||||
let duration = 0
|
||||
scannedAudioFiles.forEach((af) => (duration += !isNaN(af.duration) ? Number(af.duration) : 0))
|
||||
|
||||
// Generate MD5 hashes for KOReader sync if ebook file exists
|
||||
let md5FileHash = null
|
||||
let md5FilenameHash = null
|
||||
if (ebookLibraryFile) {
|
||||
try {
|
||||
const hashes = await generateBookHashes(ebookLibraryFile.metadata.path)
|
||||
md5FileHash = hashes.fileHash
|
||||
md5FilenameHash = hashes.filenameHash
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Generated KOReader hashes for book "${bookMetadata.title}": file=${md5FileHash}, filename=${md5FilenameHash}`)
|
||||
} catch (error) {
|
||||
libraryScan.addLog(LogLevel.WARN, `Failed to generate KOReader hashes for book "${bookMetadata.title}": ${error.message}`)
|
||||
}
|
||||
}
|
||||
|
||||
const bookObject = {
|
||||
...bookMetadata,
|
||||
audioFiles: scannedAudioFiles,
|
||||
ebookFile: ebookLibraryFile || null,
|
||||
duration,
|
||||
md5FileHash,
|
||||
md5FilenameHash,
|
||||
bookAuthors: [],
|
||||
bookSeries: []
|
||||
}
|
||||
|
76
server/utils/hashUtils.js
Normal file
76
server/utils/hashUtils.js
Normal file
@ -0,0 +1,76 @@
|
||||
const crypto = require('crypto')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
/**
 * Generate an MD5 hash from file content by sampling sparse 1 KiB chunks
 * rather than reading the whole file. Mirrors KOReader's partial-MD5 scheme
 * so the hashes match what the reader device computes.
 * See: https://github.com/koreader/koreader/blob/master/frontend/util.lua#L1102
 *
 * @param {string} filepath - Path to the file
 * @returns {string|null} MD5 hex digest, or null when filepath is falsy or the file cannot be opened
 */
function generateFileHash(filepath) {
  if (!filepath) return null

  try {
    const fd = fs.openSync(filepath, 'r')
    const step = 1024
    const size = 1024
    // Local, per-call hash object. (A stray module-level shared md5 hash
    // instance previously existed here; it was unused and has been removed —
    // a shared hash accumulator would corrupt results if ever reused.)
    const hash = crypto.createHash('md5')

    try {
      // Sample at offsets step << (2*i) for i = -1..10, as KOReader does.
      // NOTE(review): JS masks shift counts to 5 bits, so i = -1 wraps to a
      // shift of 30 and yields offset 0 — this appears to match LuaJIT's
      // behavior in KOReader; do not "fix" without cross-checking hashes.
      for (let i = -1; i <= 10; i++) {
        const position = step << (2 * i)
        const buffer = Buffer.alloc(size)

        try {
          const bytesRead = fs.readSync(fd, buffer, 0, size, position)
          if (bytesRead > 0) {
            hash.update(buffer.subarray(0, bytesRead))
          } else {
            // Past EOF — no more chunks to sample
            break
          }
        } catch (err) {
          // Read failure mid-sampling: digest whatever was read so far
          break
        }
      }

      return hash.digest('hex')
    } finally {
      fs.closeSync(fd)
    }
  } catch (err) {
    // Could not open the file — callers treat null as "no hash available"
    return null
  }
}
|
||||
|
||||
/**
|
||||
* Generate MD5 hash from filename
|
||||
* @param {string} filename - The filename (without path)
|
||||
* @returns {string} MD5 hash of filename
|
||||
*/
|
||||
function generateFilenameHash(filename) {
|
||||
return crypto.createHash('md5').update(filename).digest('hex')
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate both file content and filename hashes for a book
|
||||
* @param {string} ebookFilePath - Path to the ebook file
|
||||
* @returns {Promise<{fileHash: string, filenameHash: string}>}
|
||||
*/
|
||||
async function generateBookHashes(ebookFilePath) {
|
||||
const filename = path.basename(ebookFilePath)
|
||||
|
||||
const [fileHash, filenameHash] = await Promise.all([generateFileHash(ebookFilePath), Promise.resolve(generateFilenameHash(filename))])
|
||||
|
||||
return {
|
||||
fileHash,
|
||||
filenameHash
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
generateFileHash,
|
||||
generateFilenameHash,
|
||||
generateBookHashes
|
||||
}
|
Loading…
Reference in New Issue
Block a user