Starting db migration file
commit c738e35a8c
parent b2e1e24ca5
server/Database.js
@@ -8,6 +8,10 @@ class Database {
     this.sequelize = null
   }
 
+  get models() {
+    return this.sequelize?.models || {}
+  }
+
   async init() {
     if (!await this.connect()) {
       throw new Error('Database connection failed')
@@ -49,6 +53,8 @@ class Database {
     require('./models/LibraryFile')(this.sequelize)
     require('./models/Person')(this.sequelize)
     require('./models/AudioBookmark')(this.sequelize)
+    require('./models/MediaFile')(this.sequelize)
+    require('./models/MediaStream')(this.sequelize)
     require('./models/AudioTrack')(this.sequelize)
     require('./models/BookAuthor')(this.sequelize)
     require('./models/BookChapter')(this.sequelize)
@@ -71,8 +77,10 @@ class Database {
     require('./models/FeedEpisode')(this.sequelize)
     require('./models/Setting')(this.sequelize)
     require('./models/LibrarySetting')(this.sequelize)
+    require('./models/Notification')(this.sequelize)
+    require('./models/UserPermission')(this.sequelize)
 
-    return this.sequelize.sync()
+    return this.sequelize.sync({ force: false })
   }
 
   async createTestUser() {
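For context, a minimal sketch (not part of this diff) of how the new models getter and the sync({ force: false }) call are consumed once Database.init() has run; the require path and the call site are illustrative assumptions, following the Database.init() / Database.models usage visible in Server.js and dbMigration.js below.

// Illustrative sketch only -- Database.js is assumed to export a singleton instance
const Database = require('./Database')

async function bootSketch() {
  await Database.init() // connects, registers the models, then runs sequelize.sync({ force: false })
  const { Library } = Database.models // resolved lazily through this.sequelize?.models
  const libraries = await Library.findAll()
  console.log(`${libraries.length} libraries`)
}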
server/Server.js
@@ -8,7 +8,8 @@ const rateLimit = require('./libs/expressRateLimit')
 const { version } = require('../package.json')
 
 // Utils
 const dbMigration = require('./utils/dbMigration')
+const dbMigration2 = require('./utils/migrations/dbMigrationOld')
+const dbMigration3 = require('./utils/migrations/dbMigration')
 const filePerms = require('./utils/filePerms')
 const fileUtils = require('./utils/fileUtils')
 const Logger = require('./Logger')
@@ -100,21 +101,22 @@ class Server {
     Logger.info('[Server] Init v' + version)
     await this.playbackSessionManager.removeOrphanStreams()
 
+    // TODO: Test new db connection
+    await Database.init()
+    await Database.createTestUser()
+    // await dbMigration3.migrate()
+
     const previousVersion = await this.db.checkPreviousVersion() // Returns null if same server version
     if (previousVersion) {
       Logger.debug(`[Server] Upgraded from previous version ${previousVersion}`)
     }
     if (previousVersion && previousVersion.localeCompare('2.0.0') < 0) { // Old version data model migration
       Logger.debug(`[Server] Previous version was < 2.0.0 - migration required`)
-      await dbMigration.migrate(this.db)
+      await dbMigration2.migrate(this.db)
     } else {
       await this.db.init()
     }
 
-    // TODO: Test new db connection
-    await Database.init()
-    await Database.createTestUser()
-
     // Create token secret if does not exist (Added v2.1.0)
     if (!this.db.serverSettings.tokenSecret) {
       await this.auth.initTokenSecret()
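One note on the version gate above: previousVersion.localeCompare('2.0.0') is a plain string comparison, and it alone decides whether the legacy njodb migration path runs. A tiny worked example, illustrative and not part of the diff:

// Illustrative: how the localeCompare gate behaves
console.log('1.7.3'.localeCompare('2.0.0') < 0) // true  -> old data model, run dbMigration / dbMigration2
console.log('2.2.4'.localeCompare('2.0.0') < 0) // false -> skip legacy migration, call this.db.init()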
server/models/AudioTrack.js
@@ -23,16 +23,21 @@ module.exports = (sequelize) => {
     },
     mediaItemId: DataTypes.UUIDV4,
     mediaItemType: DataTypes.STRING,
-    index: DataTypes.INTEGER
+    index: DataTypes.INTEGER,
+    startOffset: DataTypes.INTEGER,
+    duration: DataTypes.INTEGER,
+    title: DataTypes.STRING,
+    mimeType: DataTypes.STRING,
+    codec: DataTypes.STRING
   }, {
     sequelize,
     modelName: 'AudioTrack'
   })
 
-  const { Book, PodcastEpisode, FileMetadata } = sequelize.models
+  const { Book, PodcastEpisode, MediaFile } = sequelize.models
 
-  FileMetadata.hasOne(AudioTrack)
-  AudioTrack.belongsTo(FileMetadata)
+  MediaFile.hasOne(AudioTrack)
+  AudioTrack.belongsTo(MediaFile)
 
   Book.hasMany(AudioTrack, {
     foreignKey: 'mediaItemId',
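A hedged sketch of what the reworked association means at the table level: with MediaFile.hasOne(AudioTrack) and AudioTrack.belongsTo(MediaFile), Sequelize places a MediaFileId foreign key on the audio track row (default naming). The snippet is illustrative only and assumes the Database singleton from server/Database.js has already been initialized.

// Illustrative sketch, not part of the commit
const Database = require('../Database') // path assumes a caller under server/models/

async function trackSketch() {
  const { MediaFile, AudioTrack } = Database.models
  const mediaFile = await MediaFile.create({ formatName: 'mp3', duration: 1234.5 })
  await AudioTrack.create({
    index: 1,
    title: 'Chapter 1',
    codec: 'mp3',
    MediaFileId: mediaFile.id // default foreign key created by MediaFile.hasOne(AudioTrack)
  })
}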
server/models/BookChapter.js
@@ -11,8 +11,8 @@ module.exports = (sequelize) => {
     },
     index: DataTypes.INTEGER,
     title: DataTypes.STRING,
-    start: DataTypes.INTEGER,
-    end: DataTypes.INTEGER
+    start: DataTypes.FLOAT,
+    end: DataTypes.FLOAT
   }, {
     sequelize,
     modelName: 'BookChapter'
server/models/Library.js
@@ -15,7 +15,7 @@ module.exports = (sequelize) => {
     mediaType: DataTypes.STRING,
     provider: DataTypes.STRING,
     lastScan: DataTypes.DATE,
-    scanVersion: DataTypes.STRING
+    lastScanVersion: DataTypes.STRING
   }, {
     sequelize,
     modelName: 'Library'
server/models/LibraryItem.js
@@ -20,7 +20,7 @@ module.exports = (sequelize) => {
     ctime: DataTypes.DATE(6),
     birthtime: DataTypes.DATE(6),
     lastScan: DataTypes.DATE,
-    scanVersion: DataTypes.STRING
+    lastScanVersion: DataTypes.STRING
   }, {
     sequelize,
     modelName: 'LibraryItem'
server/models/MediaFile.js (new file, 29 lines)
@@ -0,0 +1,29 @@
const { DataTypes, Model } = require('sequelize')

module.exports = (sequelize) => {
  class MediaFile extends Model { }

  MediaFile.init({
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    formatName: DataTypes.STRING,
    formatNameLong: DataTypes.STRING,
    duration: DataTypes.FLOAT,
    bitrate: DataTypes.INTEGER,
    size: DataTypes.BIGINT,
    tags: DataTypes.JSON
  }, {
    sequelize,
    modelName: 'MediaFile'
  })

  const { FileMetadata } = sequelize.models

  FileMetadata.hasOne(MediaFile)
  MediaFile.belongsTo(FileMetadata)

  return MediaFile
}
server/models/MediaStream.js (new file, 49 lines)
@@ -0,0 +1,49 @@
const { DataTypes, Model } = require('sequelize')

module.exports = (sequelize) => {
  class MediaStream extends Model { }

  MediaStream.init({
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    index: DataTypes.INTEGER,
    codecType: DataTypes.STRING,
    codec: DataTypes.STRING,
    channels: DataTypes.INTEGER,
    channelLayout: DataTypes.STRING,
    bitrate: DataTypes.INTEGER,
    timeBase: DataTypes.STRING,
    duration: DataTypes.FLOAT,
    sampleRate: DataTypes.INTEGER,
    language: DataTypes.STRING,
    default: DataTypes.BOOLEAN,
    // Video stream specific
    profile: DataTypes.STRING,
    width: DataTypes.INTEGER,
    height: DataTypes.INTEGER,
    codedWidth: DataTypes.INTEGER,
    codedHeight: DataTypes.INTEGER,
    pixFmt: DataTypes.STRING,
    level: DataTypes.INTEGER,
    frameRate: DataTypes.FLOAT,
    colorSpace: DataTypes.STRING,
    colorRange: DataTypes.STRING,
    chromaLocation: DataTypes.STRING,
    displayAspectRatio: DataTypes.FLOAT,
    // Chapters JSON
    chapters: DataTypes.JSON
  }, {
    sequelize,
    modelName: 'MediaStream'
  })

  const { MediaFile } = sequelize.models

  MediaFile.hasMany(MediaStream)
  MediaStream.belongsTo(MediaFile)

  return MediaStream
}
server/models/Notification.js (new file, 33 lines)
@@ -0,0 +1,33 @@
const { DataTypes, Model } = require('sequelize')

module.exports = (sequelize) => {
  class Notification extends Model { }

  Notification.init({
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    eventName: DataTypes.STRING,
    urls: DataTypes.TEXT, // JSON array of urls
    titleTemplate: DataTypes.STRING(1000),
    bodyTemplate: DataTypes.TEXT,
    type: DataTypes.STRING,
    lastFiredAt: DataTypes.DATE,
    lastAttemptFailed: DataTypes.BOOLEAN,
    numConsecutiveFailedAttempts: DataTypes.INTEGER,
    numTimesFired: DataTypes.INTEGER,
    enabled: DataTypes.BOOLEAN
  }, {
    sequelize,
    modelName: 'Notification'
  })

  const { Library } = sequelize.models

  Library.hasMany(Notification)
  Notification.belongsTo(Library)

  return Notification
}
server/models/User.js
@@ -14,8 +14,14 @@ module.exports = (sequelize) => {
     pash: DataTypes.STRING,
     type: DataTypes.STRING,
     token: DataTypes.STRING,
-    isActive: DataTypes.BOOLEAN,
-    isLocked: DataTypes.BOOLEAN,
+    isActive: {
+      type: DataTypes.BOOLEAN,
+      defaultValue: false
+    },
+    isLocked: {
+      type: DataTypes.BOOLEAN,
+      defaultValue: false
+    },
     lastSeen: DataTypes.DATE
   }, {
     sequelize,
server/models/UserPermission.js (new file, 25 lines)
@@ -0,0 +1,25 @@
const { DataTypes, Model } = require('sequelize')

module.exports = (sequelize) => {
  class UserPermission extends Model { }

  UserPermission.init({
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    key: DataTypes.STRING,
    value: DataTypes.STRING
  }, {
    sequelize,
    modelName: 'UserPermission'
  })

  const { User } = sequelize.models

  User.hasMany(UserPermission)
  UserPermission.belongsTo(User)

  return UserPermission
}
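A hedged sketch of how these key/value UserPermission rows might be written; the permission key 'download' is a hypothetical example, not something this commit defines.

// Illustrative sketch, not part of the commit
const Database = require('../Database') // Database singleton, assumed already initialized

async function permissionSketch(userId) {
  await Database.models.UserPermission.create({
    key: 'download', // hypothetical permission key
    value: 'true',   // both key and value are plain strings in this model
    UserId: userId   // default foreign key created by User.hasMany(UserPermission)
  })
}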
server/utils/migrations/dbMigration.js (new file, 188 lines)
@@ -0,0 +1,188 @@
const package = require('../../../package.json')
const Logger = require('../../Logger')
const Database = require('../../Database')
const oldDbFiles = require('./oldDbFiles')

const oldDbIdMap = {
  users: {},
  libraries: {},
  libraryFolders: {},
  libraryItems: {},
  books: {},
  tags: {}
}

async function migrateBook(oldLibraryItem, LibraryItem) {
  const oldBook = oldLibraryItem.media

  const Book = await Database.models.Book.create({
    title: oldBook.metadata.title,
    subtitle: oldBook.metadata.subtitle,
    publishedYear: oldBook.metadata.publishedYear,
    publishedDate: oldBook.metadata.publishedDate,
    publisher: oldBook.metadata.publisher,
    description: oldBook.metadata.description,
    isbn: oldBook.metadata.isbn,
    asin: oldBook.metadata.asin,
    language: oldBook.metadata.language,
    explicit: !!oldBook.metadata.explicit,
    lastCoverSearchQuery: oldBook.lastCoverSearchQuery,
    lastCoverSearch: oldBook.lastCoverSearch,
    LibraryItemId: LibraryItem.id,
    createdAt: LibraryItem.createdAt,
    updatedAt: LibraryItem.updatedAt
  })

  oldDbIdMap.books[oldLibraryItem.id] = Book.id

  // TODO: Handle cover image record
  // TODO: Handle EBook record

  Logger.info(`[dbMigration] migrateBook: Book migrated "${Book.title}" (${Book.id})`)

  const oldTags = oldBook.tags || []
  for (const oldTag of oldTags) {
    let tagId = null
    if (oldDbIdMap[oldTag]) {
      tagId = oldDbIdMap[oldTag]
    } else {
      const Tag = await Database.models.Tag.create({
        name: oldTag
      })
      tagId = Tag.id
    }

    const BookTag = await Database.models.BookTag.create({
      BookId: Book.id,
      TagId: tagId
    })
    Logger.info(`[dbMigration] migrateBook: BookTag migrated "${oldTag}" (${BookTag.id})`)
  }

  for (const oldChapter of oldBook.chapters) {
    const BookChapter = await Database.models.BookChapter.create({
      index: oldChapter.id,
      start: oldChapter.start,
      end: oldChapter.end,
      title: oldChapter.title,
      BookId: Book.id
    })
    Logger.info(`[dbMigration] migrateBook: BookChapter migrated "${BookChapter.title}" (${BookChapter.id})`)
  }
}

async function migrateLibraryItems(oldLibraryItems) {
  for (const oldLibraryItem of oldLibraryItems) {
    Logger.info(`[dbMigration] migrateLibraryItems: Migrating library item "${oldLibraryItem.media.metadata.title}" (${oldLibraryItem.id})`)

    const LibraryId = oldDbIdMap.libraryFolders[oldLibraryItem.folderId]
    if (!LibraryId) {
      Logger.error(`[dbMigration] migrateLibraryItems: Old library folder id not found "${oldLibraryItem.folderId}"`)
      continue
    }

    const LibraryItem = await Database.models.LibraryItem.create({
      ino: oldLibraryItem.ino,
      path: oldLibraryItem.path,
      relPath: oldLibraryItem.relPath,
      mediaType: oldLibraryItem.mediaType,
      isFile: !!oldLibraryItem.isFile,
      isMissing: !!oldLibraryItem.isMissing,
      isInvalid: !!oldLibraryItem.isInvalid,
      mtime: oldLibraryItem.mtimeMs,
      ctime: oldLibraryItem.ctimeMs,
      birthtime: oldLibraryItem.birthtimeMs,
      lastScan: oldLibraryItem.lastScan,
      lastScanVersion: oldLibraryItem.scanVersion,
      createdAt: oldLibraryItem.addedAt,
      updatedAt: oldLibraryItem.updatedAt,
      LibraryId
    })

    Logger.info(`[dbMigration] migrateLibraryItems: LibraryItem "${LibraryItem.path}" migrated (${LibraryItem.id})`)

    if (oldLibraryItem.mediaType === 'book') {
      await migrateBook(oldLibraryItem, LibraryItem)
    }
  }
}

async function migrateLibraries(oldLibraries) {
  for (const oldLibrary of oldLibraries) {
    Logger.info(`[dbMigration] migrateLibraries: Migrating library "${oldLibrary.name}" (${oldLibrary.id})`)

    const Library = await Database.models.Library.create({
      name: oldLibrary.name,
      displayOrder: oldLibrary.displayOrder,
      icon: oldLibrary.icon || null,
      mediaType: oldLibrary.mediaType || null,
      provider: oldLibrary.provider,
      createdAt: oldLibrary.createdAt,
      updatedAt: oldLibrary.lastUpdate
    })

    oldDbIdMap.libraries[oldLibrary.id] = Library.id

    const oldLibrarySettings = oldLibrary.settings || {}
    for (const oldSettingsKey in oldLibrarySettings) {
      const LibrarySetting = await Database.models.LibrarySetting.create({
        key: oldSettingsKey,
        value: oldLibrarySettings[oldSettingsKey],
        LibraryId: Library.id
      })
      Logger.info(`[dbMigration] migrateLibraries: LibrarySetting "${LibrarySetting.key}" migrated (${LibrarySetting.id})`)
    }

    Logger.info(`[dbMigration] migrateLibraries: Library "${Library.name}" migrated (${Library.id})`)

    for (const oldFolder of oldLibrary.folders) {
      Logger.info(`[dbMigration] migrateLibraries: Migrating folder "${oldFolder.fullPath}" (${oldFolder.id})`)

      const LibraryFolder = await Database.models.LibraryFolder.create({
        path: oldFolder.fullPath,
        LibraryId: Library.id,
        createdAt: oldFolder.addedAt
      })

      oldDbIdMap.libraryFolders[oldFolder.id] = LibraryFolder.id

      Logger.info(`[dbMigration] migrateLibraries: LibraryFolder "${LibraryFolder.path}" migrated (${LibraryFolder.id})`)
    }
  }
}

async function migrateUsers(oldUsers) {
  for (const oldUser of oldUsers) {
    Logger.info(`[dbMigration] migrateUsers: Migrating user "${oldUser.username}" (${oldUser.id})`)

    const User = await Database.models.User.create({
      username: oldUser.username,
      pash: oldUser.pash || null,
      type: oldUser.type || null,
      token: oldUser.token || null,
      isActive: !!oldUser.isActive,
      lastSeen: oldUser.lastSeen || null,
      createdAt: oldUser.createdAt || Date.now()
    })

    oldDbIdMap.users[oldUser.id] = User.id

    Logger.info(`[dbMigration] migrateUsers: User "${User.username}" migrated (${User.id})`)

    // for (const oldMediaProgress of oldUser.mediaProgress) {
    //   const MediaProgress = await Database.models.MediaProgress.create({

    //   })
    // }
  }
}

module.exports.migrate = async () => {
  Logger.info(`[dbMigration3] Starting migration`)

  const data = await oldDbFiles.init()

  await migrateLibraries(data.libraries)
  await migrateLibraryItems(data.libraryItems.slice(0, 10))
  await migrateUsers(data.users)
}
server/utils/migrations/dbMigrationOld.js
@@ -1,33 +1,33 @@
 const Path = require('path')
-const fs = require('../libs/fsExtra')
-const njodb = require('../libs/njodb')
+const fs = require('../../libs/fsExtra')
+const njodb = require('../../libs/njodb')
 
-const { SupportedEbookTypes } = require('./globals')
-const { PlayMethod } = require('./constants')
-const { getId } = require('./index')
-const { filePathToPOSIX } = require('./fileUtils')
-const Logger = require('../Logger')
+const { SupportedEbookTypes } = require('../globals')
+const { PlayMethod } = require('../constants')
+const { getId } = require('../index')
+const { filePathToPOSIX } = require('../fileUtils')
+const Logger = require('../../Logger')
 
-const Library = require('../objects/Library')
-const LibraryItem = require('../objects/LibraryItem')
-const Book = require('../objects/mediaTypes/Book')
+const Library = require('../../objects/Library')
+const LibraryItem = require('../../objects/LibraryItem')
+const Book = require('../../objects/mediaTypes/Book')
 
-const BookMetadata = require('../objects/metadata/BookMetadata')
-const FileMetadata = require('../objects/metadata/FileMetadata')
+const BookMetadata = require('../../objects/metadata/BookMetadata')
+const FileMetadata = require('../../objects/metadata/FileMetadata')
 
-const AudioFile = require('../objects/files/AudioFile')
-const EBookFile = require('../objects/files/EBookFile')
-const LibraryFile = require('../objects/files/LibraryFile')
-const AudioMetaTags = require('../objects/metadata/AudioMetaTags')
+const AudioFile = require('../../objects/files/AudioFile')
+const EBookFile = require('../../objects/files/EBookFile')
+const LibraryFile = require('../../objects/files/LibraryFile')
+const AudioMetaTags = require('../../objects/metadata/AudioMetaTags')
 
-const Author = require('../objects/entities/Author')
-const Series = require('../objects/entities/Series')
+const Author = require('../../objects/entities/Author')
+const Series = require('../../objects/entities/Series')
 
-const MediaProgress = require('../objects/user/MediaProgress')
-const PlaybackSession = require('../objects/PlaybackSession')
+const MediaProgress = require('../../objects/user/MediaProgress')
+const PlaybackSession = require('../../objects/PlaybackSession')
 
-const { isObject } = require('.')
-const User = require('../objects/user/User')
+const { isObject } = require('..')
+const User = require('../../objects/user/User')
 
 var authorsToAdd = []
 var existingDbAuthors = []
server/utils/migrations/oldDbFiles.js (new file, 93 lines)
@@ -0,0 +1,93 @@
const { once } = require('events')
const { createInterface } = require('readline')
const Path = require('path')
const Logger = require('../../Logger')
const fs = require('../../libs/fsExtra')

async function processDbFile(filepath) {
  if (!fs.pathExistsSync(filepath)) {
    Logger.error(`[oldDbFiles] Db file does not exist at "${filepath}"`)
    return []
  }

  const entities = []

  try {
    const fileStream = fs.createReadStream(filepath)

    const rl = createInterface({
      input: fileStream,
      crlfDelay: Infinity,
    })

    rl.on('line', (line) => {
      if (line && line.trim()) {
        try {
          const entity = JSON.parse(line)
          if (entity && Object.keys(entity).length) entities.push(entity)
        } catch (jsonParseError) {
          Logger.error(`[oldDbFiles] Failed to parse line "${line}" in db file "${filepath}"`, jsonParseError)
        }
      }
    })

    await once(rl, 'close')

    console.log(`[oldDbFiles] Db file "${filepath}" processed`)

    return entities
  } catch (error) {
    Logger.error(`[oldDbFiles] Failed to read db file "${filepath}"`, error)
    return []
  }
}

async function loadDbData(dbpath) {
  try {
    Logger.info(`[oldDbFiles] Loading db data at "${dbpath}"`)
    const files = await fs.readdir(dbpath)

    const entities = []
    for (const filename of files) {
      if (Path.extname(filename).toLowerCase() !== '.json') {
        Logger.warn(`[oldDbFiles] Ignoring filename "${filename}" in db folder "${dbpath}"`)
        continue
      }

      const filepath = Path.join(dbpath, filename)
      Logger.info(`[oldDbFiles] Loading db data file "${filepath}"`)
      const someEntities = await processDbFile(filepath)
      Logger.info(`[oldDbFiles] Processed db data file with ${someEntities.length} entities`)
      entities.push(...someEntities)
    }

    Logger.info(`[oldDbFiles] Finished loading db data with ${entities.length} entities`)
    return entities
  } catch (error) {
    Logger.error(`[oldDbFiles] Failed to load db data "${dbpath}"`, error)
    return null
  }
}

module.exports.init = async () => {
  const dbs = {
    libraryItems: Path.join(global.ConfigPath, 'libraryItems', 'data'),
    users: Path.join(global.ConfigPath, 'users', 'data'),
    sessions: Path.join(global.ConfigPath, 'sessions', 'data'),
    libraries: Path.join(global.ConfigPath, 'libraries', 'data'),
    settings: Path.join(global.ConfigPath, 'settings', 'data'),
    collections: Path.join(global.ConfigPath, 'collections', 'data'),
    playlists: Path.join(global.ConfigPath, 'playlists', 'data'),
    authors: Path.join(global.ConfigPath, 'authors', 'data'),
    series: Path.join(global.ConfigPath, 'series', 'data'),
    feeds: Path.join(global.ConfigPath, 'feeds', 'data')
  }

  const data = {}
  for (const key in dbs) {
    data[key] = await loadDbData(dbs[key])
    Logger.info(`[oldDbFiles] ${data[key].length} ${key} loaded`)
  }

  return data
}
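To close the loop, a short sketch of the consumer side of this module; the returned shape follows the keys of the dbs object in init(), and the call site mirrors module.exports.migrate() in dbMigration.js above.

// Illustrative sketch of the consumer (see dbMigration.js above)
const oldDbFiles = require('./oldDbFiles')

async function migrateSketch() {
  const data = await oldDbFiles.init() // { libraryItems, users, sessions, libraries, settings, ... }
  console.log(`${data.libraries.length} libraries and ${data.users.length} users found in the old db`)
}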