// audiobookshelf/server/utils/migrations/dbMigration.js

const { DataTypes, QueryInterface } = require('sequelize')
const Path = require('path')
const uuidv4 = require('uuid').v4
const Logger = require('../../Logger')
const fs = require('../../libs/fsExtra')
const oldDbFiles = require('./oldDbFiles')
const parseNameString = require('../parsers/parseNameString')
const oldDbIdMap = {
users: {},
libraries: {},
libraryFolders: {},
libraryItems: {},
authors: {}, // key is (new) library id with another map of author ids
series: {}, // key is (new) library id with another map of series ids
collections: {},
podcastEpisodes: {},
books: {}, // key is library item id
podcasts: {}, // key is library item id
devices: {} // key is a json stringify of the old DeviceInfo data OR deviceId if it exists
}
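// Example (illustrative): after authors are migrated, oldDbIdMap.authors looks like
// { '<new library uuid>': { '<old author id>': '<new author uuid>' } }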
let prefixesToIgnore = ['the']
function getTitleIgnorePrefix(title) {
if (!title?.trim()) return title
for (const prefix of prefixesToIgnore) {
// e.g. for the prefix "the", a title of "The Book" returns "Book"
if (title.toLowerCase().startsWith(`${prefix} `)) {
return title.substring(prefix.length).trim()
}
}
return title
}
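// e.g. getTitleIgnorePrefix('The Book') => 'Book', while 'Theme Park' is returned
// unchanged because the prefix only matches when followed by a space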
function getDeviceInfoString(deviceInfo, UserId) {
if (!deviceInfo) return null
if (deviceInfo.deviceId) return deviceInfo.deviceId
const keys = [
UserId,
deviceInfo.browserName || null,
deviceInfo.browserVersion || null,
deviceInfo.osName || null,
deviceInfo.osVersion || null,
deviceInfo.clientVersion || null,
deviceInfo.manufacturer || null,
deviceInfo.model || null,
deviceInfo.sdkVersion || null,
deviceInfo.ipAddress || null
].map(k => k || '')
return 'temp-' + Buffer.from(keys.join('-'), 'utf-8').toString('base64')
}
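// Example (illustrative): when no deviceId exists, the same user/client combination always
// maps to the same fallback id, e.g. 'temp-' + base64('<userId>-Chrome-114.0-Windows-10-2.3.4----127.0.0.1')
// (empty fields collapse into consecutive hyphens)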
/**
* Migrate oldLibraryItem.media to Book model
* Migrate BookSeries and BookAuthor
* @param {objects.LibraryItem} oldLibraryItem
* @param {object} LibraryItem models.LibraryItem object
* @returns {object} { book: object, bookSeries: [], bookAuthor: [] }
*/
function migrateBook(oldLibraryItem, LibraryItem) {
const oldBook = oldLibraryItem.media
const _newRecords = {
book: null,
bookSeries: [],
bookAuthor: []
}
const tracks = (oldBook.audioFiles || []).filter(af => !af.exclude && !af.invalid)
let duration = 0
for (const track of tracks) {
if (track.duration !== null && !isNaN(track.duration)) {
duration += track.duration
}
}
//
// Migrate Book
//
const Book = {
id: uuidv4(),
title: oldBook.metadata.title,
titleIgnorePrefix: getTitleIgnorePrefix(oldBook.metadata.title),
subtitle: oldBook.metadata.subtitle,
publishedYear: oldBook.metadata.publishedYear,
publishedDate: oldBook.metadata.publishedDate,
publisher: oldBook.metadata.publisher,
description: oldBook.metadata.description,
isbn: oldBook.metadata.isbn,
asin: oldBook.metadata.asin,
language: oldBook.metadata.language,
explicit: !!oldBook.metadata.explicit,
abridged: !!oldBook.metadata.abridged,
lastCoverSearchQuery: oldBook.lastCoverSearchQuery,
lastCoverSearch: oldBook.lastCoverSearch,
createdAt: LibraryItem.createdAt,
updatedAt: LibraryItem.updatedAt,
narrators: oldBook.metadata.narrators,
ebookFile: oldBook.ebookFile,
coverPath: oldBook.coverPath,
duration,
audioFiles: oldBook.audioFiles,
chapters: oldBook.chapters,
tags: oldBook.tags,
genres: oldBook.metadata.genres
}
_newRecords.book = Book
oldDbIdMap.books[oldLibraryItem.id] = Book.id
//
// Migrate BookAuthors
//
const bookAuthorsInserted = []
for (const oldBookAuthor of oldBook.metadata.authors) {
// Optional chaining guards against libraries with no migrated authors
const authorId = oldDbIdMap.authors[LibraryItem.libraryId]?.[oldBookAuthor.id]
if (authorId) {
if (bookAuthorsInserted.includes(authorId)) continue // Duplicate prevention
bookAuthorsInserted.push(authorId)
_newRecords.bookAuthor.push({
id: uuidv4(),
authorId,
bookId: Book.id
})
} else {
Logger.warn(`[dbMigration] migrateBook: Book author not found "${oldBookAuthor.name}"`)
}
}
//
// Migrate BookSeries
//
const bookSeriesInserted = []
for (const oldBookSeries of oldBook.metadata.series) {
// Optional chaining guards against libraries with no migrated series
const seriesId = oldDbIdMap.series[LibraryItem.libraryId]?.[oldBookSeries.id]
if (seriesId) {
if (bookSeriesInserted.includes(seriesId)) continue // Duplicate prevention
bookSeriesInserted.push(seriesId)
_newRecords.bookSeries.push({
id: uuidv4(),
sequence: oldBookSeries.sequence,
seriesId,
bookId: Book.id
})
} else {
Logger.warn(`[dbMigration] migrateBook: Series not found "${oldBookSeries.name}"`)
}
}
return _newRecords
}
/**
* Migrate oldLibraryItem.media to Podcast model
* Migrate PodcastEpisode
* @param {objects.LibraryItem} oldLibraryItem
* @param {object} LibraryItem models.LibraryItem object
* @returns {object} { podcast: object, podcastEpisode: [] }
*/
function migratePodcast(oldLibraryItem, LibraryItem) {
const _newRecords = {
podcast: null,
podcastEpisode: []
}
const oldPodcast = oldLibraryItem.media
const oldPodcastMetadata = oldPodcast.metadata
//
// Migrate Podcast
//
const Podcast = {
id: uuidv4(),
title: oldPodcastMetadata.title,
titleIgnorePrefix: getTitleIgnorePrefix(oldPodcastMetadata.title),
author: oldPodcastMetadata.author,
releaseDate: oldPodcastMetadata.releaseDate,
feedURL: oldPodcastMetadata.feedUrl,
imageURL: oldPodcastMetadata.imageUrl,
description: oldPodcastMetadata.description,
itunesPageURL: oldPodcastMetadata.itunesPageUrl,
itunesId: oldPodcastMetadata.itunesId,
itunesArtistId: oldPodcastMetadata.itunesArtistId,
language: oldPodcastMetadata.language,
podcastType: oldPodcastMetadata.type,
explicit: !!oldPodcastMetadata.explicit,
autoDownloadEpisodes: !!oldPodcast.autoDownloadEpisodes,
autoDownloadSchedule: oldPodcast.autoDownloadSchedule,
lastEpisodeCheck: oldPodcast.lastEpisodeCheck,
maxEpisodesToKeep: oldPodcast.maxEpisodesToKeep || 0,
maxNewEpisodesToDownload: oldPodcast.maxNewEpisodesToDownload || 3,
lastCoverSearchQuery: oldPodcast.lastCoverSearchQuery,
lastCoverSearch: oldPodcast.lastCoverSearch,
createdAt: LibraryItem.createdAt,
updatedAt: LibraryItem.updatedAt,
coverPath: oldPodcast.coverPath,
tags: oldPodcast.tags,
genres: oldPodcastMetadata.genres
}
_newRecords.podcast = Podcast
oldDbIdMap.podcasts[oldLibraryItem.id] = Podcast.id
//
// Migrate PodcastEpisodes
//
const oldEpisodes = oldPodcast.episodes || []
for (const oldEpisode of oldEpisodes) {
oldEpisode.audioFile.index = 1
const PodcastEpisode = {
id: uuidv4(),
oldEpisodeId: oldEpisode.id,
index: oldEpisode.index,
season: oldEpisode.season || null,
episode: oldEpisode.episode || null,
episodeType: oldEpisode.episodeType || null,
title: oldEpisode.title,
subtitle: oldEpisode.subtitle || null,
description: oldEpisode.description || null,
pubDate: oldEpisode.pubDate || null,
enclosureURL: oldEpisode.enclosure?.url || null,
enclosureSize: oldEpisode.enclosure?.length || null,
enclosureType: oldEpisode.enclosure?.type || null,
publishedAt: oldEpisode.publishedAt || null,
createdAt: oldEpisode.addedAt,
updatedAt: oldEpisode.updatedAt,
podcastId: Podcast.id,
audioFile: oldEpisode.audioFile,
chapters: oldEpisode.chapters || []
}
_newRecords.podcastEpisode.push(PodcastEpisode)
oldDbIdMap.podcastEpisodes[oldEpisode.id] = PodcastEpisode.id
}
return _newRecords
}
/**
* Migrate libraryItems to LibraryItem, Book, Podcast models
* @param {Array<objects.LibraryItem>} oldLibraryItems
* @returns {object} { libraryItem: [], book: [], podcast: [], podcastEpisode: [], bookSeries: [], bookAuthor: [] }
*/
function migrateLibraryItems(oldLibraryItems) {
const _newRecords = {
book: [],
podcast: [],
podcastEpisode: [],
bookSeries: [],
bookAuthor: [],
libraryItem: []
}
for (const oldLibraryItem of oldLibraryItems) {
const libraryFolderId = oldDbIdMap.libraryFolders[oldLibraryItem.folderId]
if (!libraryFolderId) {
Logger.error(`[dbMigration] migrateLibraryItems: Old library folder id not found "${oldLibraryItem.folderId}"`)
continue
}
const libraryId = oldDbIdMap.libraries[oldLibraryItem.libraryId]
if (!libraryId) {
Logger.error(`[dbMigration] migrateLibraryItems: Old library id not found "${oldLibraryItem.libraryId}"`)
continue
}
if (!['book', 'podcast'].includes(oldLibraryItem.mediaType)) {
Logger.error(`[dbMigration] migrateLibraryItems: Not migrating library item with mediaType=${oldLibraryItem.mediaType}`)
continue
}
let size = 0
for (const libraryFile of oldLibraryItem.libraryFiles) {
if (libraryFile.metadata?.size && !isNaN(libraryFile.metadata?.size)) {
size += libraryFile.metadata.size
}
}
//
// Migrate LibraryItem
//
const LibraryItem = {
id: uuidv4(),
oldLibraryItemId: oldLibraryItem.id,
ino: oldLibraryItem.ino,
path: oldLibraryItem.path,
relPath: oldLibraryItem.relPath,
mediaId: null, // set below
mediaType: oldLibraryItem.mediaType,
isFile: !!oldLibraryItem.isFile,
isMissing: !!oldLibraryItem.isMissing,
isInvalid: !!oldLibraryItem.isInvalid,
mtime: oldLibraryItem.mtimeMs,
ctime: oldLibraryItem.ctimeMs,
birthtime: oldLibraryItem.birthtimeMs,
size,
lastScan: oldLibraryItem.lastScan,
lastScanVersion: oldLibraryItem.scanVersion,
createdAt: oldLibraryItem.addedAt,
updatedAt: oldLibraryItem.updatedAt,
libraryId,
libraryFolderId,
libraryFiles: oldLibraryItem.libraryFiles.map(lf => {
if (lf.isSupplementary === undefined) lf.isSupplementary = null
return lf
})
}
oldDbIdMap.libraryItems[oldLibraryItem.id] = LibraryItem.id
_newRecords.libraryItem.push(LibraryItem)
//
// Migrate Book/Podcast
//
if (oldLibraryItem.mediaType === 'book') {
const bookRecords = migrateBook(oldLibraryItem, LibraryItem)
_newRecords.book.push(bookRecords.book)
_newRecords.bookAuthor.push(...bookRecords.bookAuthor)
_newRecords.bookSeries.push(...bookRecords.bookSeries)
LibraryItem.mediaId = oldDbIdMap.books[oldLibraryItem.id]
} else if (oldLibraryItem.mediaType === 'podcast') {
const podcastRecords = migratePodcast(oldLibraryItem, LibraryItem)
_newRecords.podcast.push(podcastRecords.podcast)
_newRecords.podcastEpisode.push(...podcastRecords.podcastEpisode)
LibraryItem.mediaId = oldDbIdMap.podcasts[oldLibraryItem.id]
}
}
return _newRecords
}
/**
* Migrate Library and LibraryFolder
* @param {Array<objects.Library>} oldLibraries
* @returns {object} { library: [], libraryFolder: [] }
*/
function migrateLibraries(oldLibraries) {
const _newRecords = {
library: [],
libraryFolder: []
}
for (const oldLibrary of oldLibraries) {
if (!['book', 'podcast'].includes(oldLibrary.mediaType)) {
Logger.error(`[dbMigration] migrateLibraries: Not migrating library with mediaType=${oldLibrary.mediaType}`)
continue
}
//
// Migrate Library
//
const Library = {
id: uuidv4(),
oldLibraryId: oldLibrary.id,
name: oldLibrary.name,
displayOrder: oldLibrary.displayOrder,
icon: oldLibrary.icon || null,
mediaType: oldLibrary.mediaType || null,
provider: oldLibrary.provider,
settings: oldLibrary.settings || {},
createdAt: oldLibrary.createdAt,
updatedAt: oldLibrary.lastUpdate
}
oldDbIdMap.libraries[oldLibrary.id] = Library.id
_newRecords.library.push(Library)
//
// Migrate LibraryFolders
//
for (const oldFolder of oldLibrary.folders) {
const LibraryFolder = {
id: uuidv4(),
path: oldFolder.fullPath,
createdAt: oldFolder.addedAt,
updatedAt: oldLibrary.lastUpdate,
libraryId: Library.id
}
oldDbIdMap.libraryFolders[oldFolder.id] = LibraryFolder.id
_newRecords.libraryFolder.push(LibraryFolder)
}
}
return _newRecords
}
/**
* Migrate Author
* Previously, authors were shared between libraries; this ensures every author belongs to exactly one library
* @param {Array<objects.entities.Author>} oldAuthors
* @param {Array<objects.LibraryItem>} oldLibraryItems
* @returns {Array<object>} Array of Author model objs
*/
function migrateAuthors(oldAuthors, oldLibraryItems) {
const _newRecords = []
for (const oldAuthor of oldAuthors) {
// Get an array of NEW library ids that have this author
const librariesWithThisAuthor = [...new Set(oldLibraryItems.map(li => {
if (!li.media.metadata.authors?.some(au => au.id === oldAuthor.id)) return null
if (!oldDbIdMap.libraries[li.libraryId]) {
Logger.warn(`[dbMigration] Author's library id ${li.libraryId} was not migrated`)
}
return oldDbIdMap.libraries[li.libraryId]
}).filter(lid => lid))]
if (!librariesWithThisAuthor.length) {
Logger.error(`[dbMigration] Author ${oldAuthor.name} was not found in any libraries`)
}
for (const libraryId of librariesWithThisAuthor) {
const lastFirst = oldAuthor.name ? parseNameString.nameToLastFirst(oldAuthor.name) : ''
const Author = {
id: uuidv4(),
name: oldAuthor.name,
lastFirst,
asin: oldAuthor.asin || null,
description: oldAuthor.description,
imagePath: oldAuthor.imagePath,
createdAt: oldAuthor.addedAt || Date.now(),
updatedAt: oldAuthor.updatedAt || Date.now(),
libraryId
}
if (!oldDbIdMap.authors[libraryId]) oldDbIdMap.authors[libraryId] = {}
oldDbIdMap.authors[libraryId][oldAuthor.id] = Author.id
_newRecords.push(Author)
}
}
return _newRecords
}
/**
* Migrate Series
* Previously, series were shared between libraries; this ensures every series belongs to exactly one library
* @param {Array<objects.entities.Series>} oldSerieses
* @param {Array<objects.LibraryItem>} oldLibraryItems
* @returns {Array<object>} Array of Series model objs
*/
function migrateSeries(oldSerieses, oldLibraryItems) {
const _newRecords = []
// Originally, series were shared between libraries if they had the same name
// Series will now be separate per library
for (const oldSeries of oldSerieses) {
// Get an array of NEW library ids that have this series
const librariesWithThisSeries = [...new Set(oldLibraryItems.map(li => {
if (!li.media.metadata.series?.some(se => se.id === oldSeries.id)) return null
return oldDbIdMap.libraries[li.libraryId]
}).filter(lid => lid))]
if (!librariesWithThisSeries.length) {
Logger.error(`[dbMigration] Series ${oldSeries.name} was not found in any libraries`)
}
for (const libraryId of librariesWithThisSeries) {
const Series = {
id: uuidv4(),
name: oldSeries.name,
nameIgnorePrefix: getTitleIgnorePrefix(oldSeries.name),
description: oldSeries.description || null,
createdAt: oldSeries.addedAt || Date.now(),
updatedAt: oldSeries.updatedAt || Date.now(),
libraryId
}
if (!oldDbIdMap.series[libraryId]) oldDbIdMap.series[libraryId] = {}
oldDbIdMap.series[libraryId][oldSeries.id] = Series.id
_newRecords.push(Series)
}
}
return _newRecords
}
/**
* Migrate users to User and MediaProgress models
* @param {Array<objects.User>} oldUsers
* @returns {object} { user: [], mediaProgress: [] }
*/
function migrateUsers(oldUsers) {
const _newRecords = {
user: [],
mediaProgress: []
}
for (const oldUser of oldUsers) {
//
// Migrate User
//
// Convert old library ids to new ids
const librariesAccessible = (oldUser.librariesAccessible || []).map((lid) => oldDbIdMap.libraries[lid]).filter(li => li)
// Convert old library item ids to new ids
const bookmarks = (oldUser.bookmarks || []).map(bm => {
bm.libraryItemId = oldDbIdMap.libraryItems[bm.libraryItemId]
return bm
}).filter(bm => bm.libraryItemId)
// Convert old series ids to new
const seriesHideFromContinueListening = (oldUser.seriesHideFromContinueListening || []).map(oldSeriesId => {
// Series were split to be per library
// This will use the first series it finds
for (const libraryId in oldDbIdMap.series) {
if (oldDbIdMap.series[libraryId][oldSeriesId]) {
return oldDbIdMap.series[libraryId][oldSeriesId]
}
}
return null
}).filter(se => se)
const User = {
id: uuidv4(),
username: oldUser.username,
pash: oldUser.pash || null,
type: oldUser.type || null,
token: oldUser.token || null,
isActive: !!oldUser.isActive,
lastSeen: oldUser.lastSeen || null,
extraData: {
seriesHideFromContinueListening,
oldUserId: oldUser.id // Used to keep old tokens
},
createdAt: oldUser.createdAt || Date.now(),
permissions: {
...oldUser.permissions,
librariesAccessible,
itemTagsSelected: oldUser.itemTagsSelected || []
},
bookmarks
}
oldDbIdMap.users[oldUser.id] = User.id
_newRecords.user.push(User)
//
// Migrate MediaProgress
//
for (const oldMediaProgress of oldUser.mediaProgress) {
let mediaItemType = 'book'
let mediaItemId = null
if (oldMediaProgress.episodeId) {
mediaItemType = 'podcastEpisode'
mediaItemId = oldDbIdMap.podcastEpisodes[oldMediaProgress.episodeId]
} else {
mediaItemId = oldDbIdMap.books[oldMediaProgress.libraryItemId]
}
if (!mediaItemId) {
Logger.warn(`[dbMigration] migrateUsers: Unable to find media item for media progress "${oldMediaProgress.id}"`)
continue
}
const MediaProgress = {
id: uuidv4(),
mediaItemId,
mediaItemType,
duration: oldMediaProgress.duration,
currentTime: oldMediaProgress.currentTime,
ebookLocation: oldMediaProgress.ebookLocation || null,
ebookProgress: oldMediaProgress.ebookProgress || null,
isFinished: !!oldMediaProgress.isFinished,
hideFromContinueListening: !!oldMediaProgress.hideFromContinueListening,
finishedAt: oldMediaProgress.finishedAt,
createdAt: oldMediaProgress.startedAt || oldMediaProgress.lastUpdate,
updatedAt: oldMediaProgress.lastUpdate,
userId: User.id,
extraData: {
libraryItemId: oldDbIdMap.libraryItems[oldMediaProgress.libraryItemId],
progress: oldMediaProgress.progress
}
}
_newRecords.mediaProgress.push(MediaProgress)
}
}
return _newRecords
}
/**
* Migrate playbackSessions to PlaybackSession and Device models
* @param {Array<objects.PlaybackSession>} oldSessions
* @returns {object} { playbackSession: [], device: [] }
*/
function migrateSessions(oldSessions) {
const _newRecords = {
device: [],
playbackSession: []
}
for (const oldSession of oldSessions) {
const userId = oldDbIdMap.users[oldSession.userId]
if (!userId) {
Logger.info(`[dbMigration] Not migrating playback session ${oldSession.id} because user was not found`)
continue
}
//
// Migrate Device
//
let deviceId = null
if (oldSession.deviceInfo) {
const oldDeviceInfo = oldSession.deviceInfo
const deviceDeviceId = getDeviceInfoString(oldDeviceInfo, userId)
deviceId = oldDbIdMap.devices[deviceDeviceId]
if (!deviceId) {
let clientName = 'Unknown'
let clientVersion = null
let deviceName = null
let deviceVersion = oldDeviceInfo.browserVersion || null
let extraData = {}
if (oldDeviceInfo.sdkVersion) {
clientName = 'Abs Android'
clientVersion = oldDeviceInfo.clientVersion || null
deviceName = `${oldDeviceInfo.manufacturer} ${oldDeviceInfo.model}`
deviceVersion = oldDeviceInfo.sdkVersion
} else if (oldDeviceInfo.model) {
clientName = 'Abs iOS'
clientVersion = oldDeviceInfo.clientVersion || null
deviceName = `${oldDeviceInfo.manufacturer} ${oldDeviceInfo.model}`
} else if (oldDeviceInfo.osName && oldDeviceInfo.browserName) {
clientName = 'Abs Web'
clientVersion = oldDeviceInfo.serverVersion || null
deviceName = `${oldDeviceInfo.osName} ${oldDeviceInfo.osVersion || 'N/A'} ${oldDeviceInfo.browserName}`
}
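// e.g. (illustrative) a session with { sdkVersion: '33', manufacturer: 'Google', model: 'Pixel 6' }
// becomes clientName 'Abs Android' with deviceName 'Google Pixel 6' and deviceVersion '33'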
if (oldDeviceInfo.manufacturer) {
extraData.manufacturer = oldDeviceInfo.manufacturer
}
if (oldDeviceInfo.model) {
extraData.model = oldDeviceInfo.model
}
if (oldDeviceInfo.osName) {
extraData.osName = oldDeviceInfo.osName
}
if (oldDeviceInfo.osVersion) {
extraData.osVersion = oldDeviceInfo.osVersion
}
if (oldDeviceInfo.browserName) {
extraData.browserName = oldDeviceInfo.browserName
}
const id = uuidv4()
const Device = {
id,
deviceId: deviceDeviceId,
clientName,
clientVersion,
ipAddress: oldDeviceInfo.ipAddress,
deviceName, // e.g. Windows 10 Chrome, Google Pixel 6, Apple iPhone 10,3
deviceVersion,
userId,
extraData
}
deviceId = Device.id
_newRecords.device.push(Device)
oldDbIdMap.devices[deviceDeviceId] = Device.id
}
}
//
// Migrate PlaybackSession
//
let mediaItemId = null
let mediaItemType = 'book'
if (oldSession.mediaType === 'podcast') {
mediaItemId = oldDbIdMap.podcastEpisodes[oldSession.episodeId] || null
mediaItemType = 'podcastEpisode'
} else {
mediaItemId = oldDbIdMap.books[oldSession.libraryItemId] || null
}
const PlaybackSession = {
id: uuidv4(),
mediaItemId, // Can be null
mediaItemType,
libraryId: oldDbIdMap.libraries[oldSession.libraryId] || null,
displayTitle: oldSession.displayTitle,
displayAuthor: oldSession.displayAuthor,
duration: oldSession.duration,
playMethod: oldSession.playMethod,
mediaPlayer: oldSession.mediaPlayer,
startTime: oldSession.startTime,
currentTime: oldSession.currentTime,
serverVersion: oldSession.deviceInfo?.serverVersion || null,
createdAt: oldSession.startedAt,
updatedAt: oldSession.updatedAt,
userId,
deviceId,
timeListening: oldSession.timeListening,
coverPath: oldSession.coverPath,
mediaMetadata: oldSession.mediaMetadata,
date: oldSession.date,
dayOfWeek: oldSession.dayOfWeek,
extraData: {
libraryItemId: oldDbIdMap.libraryItems[oldSession.libraryItemId]
}
}
_newRecords.playbackSession.push(PlaybackSession)
}
return _newRecords
}
/**
* Migrate collections to Collection & CollectionBook
* @param {Array<objects.Collection>} oldCollections
* @returns {object} { collection: [], collectionBook: [] }
*/
function migrateCollections(oldCollections) {
const _newRecords = {
collection: [],
collectionBook: []
}
for (const oldCollection of oldCollections) {
const libraryId = oldDbIdMap.libraries[oldCollection.libraryId]
if (!libraryId) {
Logger.warn(`[dbMigration] migrateCollections: Library not found for collection "${oldCollection.name}" (id:${oldCollection.libraryId})`)
continue
}
const BookIds = oldCollection.books.map(lid => oldDbIdMap.books[lid]).filter(bid => bid)
if (!BookIds.length) {
Logger.warn(`[dbMigration] migrateCollections: Collection "${oldCollection.name}" has no books`)
continue
}
const Collection = {
id: uuidv4(),
name: oldCollection.name,
description: oldCollection.description,
createdAt: oldCollection.createdAt,
updatedAt: oldCollection.lastUpdate,
libraryId
}
oldDbIdMap.collections[oldCollection.id] = Collection.id
_newRecords.collection.push(Collection)
let order = 1
BookIds.forEach((bookId) => {
const CollectionBook = {
id: uuidv4(),
createdAt: Collection.createdAt,
bookId,
collectionId: Collection.id,
order: order++
}
_newRecords.collectionBook.push(CollectionBook)
})
}
return _newRecords
}
/**
* Migrate playlists to Playlist and PlaylistMediaItem
* @param {Array<objects.Playlist>} oldPlaylists
* @returns {object} { playlist: [], playlistMediaItem: [] }
*/
function migratePlaylists(oldPlaylists) {
const _newRecords = {
playlist: [],
playlistMediaItem: []
}
for (const oldPlaylist of oldPlaylists) {
const libraryId = oldDbIdMap.libraries[oldPlaylist.libraryId]
if (!libraryId) {
Logger.warn(`[dbMigration] migratePlaylists: Library not found for playlist "${oldPlaylist.name}" (id:${oldPlaylist.libraryId})`)
continue
}
const userId = oldDbIdMap.users[oldPlaylist.userId]
if (!userId) {
Logger.warn(`[dbMigration] migratePlaylists: User not found for playlist "${oldPlaylist.name}" (id:${oldPlaylist.userId})`)
continue
}
let mediaItemType = 'book'
let MediaItemIds = []
oldPlaylist.items.forEach((itemObj) => {
if (itemObj.episodeId) {
mediaItemType = 'podcastEpisode'
if (oldDbIdMap.podcastEpisodes[itemObj.episodeId]) {
MediaItemIds.push(oldDbIdMap.podcastEpisodes[itemObj.episodeId])
}
} else if (oldDbIdMap.books[itemObj.libraryItemId]) {
MediaItemIds.push(oldDbIdMap.books[itemObj.libraryItemId])
}
})
if (!MediaItemIds.length) {
Logger.warn(`[dbMigration] migratePlaylists: Playlist "${oldPlaylist.name}" has no items`)
continue
}
const Playlist = {
id: uuidv4(),
name: oldPlaylist.name,
description: oldPlaylist.description,
createdAt: oldPlaylist.createdAt,
updatedAt: oldPlaylist.lastUpdate,
userId,
libraryId
}
_newRecords.playlist.push(Playlist)
let order = 1
MediaItemIds.forEach((mediaItemId) => {
const PlaylistMediaItem = {
id: uuidv4(),
mediaItemId,
mediaItemType,
createdAt: Playlist.createdAt,
playlistId: Playlist.id,
order: order++
}
_newRecords.playlistMediaItem.push(PlaylistMediaItem)
})
}
return _newRecords
}
/**
* Migrate feeds to Feed and FeedEpisode models
* @param {Array<objects.Feed>} oldFeeds
* @returns {object} { feed: [], feedEpisode: [] }
*/
function migrateFeeds(oldFeeds) {
const _newRecords = {
feed: [],
feedEpisode: []
}
for (const oldFeed of oldFeeds) {
if (!oldFeed.episodes?.length) {
continue
}
let entityId = null
if (oldFeed.entityType === 'collection') {
entityId = oldDbIdMap.collections[oldFeed.entityId]
} else if (oldFeed.entityType === 'libraryItem') {
entityId = oldDbIdMap.libraryItems[oldFeed.entityId]
} else if (oldFeed.entityType === 'series') {
// Series were split to be per library
// This will use the first series it finds
for (const libraryId in oldDbIdMap.series) {
if (oldDbIdMap.series[libraryId][oldFeed.entityId]) {
entityId = oldDbIdMap.series[libraryId][oldFeed.entityId]
break
}
}
}
if (!entityId) {
Logger.warn(`[dbMigration] migrateFeeds: Entity not found for feed "${oldFeed.entityType}" (id:${oldFeed.entityId})`)
continue
}
const userId = oldDbIdMap.users[oldFeed.userId]
if (!userId) {
Logger.warn(`[dbMigration] migrateFeeds: User not found for feed (id:${oldFeed.userId})`)
continue
}
const oldFeedMeta = oldFeed.meta
const Feed = {
id: uuidv4(),
slug: oldFeed.slug,
entityType: oldFeed.entityType,
entityId,
entityUpdatedAt: oldFeed.entityUpdatedAt,
serverAddress: oldFeed.serverAddress,
feedURL: oldFeed.feedUrl,
coverPath: oldFeed.coverPath || null,
imageURL: oldFeedMeta.imageUrl,
siteURL: oldFeedMeta.link,
title: oldFeedMeta.title,
description: oldFeedMeta.description,
author: oldFeedMeta.author,
podcastType: oldFeedMeta.type || null,
language: oldFeedMeta.language || null,
ownerName: oldFeedMeta.ownerName || null,
ownerEmail: oldFeedMeta.ownerEmail || null,
explicit: !!oldFeedMeta.explicit,
preventIndexing: !!oldFeedMeta.preventIndexing,
createdAt: oldFeed.createdAt,
updatedAt: oldFeed.updatedAt,
userId
}
_newRecords.feed.push(Feed)
//
// Migrate FeedEpisodes
//
for (const oldFeedEpisode of oldFeed.episodes) {
const FeedEpisode = {
id: uuidv4(),
title: oldFeedEpisode.title,
author: oldFeedEpisode.author,
description: oldFeedEpisode.description,
siteURL: oldFeedEpisode.link,
enclosureURL: oldFeedEpisode.enclosure?.url || null,
enclosureType: oldFeedEpisode.enclosure?.type || null,
enclosureSize: oldFeedEpisode.enclosure?.size || null,
pubDate: oldFeedEpisode.pubDate,
season: oldFeedEpisode.season || null,
episode: oldFeedEpisode.episode || null,
episodeType: oldFeedEpisode.episodeType || null,
duration: oldFeedEpisode.duration,
filePath: oldFeedEpisode.fullPath,
explicit: !!oldFeedEpisode.explicit,
createdAt: oldFeed.createdAt,
updatedAt: oldFeed.updatedAt,
feedId: Feed.id
}
_newRecords.feedEpisode.push(FeedEpisode)
}
}
return _newRecords
}
/**
* Migrate ServerSettings, NotificationSettings and EmailSettings to Setting model
* @param {Array<objects.settings.*>} oldSettings
* @returns {Array<object>} Array of Setting model objs
*/
function migrateSettings(oldSettings) {
const _newRecords = []
const serverSettings = oldSettings.find(s => s.id === 'server-settings')
const notificationSettings = oldSettings.find(s => s.id === 'notification-settings')
const emailSettings = oldSettings.find(s => s.id === 'email-settings')
if (serverSettings) {
_newRecords.push({
key: 'server-settings',
value: serverSettings
})
if (serverSettings.sortingPrefixes?.length) {
// Used for migrating titles/names
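// e.g. sortingPrefixes ['the', 'a'] would make "A Game of Thrones" sort as "Game of Thrones"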
prefixesToIgnore = serverSettings.sortingPrefixes
}
}
if (notificationSettings) {
_newRecords.push({
key: 'notification-settings',
value: notificationSettings
})
}
if (emailSettings) {
_newRecords.push({
key: 'email-settings',
value: emailSettings
})
}
return _newRecords
}
/**
* Load old libraries and bulkCreate new Library and LibraryFolder rows
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateLibraries(DatabaseModels) {
const oldLibraries = await oldDbFiles.loadOldData('libraries')
const newLibraryRecords = migrateLibraries(oldLibraries)
for (const model in newLibraryRecords) {
Logger.info(`[dbMigration] Inserting ${newLibraryRecords[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newLibraryRecords[model])
}
}
/**
* Load old EmailSettings, NotificationSettings and ServerSettings and bulkCreate new Setting rows
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateSettings(DatabaseModels) {
const oldSettings = await oldDbFiles.loadOldData('settings')
const newSettings = migrateSettings(oldSettings)
Logger.info(`[dbMigration] Inserting ${newSettings.length} setting rows`)
await DatabaseModels.setting.bulkCreate(newSettings)
}
/**
* Load old authors and bulkCreate new Author rows
* @param {Map<string,Model>} DatabaseModels
* @param {Array<objects.LibraryItem>} oldLibraryItems
*/
async function handleMigrateAuthors(DatabaseModels, oldLibraryItems) {
const oldAuthors = await oldDbFiles.loadOldData('authors')
const newAuthors = migrateAuthors(oldAuthors, oldLibraryItems)
Logger.info(`[dbMigration] Inserting ${newAuthors.length} author rows`)
await DatabaseModels.author.bulkCreate(newAuthors)
}
/**
* Load old series and bulkCreate new Series rows
* @param {Map<string,Model>} DatabaseModels
* @param {Array<objects.LibraryItem>} oldLibraryItems
*/
async function handleMigrateSeries(DatabaseModels, oldLibraryItems) {
const oldSeries = await oldDbFiles.loadOldData('series')
const newSeries = migrateSeries(oldSeries, oldLibraryItems)
Logger.info(`[dbMigration] Inserting ${newSeries.length} series rows`)
await DatabaseModels.series.bulkCreate(newSeries)
}
/**
* bulkCreate new LibraryItem, Book and Podcast rows
* @param {Map<string,Model>} DatabaseModels
* @param {Array<objects.LibraryItem>} oldLibraryItems
*/
async function handleMigrateLibraryItems(DatabaseModels, oldLibraryItems) {
const newItemsBooksPodcasts = migrateLibraryItems(oldLibraryItems)
for (const model in newItemsBooksPodcasts) {
Logger.info(`[dbMigration] Inserting ${newItemsBooksPodcasts[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newItemsBooksPodcasts[model])
}
}
/**
* Migrate authors, series then library items in chunks
* Authors and series require old library items loaded first
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateAuthorsSeriesAndLibraryItems(DatabaseModels) {
const oldLibraryItems = await oldDbFiles.loadOldData('libraryItems')
await handleMigrateAuthors(DatabaseModels, oldLibraryItems)
await handleMigrateSeries(DatabaseModels, oldLibraryItems)
// Migrate library items in chunks of 1000
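// e.g. 2500 old library items => 3 chunks: items 0-999, 1000-1999, 2000-2499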
const numChunks = Math.ceil(oldLibraryItems.length / 1000)
for (let i = 0; i < numChunks; i++) {
let start = i * 1000
await handleMigrateLibraryItems(DatabaseModels, oldLibraryItems.slice(start, start + 1000))
}
}
/**
* Load old users and bulkCreate new User rows
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateUsers(DatabaseModels) {
const oldUsers = await oldDbFiles.loadOldData('users')
const newUserRecords = migrateUsers(oldUsers)
for (const model in newUserRecords) {
Logger.info(`[dbMigration] Inserting ${newUserRecords[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newUserRecords[model])
}
}
/**
* Load old sessions and bulkCreate new PlaybackSession & Device rows
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateSessions(DatabaseModels) {
const oldSessions = await oldDbFiles.loadOldData('sessions')
let chunkSize = 1000
let numChunks = Math.ceil(oldSessions.length / chunkSize)
for (let i = 0; i < numChunks; i++) {
let start = i * chunkSize
const newSessionRecords = migrateSessions(oldSessions.slice(start, start + chunkSize))
for (const model in newSessionRecords) {
Logger.info(`[dbMigration] Inserting ${newSessionRecords[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newSessionRecords[model])
}
}
}
/**
* Load old collections and bulkCreate new Collection, CollectionBook models
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateCollections(DatabaseModels) {
const oldCollections = await oldDbFiles.loadOldData('collections')
const newCollectionRecords = migrateCollections(oldCollections)
for (const model in newCollectionRecords) {
Logger.info(`[dbMigration] Inserting ${newCollectionRecords[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newCollectionRecords[model])
}
}
/**
* Load old playlists and bulkCreate new Playlist, PlaylistMediaItem models
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigratePlaylists(DatabaseModels) {
const oldPlaylists = await oldDbFiles.loadOldData('playlists')
const newPlaylistRecords = migratePlaylists(oldPlaylists)
for (const model in newPlaylistRecords) {
Logger.info(`[dbMigration] Inserting ${newPlaylistRecords[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newPlaylistRecords[model])
}
}
/**
* Load old feeds and bulkCreate new Feed, FeedEpisode models
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateFeeds(DatabaseModels) {
const oldFeeds = await oldDbFiles.loadOldData('feeds')
const newFeedRecords = migrateFeeds(oldFeeds)
for (const model in newFeedRecords) {
Logger.info(`[dbMigration] Inserting ${newFeedRecords[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newFeedRecords[model])
}
}
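/**
 * Migrate all data from the old file-based db to the new database models
 * @param {Map<string,Model>} DatabaseModels
 */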
module.exports.migrate = async (DatabaseModels) => {
Logger.info(`[dbMigration] Starting migration`)
const start = Date.now()
// Migrate to Library and LibraryFolder models
await handleMigrateLibraries(DatabaseModels)
// Migrate EmailSettings, NotificationSettings and ServerSettings to Setting model
await handleMigrateSettings(DatabaseModels)
// Migrate Series, Author, LibraryItem, Book, Podcast
await handleMigrateAuthorsSeriesAndLibraryItems(DatabaseModels)
// Migrate User, MediaProgress
await handleMigrateUsers(DatabaseModels)
// Migrate PlaybackSession, Device
await handleMigrateSessions(DatabaseModels)
// Migrate Collection, CollectionBook
await handleMigrateCollections(DatabaseModels)
// Migrate Playlist, PlaylistMediaItem
await handleMigratePlaylists(DatabaseModels)
// Migrate Feed, FeedEpisode
await handleMigrateFeeds(DatabaseModels)
// Purge author images and cover images from cache
try {
const CachePath = Path.join(global.MetadataPath, 'cache')
await fs.emptyDir(Path.join(CachePath, 'covers'))
await fs.emptyDir(Path.join(CachePath, 'images'))
} catch (error) {
Logger.error(`[dbMigration] Failed to purge author/cover image cache`, error)
}
// Put all old db folders into a zipfile oldDb.zip
await oldDbFiles.zipWrapOldDb()
const elapsed = Date.now() - start
Logger.info(`[dbMigration] Migration complete. Elapsed ${(elapsed / 1000).toFixed(2)}s`)
}
/**
* @returns {boolean} true if old database exists
*/
module.exports.checkShouldMigrate = async () => {
if (await oldDbFiles.checkHasOldDb()) return true
return oldDbFiles.checkHasOldDbZip()
}
/**
* Migration from 2.3.0 to 2.3.1 - create extraData columns in LibraryItem and PodcastEpisode
* @param {QueryInterface} queryInterface
*/
async function migrationPatchNewColumns(queryInterface) {
try {
return await queryInterface.sequelize.transaction(t => { // await so the catch below handles transaction failures
return Promise.all([
queryInterface.addColumn('libraryItems', 'extraData', {
type: DataTypes.JSON
}, { transaction: t }),
queryInterface.addColumn('podcastEpisodes', 'extraData', {
type: DataTypes.JSON
}, { transaction: t }),
queryInterface.addColumn('libraries', 'extraData', {
type: DataTypes.JSON
}, { transaction: t })
])
})
} catch (error) {
Logger.error(`[dbMigration] Migration from 2.3.0+ column creation failed`, error)
return false
}
}
/**
* Migration from 2.3.0 to 2.3.1 - old library item ids
* @param {/src/Database} ctx
*/
async function handleOldLibraryItems(ctx) {
const oldLibraryItems = await oldDbFiles.loadOldData('libraryItems')
const libraryItems = await ctx.models.libraryItem.getAllOldLibraryItems()
const bulkUpdateItems = []
const bulkUpdateEpisodes = []
for (const libraryItem of libraryItems) {
// Find matching old library item by ino
const matchingOldLibraryItem = oldLibraryItems.find(oli => oli.ino === libraryItem.ino)
if (matchingOldLibraryItem) {
oldDbIdMap.libraryItems[matchingOldLibraryItem.id] = libraryItem.id
bulkUpdateItems.push({
id: libraryItem.id,
extraData: {
oldLibraryItemId: matchingOldLibraryItem.id
}
})
if (libraryItem.media.episodes?.length && matchingOldLibraryItem.media.episodes?.length) {
for (const podcastEpisode of libraryItem.media.episodes) {
// Find matching old episode by audio file ino
const matchingOldPodcastEpisode = matchingOldLibraryItem.media.episodes.find(oep => oep.audioFile?.ino && oep.audioFile.ino === podcastEpisode.audioFile?.ino)
if (matchingOldPodcastEpisode) {
oldDbIdMap.podcastEpisodes[matchingOldPodcastEpisode.id] = podcastEpisode.id
bulkUpdateEpisodes.push({
id: podcastEpisode.id,
extraData: {
oldEpisodeId: matchingOldPodcastEpisode.id
}
})
}
}
}
}
}
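// Note: bulkCreate with updateOnDuplicate acts as an upsert - rows matching an existing
// primary key only have their extraData column updated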
if (bulkUpdateEpisodes.length) {
await ctx.models.podcastEpisode.bulkCreate(bulkUpdateEpisodes, {
updateOnDuplicate: ['extraData']
})
}
if (bulkUpdateItems.length) {
await ctx.models.libraryItem.bulkCreate(bulkUpdateItems, {
updateOnDuplicate: ['extraData']
})
}
Logger.info(`[dbMigration] Migration 2.3.0+: Updated ${bulkUpdateItems.length} library items & ${bulkUpdateEpisodes.length} episodes`)
}
/**
* Migration from 2.3.0 to 2.3.1 - updating oldLibraryId
* @param {/src/Database} ctx
*/
async function handleOldLibraries(ctx) {
const oldLibraries = await oldDbFiles.loadOldData('libraries')
const libraries = await ctx.models.library.getAllOldLibraries()
let librariesUpdated = 0
for (const library of libraries) {
// Find matching old library using exact match on folder paths, exact match on library name
const matchingOldLibrary = oldLibraries.find(ol => {
if (ol.name !== library.name) {
return false
}
const folderPaths = ol.folders?.map(f => f.fullPath) || []
return folderPaths.join(',') === library.folderPaths.join(',')
})
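// e.g. old folders ['/audiobooks', '/podcasts'] only match a new library whose
// folderPaths are exactly ['/audiobooks', '/podcasts'] in the same order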
if (matchingOldLibrary) {
library.oldLibraryId = matchingOldLibrary.id
oldDbIdMap.libraries[library.oldLibraryId] = library.id
await ctx.models.library.updateFromOld(library)
librariesUpdated++
}
}
Logger.info(`[dbMigration] Migration 2.3.0+: Updated ${librariesUpdated} libraries`)
}
/**
* Migration from 2.3.0 to 2.3.1 - fixing librariesAccessible and bookmarks
* @param {/src/Database} ctx
*/
async function handleOldUsers(ctx) {
const users = await ctx.models.user.getOldUsers()
let usersUpdated = 0
for (const user of users) {
let hasUpdates = false
if (user.bookmarks?.length) {
user.bookmarks = user.bookmarks.map(bm => {
// Only update bookmarks still using the old id format (prefixed with "li_")
if (!bm.libraryItemId.startsWith('li_')) return bm
bm.libraryItemId = oldDbIdMap.libraryItems[bm.libraryItemId]
hasUpdates = true
return bm
}).filter(bm => bm.libraryItemId)
}
// Convert old library ids to new library ids
if (user.librariesAccessible?.length) {
user.librariesAccessible = user.librariesAccessible.map(lid => {
if (!lid.startsWith('lib_') && lid !== 'main') return lid // Already a new library id, so don't change it
hasUpdates = true
return oldDbIdMap.libraries[lid]
}).filter(lid => lid)
}
if (user.seriesHideFromContinueListening?.length) {
user.seriesHideFromContinueListening = user.seriesHideFromContinueListening.map((seriesId) => {
if (seriesId.startsWith('se_')) {
hasUpdates = true
return null // Filter out old series ids
}
return seriesId
}).filter(se => se)
}
if (hasUpdates) {
await ctx.models.user.updateFromOld(user)
usersUpdated++
}
}
Logger.info(`[dbMigration] Migration 2.3.0+: Updated ${usersUpdated} users`)
}
/**
* Migration from 2.3.0 to 2.3.1
* @param {/src/Database} ctx
*/
module.exports.migrationPatch = async (ctx) => {
const queryInterface = ctx.sequelize.getQueryInterface()
const librariesTableDescription = await queryInterface.describeTable('libraries')
if (librariesTableDescription?.extraData) {
Logger.info(`[dbMigration] Migration patch 2.3.0+ - extraData columns already on model`)
} else {
const migrationResult = await migrationPatchNewColumns(queryInterface)
if (migrationResult === false) {
return
}
}
const oldDbPath = Path.join(global.ConfigPath, 'oldDb.zip')
if (!await fs.pathExists(oldDbPath)) {
Logger.info(`[dbMigration] Migration patch 2.3.0+ unnecessary - no oldDb.zip found`)
return
}
const migrationStart = Date.now()
Logger.info(`[dbMigration] Applying migration patch from 2.3.0+`)
// Extract from oldDb.zip
if (!await oldDbFiles.checkExtractItemsUsersAndLibraries()) {
return
}
await handleOldLibraryItems(ctx)
await handleOldLibraries(ctx)
await handleOldUsers(ctx)
await oldDbFiles.removeOldItemsUsersAndLibrariesFolders()
const elapsed = Date.now() - migrationStart
Logger.info(`[dbMigration] Migration patch 2.3.0+ finished. Elapsed ${(elapsed / 1000).toFixed(2)}s`)
}
/**
* Migration from 2.3.3 to 2.3.4
* Populating the size column on libraryItem
* @param {/src/Database} ctx
* @param {number} offset
*/
async function migrationPatch2LibraryItems(ctx, offset = 0) {
const libraryItems = await ctx.models.libraryItem.findAll({
limit: 500,
offset
})
if (!libraryItems.length) return
const bulkUpdateItems = []
for (const libraryItem of libraryItems) {
if (libraryItem.libraryFiles?.length) {
let size = 0
libraryItem.libraryFiles.forEach(lf => {
if (!isNaN(lf.metadata?.size)) {
size += Number(lf.metadata.size)
}
})
bulkUpdateItems.push({
id: libraryItem.id,
size
})
}
}
if (bulkUpdateItems.length) {
Logger.info(`[dbMigration] Migration patch 2.3.3+ - patching ${bulkUpdateItems.length} library items`)
await ctx.models.libraryItem.bulkCreate(bulkUpdateItems, {
updateOnDuplicate: ['size']
})
}
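// A page with fewer than 500 rows was the last page; otherwise recurse for the next offset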
if (libraryItems.length < 500) {
return
}
return migrationPatch2LibraryItems(ctx, offset + libraryItems.length)
}
/**
* Migration from 2.3.3 to 2.3.4
* Populating the duration & titleIgnorePrefix column on book
* @param {/src/Database} ctx
* @param {number} offset
*/
async function migrationPatch2Books(ctx, offset = 0) {
const books = await ctx.models.book.findAll({
limit: 500,
offset
})
if (!books.length) return
const bulkUpdateItems = []
for (const book of books) {
let duration = 0
if (book.audioFiles?.length) {
const tracks = book.audioFiles.filter(af => !af.exclude && !af.invalid)
for (const track of tracks) {
if (track.duration !== null && !isNaN(track.duration)) {
duration += track.duration
}
}
}
bulkUpdateItems.push({
id: book.id,
titleIgnorePrefix: getTitleIgnorePrefix(book.title),
duration
})
}
if (bulkUpdateItems.length) {
Logger.info(`[dbMigration] Migration patch 2.3.3+ - patching ${bulkUpdateItems.length} books`)
await ctx.models.book.bulkCreate(bulkUpdateItems, {
updateOnDuplicate: ['duration', 'titleIgnorePrefix']
})
}
if (books.length < 500) {
return
}
return migrationPatch2Books(ctx, offset + books.length)
}
/**
* Migration from 2.3.3 to 2.3.4
* Populating the titleIgnorePrefix column on podcast
* @param {/src/Database} ctx
* @param {number} offset
*/
async function migrationPatch2Podcasts(ctx, offset = 0) {
const podcasts = await ctx.models.podcast.findAll({
limit: 500,
offset
})
if (!podcasts.length) return
const bulkUpdateItems = []
for (const podcast of podcasts) {
bulkUpdateItems.push({
id: podcast.id,
titleIgnorePrefix: getTitleIgnorePrefix(podcast.title)
})
}
if (bulkUpdateItems.length) {
Logger.info(`[dbMigration] Migration patch 2.3.3+ - patching ${bulkUpdateItems.length} podcasts`)
await ctx.models.podcast.bulkCreate(bulkUpdateItems, {
updateOnDuplicate: ['titleIgnorePrefix']
})
}
if (podcasts.length < 500) {
return
}
return migrationPatch2Podcasts(ctx, offset + podcasts.length)
}
/**
* Migration from 2.3.3 to 2.3.4
* Populating the nameIgnorePrefix column on series
* @param {/src/Database} ctx
* @param {number} offset
*/
async function migrationPatch2Series(ctx, offset = 0) {
const allSeries = await ctx.models.series.findAll({
limit: 500,
offset
})
if (!allSeries.length) return
const bulkUpdateItems = []
for (const series of allSeries) {
bulkUpdateItems.push({
id: series.id,
nameIgnorePrefix: getTitleIgnorePrefix(series.name)
})
}
if (bulkUpdateItems.length) {
Logger.info(`[dbMigration] Migration patch 2.3.3+ - patching ${bulkUpdateItems.length} series`)
await ctx.models.series.bulkCreate(bulkUpdateItems, {
updateOnDuplicate: ['nameIgnorePrefix']
})
}
if (allSeries.length < 500) {
return
}
return migrationPatch2Series(ctx, offset + allSeries.length)
}
/**
* Migration from 2.3.3 to 2.3.4
* Populating the lastFirst column on author
* @param {/src/Database} ctx
* @param {number} offset
*/
async function migrationPatch2Authors(ctx, offset = 0) {
const authors = await ctx.models.author.findAll({
limit: 500,
offset
})
if (!authors.length) return
const bulkUpdateItems = []
for (const author of authors) {
if (author.name?.trim()) {
bulkUpdateItems.push({
id: author.id,
lastFirst: parseNameString.nameToLastFirst(author.name)
})
}
}
if (bulkUpdateItems.length) {
Logger.info(`[dbMigration] Migration patch 2.3.3+ - patching ${bulkUpdateItems.length} authors`)
await ctx.models.author.bulkCreate(bulkUpdateItems, {
updateOnDuplicate: ['lastFirst']
})
}
if (authors.length < 500) {
return
}
return migrationPatch2Authors(ctx, offset + authors.length)
}
/**
* Migration from 2.3.3 to 2.3.4
* Populating the createdAt column on bookAuthor
* @param {/src/Database} ctx
* @param {number} offset
*/
async function migrationPatch2BookAuthors(ctx, offset = 0) {
const bookAuthors = await ctx.models.bookAuthor.findAll({
include: {
model: ctx.models.author
},
limit: 500,
offset
})
if (!bookAuthors.length) return
const bulkUpdateItems = []
for (const bookAuthor of bookAuthors) {
if (bookAuthor.author?.createdAt) {
const dateString = bookAuthor.author.createdAt.toISOString().replace('T', ' ').replace('Z', '')
bulkUpdateItems.push(`("${bookAuthor.id}","${dateString}")`)
}
}
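// Raw SQLite upsert: ON CONFLICT(id) DO UPDATE only backfills createdAt on existing rows,
// leaving all other columns untouched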
if (bulkUpdateItems.length) {
Logger.info(`[dbMigration] Migration patch 2.3.3+ - patching ${bulkUpdateItems.length} bookAuthors`)
await ctx.sequelize.query(`INSERT INTO bookAuthors ('id','createdAt') VALUES ${bulkUpdateItems.join(',')} ON CONFLICT(id) DO UPDATE SET 'createdAt' = EXCLUDED.createdAt;`)
}
if (bookAuthors.length < 500) {
return
}
return migrationPatch2BookAuthors(ctx, offset + bookAuthors.length)
}
/**
* Migration from 2.3.3 to 2.3.4
* Populating the createdAt column on bookSeries
* @param {/src/Database} ctx
* @param {number} offset
*/
async function migrationPatch2BookSeries(ctx, offset = 0) {
const allBookSeries = await ctx.models.bookSeries.findAll({
include: {
model: ctx.models.series
},
limit: 500,
offset
})
if (!allBookSeries.length) return
const bulkUpdateItems = []
for (const bookSeries of allBookSeries) {
if (bookSeries.series?.createdAt) {
const dateString = bookSeries.series.createdAt.toISOString().replace('T', ' ').replace('Z', '')
bulkUpdateItems.push(`("${bookSeries.id}","${dateString}")`)
}
}
if (bulkUpdateItems.length) {
Logger.info(`[dbMigration] Migration patch 2.3.3+ - patching ${bulkUpdateItems.length} bookSeries`)
await ctx.sequelize.query(`INSERT INTO bookSeries ('id','createdAt') VALUES ${bulkUpdateItems.join(',')} ON CONFLICT(id) DO UPDATE SET 'createdAt' = EXCLUDED.createdAt;`)
}
if (allBookSeries.length < 500) {
return
}
return migrationPatch2BookSeries(ctx, offset + allBookSeries.length)
}
/**
* Migration from 2.3.3 to 2.3.4
* Adding coverPath column to Feed model
* @param {/src/Database} ctx
*/
module.exports.migrationPatch2 = async (ctx) => {
const queryInterface = ctx.sequelize.getQueryInterface()
const feedTableDescription = await queryInterface.describeTable('feeds')
const authorsTableDescription = await queryInterface.describeTable('authors')
const bookAuthorsTableDescription = await queryInterface.describeTable('bookAuthors')
if (feedTableDescription?.coverPath && authorsTableDescription?.lastFirst && bookAuthorsTableDescription?.createdAt) {
Logger.info(`[dbMigration] Migration patch 2.3.3+ - columns already on model`)
return false
}
Logger.info(`[dbMigration] Applying migration patch from 2.3.3+`)
try {
await queryInterface.sequelize.transaction(t => {
const queries = [
queryInterface.addColumn('bookAuthors', 'createdAt', {
type: DataTypes.DATE
}, { transaction: t }),
queryInterface.addColumn('bookSeries', 'createdAt', {
type: DataTypes.DATE
}, { transaction: t }),
]
if (!authorsTableDescription?.lastFirst) {
queries.push(...[
queryInterface.addColumn('authors', 'lastFirst', {
type: DataTypes.STRING
}, { transaction: t }),
queryInterface.addColumn('libraryItems', 'size', {
type: DataTypes.BIGINT
}, { transaction: t }),
queryInterface.addColumn('books', 'duration', {
type: DataTypes.FLOAT
}, { transaction: t }),
queryInterface.addColumn('books', 'titleIgnorePrefix', {
type: DataTypes.STRING
}, { transaction: t }),
queryInterface.addColumn('podcasts', 'titleIgnorePrefix', {
type: DataTypes.STRING
}, { transaction: t }),
queryInterface.addColumn('series', 'nameIgnorePrefix', {
type: DataTypes.STRING
}, { transaction: t }),
])
}
if (!feedTableDescription?.coverPath) {
queries.push(queryInterface.addColumn('feeds', 'coverPath', {
type: DataTypes.STRING
}, { transaction: t }))
}
return Promise.all(queries)
})
if (!authorsTableDescription?.lastFirst) {
if (global.ServerSettings.sortingPrefixes?.length) {
prefixesToIgnore = global.ServerSettings.sortingPrefixes
}
// Patch library items size column
await migrationPatch2LibraryItems(ctx, 0)
// Patch books duration & titleIgnorePrefix column
await migrationPatch2Books(ctx, 0)
// Patch podcasts titleIgnorePrefix column
await migrationPatch2Podcasts(ctx, 0)
// Patch authors lastFirst column
await migrationPatch2Authors(ctx, 0)
// Patch series nameIgnorePrefix column
await migrationPatch2Series(ctx, 0)
}
// Patch bookAuthors createdAt column
await migrationPatch2BookAuthors(ctx, 0)
// Patch bookSeries createdAt column
await migrationPatch2BookSeries(ctx, 0)
Logger.info(`[dbMigration] Migration patch 2.3.3+ finished`)
return true
} catch (error) {
Logger.error(`[dbMigration] Migration from 2.3.3+ column creation failed`, error)
throw new Error('Migration 2.3.3+ failed ' + error)
}
}