diff --git a/frontend/.gitignore b/frontend/.gitignore index 4d29575de..800f3a80c 100644 --- a/frontend/.gitignore +++ b/frontend/.gitignore @@ -10,6 +10,7 @@ # production /build +/dist # misc .DS_Store diff --git a/frontend/public/locales/en-GB/translation.json b/frontend/public/locales/en-GB/translation.json index 570c2e697..f34301dc4 100644 --- a/frontend/public/locales/en-GB/translation.json +++ b/frontend/public/locales/en-GB/translation.json @@ -1576,7 +1576,13 @@ "dragDrop": "Drag & Drop files here", "clickToUpload": "Click to upload files", "selectedFiles": "Selected Files", - "clearAll": "Clear All" + "clearAll": "Clear All", + "storage": "Storage", + "filesStored": "files stored", + "storageError": "Storage error occurred", + "storageLow": "Storage is running low. Consider removing old files.", + "uploadError": "Failed to upload some files.", + "supportMessage": "Powered by browser database storage for unlimited capacity" }, "pageEditor": { "title": "Page Editor", diff --git a/frontend/src/components/FileCard.standalone.tsx b/frontend/src/components/FileCard.standalone.tsx new file mode 100644 index 000000000..4d140689b --- /dev/null +++ b/frontend/src/components/FileCard.standalone.tsx @@ -0,0 +1,136 @@ +import React from "react"; +import { Card, Stack, Text, Group, Badge, Button, Box, Image, ThemeIcon } from "@mantine/core"; +import { useTranslation } from "react-i18next"; +import PictureAsPdfIcon from "@mui/icons-material/PictureAsPdf"; +import StorageIcon from "@mui/icons-material/Storage"; + +import { FileWithUrl } from "../types/file"; +import { getFileSize, getFileDate } from "../utils/fileUtils"; +import { useIndexedDBThumbnail } from "../hooks/useIndexedDBThumbnail"; + +interface FileCardProps { + file: FileWithUrl; + onRemove: () => void; + onDoubleClick?: () => void; +} + +const FileCard: React.FC = ({ file, onRemove, onDoubleClick }) => { + const { t } = useTranslation(); + const { thumbnail: thumb, isGenerating } = useIndexedDBThumbnail(file); + + return ( + + + + {thumb ? ( + PDF thumbnail + ) : isGenerating ? ( +
+            <Text size="sm" c="dimmed">Generating...</Text>
+        ) : (
+          <>
+            <ThemeIcon
+              color={file.size > 100 * 1024 * 1024 ? "orange" : "red"}
+              size={60}
+              radius="sm"
+              style={{ display: "flex", alignItems: "center", justifyContent: "center" }}
+            >
+              <PictureAsPdfIcon />
+            </ThemeIcon>
+            {file.size > 100 * 1024 * 1024 && (
+              <Badge color="orange" size="xs">Large File</Badge>
+            )}
+          </>
+ )} + + + + {file.name} + + + + + {getFileSize(file)} + + + {getFileDate(file)} + + {file.storedInIndexedDB && ( + } + > + DB + + )} + + + + + + ); +}; + +export default FileCard; \ No newline at end of file diff --git a/frontend/src/components/FileManager.tsx b/frontend/src/components/FileManager.tsx index 9eeb8315e..b436cd87b 100644 --- a/frontend/src/components/FileManager.tsx +++ b/frontend/src/components/FileManager.tsx @@ -1,134 +1,20 @@ import React, { useState, useEffect } from "react"; -import { Card, Group, Text, Stack, Image, Badge, Button, Box, Flex, ThemeIcon } from "@mantine/core"; +import { Box, Flex, Text, Notification } from "@mantine/core"; import { Dropzone, MIME_TYPES } from "@mantine/dropzone"; import { useTranslation } from "react-i18next"; -import PictureAsPdfIcon from "@mui/icons-material/PictureAsPdf"; -import { GlobalWorkerOptions, getDocument } from "pdfjs-dist"; +import { GlobalWorkerOptions } from "pdfjs-dist"; +import { StorageStats } from "../services/fileStorage"; +import { FileWithUrl, defaultStorageConfig } from "../types/file"; + +// Refactored imports +import { fileOperationsService } from "../services/fileOperationsService"; +import { checkStorageWarnings } from "../utils/storageUtils"; +import StorageStatsCard from "./StorageStatsCard"; +import FileCard from "./FileCard.standalone"; + GlobalWorkerOptions.workerSrc = "/pdf.worker.js"; -export interface FileWithUrl extends File { - url?: string; - file?: File; -} - -function getFileDate(file: File): string { - if (file.lastModified) { - return new Date(file.lastModified).toLocaleString(); - } - return "Unknown"; -} - -function getFileSize(file: File): string { - if (!file.size) return "Unknown"; - if (file.size < 1024) return `${file.size} B`; - if (file.size < 1024 * 1024) return `${(file.size / 1024).toFixed(1)} KB`; - return `${(file.size / (1024 * 1024)).toFixed(2)} MB`; -} - -function usePdfThumbnail(file: File | undefined | null): string | null { - const [thumb, setThumb] = useState(null); - - useEffect(() => { - let cancelled = false; - async function generate() { - if (!file) return; - try { - const arrayBuffer = await file.arrayBuffer(); - const pdf = await getDocument({ data: arrayBuffer }).promise; - const page = await pdf.getPage(1); - const viewport = page.getViewport({ scale: 0.5 }); - const canvas = document.createElement("canvas"); - canvas.width = viewport.width; - canvas.height = viewport.height; - const context = canvas.getContext("2d"); - if (context) { - await page.render({ canvasContext: context, viewport }).promise; - if (!cancelled) setThumb(canvas.toDataURL()); - } - } catch { - if (!cancelled) setThumb(null); - } - } - generate(); - return () => { cancelled = true; }; - }, [file]); - - return thumb; -} - -interface FileCardProps { - file: File; - onRemove: () => void; - onDoubleClick?: () => void; -} - -function FileCard({ file, onRemove, onDoubleClick }: FileCardProps) { - const { t } = useTranslation(); - const thumb = usePdfThumbnail(file); - - return ( - - - - {thumb ? ( - PDF thumbnail - ) : ( - - - - )} - - - {file.name} - - - - {getFileSize(file)} - - - {getFileDate(file)} - - - - - - ); -} - interface FileManagerProps { files: FileWithUrl[]; setFiles: React.Dispatch>; @@ -145,21 +31,212 @@ const FileManager: React.FC = ({ setCurrentView, }) => { const { t } = useTranslation(); - const handleDrop = (uploadedFiles: File[]) => { - setFiles((prevFiles) => (allowMultiple ? 
[...prevFiles, ...uploadedFiles] : uploadedFiles)); + const [loading, setLoading] = useState(false); + const [storageStats, setStorageStats] = useState(null); + const [notification, setNotification] = useState(null); + const [filesLoaded, setFilesLoaded] = useState(false); + + // Extract operations from service for cleaner code + const { + loadStorageStats, + forceReloadFiles, + loadExistingFiles, + uploadFiles, + removeFile, + clearAllFiles, + createBlobUrlForFile, + checkForPurge, + updateStorageStatsIncremental + } = fileOperationsService; + + // Add CSS for spinner animation + useEffect(() => { + if (!document.querySelector('#spinner-animation')) { + const style = document.createElement('style'); + style.id = 'spinner-animation'; + style.textContent = ` + @keyframes spin { + 0% { transform: rotate(0deg); } + 100% { transform: rotate(360deg); } + } + `; + document.head.appendChild(style); + } + }, []); + + // Load existing files from IndexedDB on mount + useEffect(() => { + if (!filesLoaded) { + handleLoadExistingFiles(); + } + }, [filesLoaded]); + + // Load storage stats and set up periodic updates + useEffect(() => { + handleLoadStorageStats(); + + const interval = setInterval(async () => { + await handleLoadStorageStats(); + await handleCheckForPurge(); + }, 10000); // Update every 10 seconds + + return () => clearInterval(interval); + }, []); + + // Sync UI with IndexedDB whenever storage stats change + useEffect(() => { + const syncWithStorage = async () => { + if (storageStats && filesLoaded) { + // If file counts don't match, force reload + if (storageStats.fileCount !== files.length) { + console.warn('File count mismatch: storage has', storageStats.fileCount, 'but UI shows', files.length, '- forcing reload'); + const reloadedFiles = await forceReloadFiles(); + setFiles(reloadedFiles); + } + } + }; + + syncWithStorage(); + }, [storageStats, filesLoaded, files.length]); + + // Handlers using extracted operations + const handleLoadStorageStats = async () => { + const stats = await loadStorageStats(); + if (stats) { + setStorageStats(stats); + + // Check for storage warnings + const warning = checkStorageWarnings(stats); + if (warning) { + setNotification(warning); + } + } }; - const handleRemoveFile = (index: number) => { - setFiles((prevFiles) => prevFiles.filter((_, i) => i !== index)); + const handleLoadExistingFiles = async () => { + try { + const loadedFiles = await loadExistingFiles(filesLoaded, files); + setFiles(loadedFiles); + setFilesLoaded(true); + } catch (error) { + console.error('Failed to load existing files:', error); + setFilesLoaded(true); + } + }; + + const handleCheckForPurge = async () => { + try { + const isPurged = await checkForPurge(files); + if (isPurged) { + console.warn('IndexedDB purge detected - forcing UI reload'); + setNotification('Browser cleared storage. Files have been removed. Please re-upload.'); + const reloadedFiles = await forceReloadFiles(); + setFiles(reloadedFiles); + setFilesLoaded(true); + } + } catch (error) { + console.error('Error checking for purge:', error); + } + }; + + const handleDrop = async (uploadedFiles: File[]) => { + setLoading(true); + + try { + const newFiles = await uploadFiles(uploadedFiles, defaultStorageConfig.useIndexedDB); + + // Update files state + setFiles((prevFiles) => (allowMultiple ? 
[...prevFiles, ...newFiles] : newFiles)); + + // Update storage stats incrementally + if (storageStats) { + const updatedStats = updateStorageStatsIncremental(storageStats, 'add', newFiles); + setStorageStats(updatedStats); + + // Check for storage warnings + const warning = checkStorageWarnings(updatedStats); + if (warning) { + setNotification(warning); + } + } + } catch (error) { + console.error('Error handling file drop:', error); + setNotification(t("fileManager.uploadError", "Failed to upload some files.")); + } finally { + setLoading(false); + } + }; + + const handleRemoveFile = async (index: number) => { + const file = files[index]; + + try { + await removeFile(file); + + // Update storage stats incrementally + if (storageStats) { + const updatedStats = updateStorageStatsIncremental(storageStats, 'remove', [file]); + setStorageStats(updatedStats); + } + + setFiles((prevFiles) => prevFiles.filter((_, i) => i !== index)); + } catch (error) { + console.error('Failed to remove file:', error); + } + }; + + const handleClearAll = async () => { + try { + await clearAllFiles(files); + + // Reset storage stats + if (storageStats) { + const clearedStats = updateStorageStatsIncremental(storageStats, 'clear'); + setStorageStats(clearedStats); + } + + setFiles([]); + } catch (error) { + console.error('Failed to clear all files:', error); + } + }; + + const handleReloadFiles = () => { + setFilesLoaded(false); + setFiles([]); + }; + + const handleFileDoubleClick = async (file: FileWithUrl) => { + if (setPdfFile) { + try { + const url = await createBlobUrlForFile(file); + setPdfFile({ file: file, url: url }); + setCurrentView && setCurrentView("viewer"); + } catch (error) { + console.error('Failed to create blob URL for file:', error); + setNotification('Failed to open file. It may have been removed from storage.'); + } + } }; return ( -
+
+ + {/* File Upload Dropzone */} = ({ display: "flex", alignItems: "center", justifyContent: "center", - width:"90%" + width: "90%" }} > - - - {t("fileChooser.dragAndDropPDF", "Drag PDF files here or click to select")} - - + + {t("fileChooser.dragAndDropPDF", "Drag PDF files here or click to select")} + + + {/* Storage Stats Card */} + + + {/* Files Display */} {files.length === 0 ? ( {t("noFileSelected", "No files uploaded yet.")} @@ -192,23 +277,26 @@ const FileManager: React.FC = ({ > {files.map((file, idx) => ( handleRemoveFile(idx)} - onDoubleClick={() => { - const fileObj = (file as FileWithUrl).file || file; - setPdfFile && - setPdfFile({ - file: fileObj, - url: URL.createObjectURL(fileObj), - }); - setCurrentView && setCurrentView("viewer"); - }} - /> + onDoubleClick={() => handleFileDoubleClick(file)} + as FileWithUrl /> ))} )} + + {/* Notifications */} + {notification && ( + setNotification(null)} + style={{ position: "fixed", bottom: 20, right: 20, zIndex: 1000 }} + > + {notification} + + )}
); }; diff --git a/frontend/src/components/StorageStatsCard.tsx b/frontend/src/components/StorageStatsCard.tsx new file mode 100644 index 000000000..31c991208 --- /dev/null +++ b/frontend/src/components/StorageStatsCard.tsx @@ -0,0 +1,76 @@ +import React from "react"; +import { Card, Group, Text, Button, Progress } from "@mantine/core"; +import { useTranslation } from "react-i18next"; +import StorageIcon from "@mui/icons-material/Storage"; +import DeleteIcon from "@mui/icons-material/Delete"; +import { StorageStats } from "../services/fileStorage"; +import { formatFileSize } from "../utils/fileUtils"; +import { getStorageUsagePercent } from "../utils/storageUtils"; + +interface StorageStatsCardProps { + storageStats: StorageStats | null; + filesCount: number; + onClearAll: () => void; + onReloadFiles: () => void; +} + +const StorageStatsCard: React.FC = ({ + storageStats, + filesCount, + onClearAll, + onReloadFiles, +}) => { + const { t } = useTranslation(); + + if (!storageStats) return null; + + const storageUsagePercent = getStorageUsagePercent(storageStats); + + return ( + + + +
+          <div style={{ flex: 1 }}>
+            <Text size="sm" fw={500}>
+              {t("fileManager.storage", "Storage")}: {formatFileSize(storageStats.used)}
+              {storageStats.quota && ` / ${formatFileSize(storageStats.quota)}`}
+            </Text>
+            {storageStats.quota && (
+              <Progress
+                value={storageUsagePercent}
+                color={storageUsagePercent > 80 ? "red" : storageUsagePercent > 60 ? "yellow" : "blue"}
+                size="sm"
+                mt={4}
+              />
+            )}
+            <Text size="xs" c="dimmed">
+              {storageStats.fileCount} {t("fileManager.filesStored", "files stored")}
+            </Text>
+          </div>
+        </Group>
+        <Group gap="xs">
+          {filesCount > 0 && (
+            <Button size="xs" color="red" variant="light" leftSection={<DeleteIcon fontSize="small" />} onClick={onClearAll}>
+              {t("fileManager.clearAll", "Clear All")}
+            </Button>
+          )}
+          <Button size="xs" variant="light" onClick={onReloadFiles}>
+            Reload
+          </Button>
+        </Group>
+      </Group>
+    </Card>
+ ); +}; + +export default StorageStatsCard; \ No newline at end of file diff --git a/frontend/src/components/Viewer.tsx b/frontend/src/components/Viewer.tsx index f119b4114..14143b916 100644 --- a/frontend/src/components/Viewer.tsx +++ b/frontend/src/components/Viewer.tsx @@ -10,9 +10,118 @@ import ViewSidebarIcon from "@mui/icons-material/ViewSidebar"; import ViewWeekIcon from "@mui/icons-material/ViewWeek"; // for dual page (book) import DescriptionIcon from "@mui/icons-material/Description"; // for single page import { useLocalStorage } from "@mantine/hooks"; +import { fileStorage } from "../services/fileStorage"; GlobalWorkerOptions.workerSrc = "/pdf.worker.js"; +// Lazy loading page image component +interface LazyPageImageProps { + pageIndex: number; + zoom: number; + theme: any; + isFirst: boolean; + renderPage: (pageIndex: number) => Promise; + pageImages: (string | null)[]; + setPageRef: (index: number, ref: HTMLImageElement | null) => void; +} + +const LazyPageImage: React.FC = ({ + pageIndex, zoom, theme, isFirst, renderPage, pageImages, setPageRef +}) => { + const [isVisible, setIsVisible] = useState(false); + const [imageUrl, setImageUrl] = useState(pageImages[pageIndex]); + const imgRef = useRef(null); + + useEffect(() => { + const observer = new IntersectionObserver( + (entries) => { + entries.forEach((entry) => { + if (entry.isIntersecting && !imageUrl) { + setIsVisible(true); + } + }); + }, + { + rootMargin: '200px', // Start loading 200px before visible + threshold: 0.1 + } + ); + + if (imgRef.current) { + observer.observe(imgRef.current); + } + + return () => observer.disconnect(); + }, [imageUrl]); + + useEffect(() => { + if (isVisible && !imageUrl) { + renderPage(pageIndex).then((url) => { + if (url) setImageUrl(url); + }); + } + }, [isVisible, imageUrl, pageIndex, renderPage]); + + useEffect(() => { + if (imgRef.current) { + setPageRef(pageIndex, imgRef.current); + } + }, [pageIndex, setPageRef]); + + if (imageUrl) { + return ( + {`Page + ); + } + + // Placeholder while loading + return ( +
+      {isVisible ? (
+        <span style={{ color: "#888" }}>Loading page {pageIndex + 1}...</span>
+      ) : (
+        <span style={{ color: "#888" }}>Page {pageIndex + 1}</span>
+      )}
+    </div>
+ ); +}; + export interface ViewerProps { pdfFile: { file: File; url: string } | null; setPdfFile: (file: { file: File; url: string } | null) => void; @@ -38,7 +147,52 @@ const Viewer: React.FC = ({ const scrollAreaRef = useRef(null); const userInitiatedRef = useRef(false); const suppressScrollRef = useRef(false); + const pdfDocRef = useRef(null); + const renderingPagesRef = useRef>(new Set()); + const currentArrayBufferRef = useRef(null); + // Function to render a specific page on-demand + const renderPage = async (pageIndex: number): Promise => { + if (!pdfFile || !pdfDocRef.current || renderingPagesRef.current.has(pageIndex)) { + return null; + } + + const pageNum = pageIndex + 1; + if (pageImages[pageIndex]) { + return pageImages[pageIndex]; // Already rendered + } + + renderingPagesRef.current.add(pageIndex); + + try { + const page = await pdfDocRef.current.getPage(pageNum); + const viewport = page.getViewport({ scale: 1.2 }); + const canvas = document.createElement("canvas"); + canvas.width = viewport.width; + canvas.height = viewport.height; + const ctx = canvas.getContext("2d"); + + if (ctx) { + await page.render({ canvasContext: ctx, viewport }).promise; + const dataUrl = canvas.toDataURL(); + + // Update the pageImages array + setPageImages(prev => { + const newImages = [...prev]; + newImages[pageIndex] = dataUrl; + return newImages; + }); + + renderingPagesRef.current.delete(pageIndex); + return dataUrl; + } + } catch (error) { + console.error(`Failed to render page ${pageNum}:`, error); + } + + renderingPagesRef.current.delete(pageIndex); + return null; + }; // Listen for hash changes and update currentPage useEffect(() => { @@ -121,7 +275,7 @@ const Viewer: React.FC = ({ useEffect(() => { let cancelled = false; - async function renderPages() { + async function loadPdfInfo() { if (!pdfFile || !pdfFile.url) { setNumPages(0); setPageImages([]); @@ -129,29 +283,49 @@ const Viewer: React.FC = ({ } setLoading(true); try { - const pdf = await getDocument(pdfFile.url).promise; - setNumPages(pdf.numPages); - const images: string[] = []; - for (let i = 1; i <= pdf.numPages; i++) { - const page = await pdf.getPage(i); - const viewport = page.getViewport({ scale: 1.2 }); - const canvas = document.createElement("canvas"); - canvas.width = viewport.width; - canvas.height = viewport.height; - const ctx = canvas.getContext("2d"); - if (ctx) { - await page.render({ canvasContext: ctx, viewport }).promise; - images.push(canvas.toDataURL()); + let pdfUrl = pdfFile.url; + + // Handle special IndexedDB URLs for large files + if (pdfFile.url.startsWith('indexeddb:')) { + const fileId = pdfFile.url.replace('indexeddb:', ''); + console.log('Loading large file from IndexedDB:', fileId); + + // Get data directly from IndexedDB + const arrayBuffer = await fileStorage.getFileData(fileId); + if (!arrayBuffer) { + throw new Error('File not found in IndexedDB - may have been purged by browser'); } + + // Store reference for cleanup + currentArrayBufferRef.current = arrayBuffer; + + // Use ArrayBuffer directly instead of creating blob URL + const pdf = await getDocument({ data: arrayBuffer }).promise; + pdfDocRef.current = pdf; + setNumPages(pdf.numPages); + if (!cancelled) setPageImages(new Array(pdf.numPages).fill(null)); + } else { + // Standard blob URL or regular URL + const pdf = await getDocument(pdfUrl).promise; + pdfDocRef.current = pdf; + setNumPages(pdf.numPages); + if (!cancelled) setPageImages(new Array(pdf.numPages).fill(null)); + } + } catch (error) { + console.error('Failed to load PDF:', 
error); + if (!cancelled) { + setPageImages([]); + setNumPages(0); } - if (!cancelled) setPageImages(images); - } catch { - if (!cancelled) setPageImages([]); } if (!cancelled) setLoading(false); } - renderPages(); - return () => { cancelled = true; }; + loadPdfInfo(); + return () => { + cancelled = true; + // Cleanup ArrayBuffer reference to help garbage collection + currentArrayBufferRef.current = null; + }; }, [pdfFile]); useEffect(() => { @@ -210,53 +384,44 @@ const Viewer: React.FC = ({ viewportRef={scrollAreaRef} > - {pageImages.length === 0 && ( + {numPages === 0 && ( {t("viewer.noPagesToDisplay", "No pages to display.")} )} {dualPage - ? Array.from({ length: Math.ceil(pageImages.length / 2) }).map((_, i) => ( + ? Array.from({ length: Math.ceil(numPages / 2) }).map((_, i) => ( - { pageRefs.current[i * 2] = el; }} - src={pageImages[i * 2]} - alt={`Page ${i * 2 + 1}`} - style={{ - width: `${100 * zoom}%`, - maxWidth: 700 * zoom, - boxShadow: "0 2px 8px rgba(0,0,0,0.08)", - borderRadius: 8, - marginTop: i === 0 ? theme.spacing.xl : 0, // <-- add gap to first row - }} + { pageRefs.current[index] = ref; }} /> - {pageImages[i * 2 + 1] && ( - { pageRefs.current[i * 2 + 1] = el; }} - src={pageImages[i * 2 + 1]} - alt={`Page ${i * 2 + 2}`} - style={{ - width: `${100 * zoom}%`, - maxWidth: 700 * zoom, - boxShadow: "0 2px 8px rgba(0,0,0,0.08)", - borderRadius: 8, - marginTop: i === 0 ? theme.spacing.xl : 0, // <-- add gap to first row - }} + {i * 2 + 1 < numPages && ( + { pageRefs.current[index] = ref; }} /> )} )) - : pageImages.map((img, idx) => ( - ( + { pageRefs.current[idx] = el; }} - src={img} - alt={`Page ${idx + 1}`} - style={{ - width: `${100 * zoom}%`, - maxWidth: 700 * zoom, - boxShadow: "0 2px 8px rgba(0,0,0,0.08)", - borderRadius: 8, - marginTop: idx === 0 ? 
theme.spacing.xl : 0, // <-- add gap to first page - }} + pageIndex={idx} + zoom={zoom} + theme={theme} + isFirst={idx === 0} + renderPage={renderPage} + pageImages={pageImages} + setPageRef={(index, ref) => { pageRefs.current[index] = ref; }} /> ))} diff --git a/frontend/src/hooks/useIndexedDBThumbnail.ts b/frontend/src/hooks/useIndexedDBThumbnail.ts new file mode 100644 index 000000000..b8404e5fe --- /dev/null +++ b/frontend/src/hooks/useIndexedDBThumbnail.ts @@ -0,0 +1,72 @@ +import { useState, useEffect } from "react"; +import { getDocument } from "pdfjs-dist"; +import { FileWithUrl } from "../types/file"; + +/** + * Hook for IndexedDB-aware thumbnail loading + * Handles thumbnail generation for files not in IndexedDB + */ +export function useIndexedDBThumbnail(file: FileWithUrl | undefined | null): { + thumbnail: string | null; + isGenerating: boolean +} { + const [thumb, setThumb] = useState(null); + const [generating, setGenerating] = useState(false); + + useEffect(() => { + let cancelled = false; + + async function loadThumbnail() { + if (!file) { + setThumb(null); + return; + } + + // First priority: use stored thumbnail + if (file.thumbnail) { + setThumb(file.thumbnail); + return; + } + + // Second priority: for IndexedDB files without stored thumbnails, just use placeholder + if (file.storedInIndexedDB && file.id) { + // Don't generate thumbnails for files loaded from IndexedDB - just use placeholder + setThumb(null); + return; + } + + // Third priority: generate from blob for regular files during upload (small files only) + if (!file.storedInIndexedDB && file.size < 50 * 1024 * 1024 && !generating) { + setGenerating(true); + try { + const arrayBuffer = await file.arrayBuffer(); + const pdf = await getDocument({ data: arrayBuffer }).promise; + const page = await pdf.getPage(1); + const viewport = page.getViewport({ scale: 0.2 }); + const canvas = document.createElement("canvas"); + canvas.width = viewport.width; + canvas.height = viewport.height; + const context = canvas.getContext("2d"); + if (context && !cancelled) { + await page.render({ canvasContext: context, viewport }).promise; + if (!cancelled) setThumb(canvas.toDataURL()); + } + pdf.destroy(); // Clean up memory + } catch (error) { + console.warn('Failed to generate thumbnail for regular file', file.name, error); + if (!cancelled) setThumb(null); + } finally { + if (!cancelled) setGenerating(false); + } + } else { + // Large files or files without proper conditions - show placeholder + setThumb(null); + } + } + + loadThumbnail(); + return () => { cancelled = true; }; + }, [file, file?.thumbnail, file?.id]); + + return { thumbnail: thumb, isGenerating: generating }; +} \ No newline at end of file diff --git a/frontend/src/services/fileOperationsService.ts b/frontend/src/services/fileOperationsService.ts new file mode 100644 index 000000000..d13965837 --- /dev/null +++ b/frontend/src/services/fileOperationsService.ts @@ -0,0 +1,194 @@ +import { FileWithUrl } from "../types/file"; +import { fileStorage, StorageStats } from "./fileStorage"; +import { loadFilesFromIndexedDB, createEnhancedFileFromStored, cleanupFileUrls } from "../utils/fileUtils"; +import { generateThumbnailForFile } from "../utils/thumbnailUtils"; +import { updateStorageStatsIncremental } from "../utils/storageUtils"; + +/** + * Service for file storage operations + * Contains all IndexedDB operations and file management logic + */ +export const fileOperationsService = { + + /** + * Load storage statistics + */ + async loadStorageStats(): Promise { + try { 
+ return await fileStorage.getStorageStats(); + } catch (error) { + console.error('Failed to load storage stats:', error); + return null; + } + }, + + /** + * Force reload files from IndexedDB + */ + async forceReloadFiles(): Promise { + try { + return await loadFilesFromIndexedDB(); + } catch (error) { + console.error('Failed to force reload files:', error); + return []; + } + }, + + /** + * Load existing files from IndexedDB if not already loaded + */ + async loadExistingFiles( + filesLoaded: boolean, + currentFiles: FileWithUrl[] + ): Promise { + if (filesLoaded && currentFiles.length > 0) { + return currentFiles; + } + + try { + await fileStorage.init(); + const storedFiles = await fileStorage.getAllFileMetadata(); + + // Detect if IndexedDB was purged by comparing with current UI state + if (currentFiles.length > 0 && storedFiles.length === 0) { + console.warn('IndexedDB appears to have been purged - clearing UI state'); + return []; + } + + return await loadFilesFromIndexedDB(); + } catch (error) { + console.error('Failed to load existing files:', error); + return []; + } + }, + + /** + * Upload files to IndexedDB with thumbnail generation + */ + async uploadFiles( + uploadedFiles: File[], + useIndexedDB: boolean + ): Promise { + const newFiles: FileWithUrl[] = []; + + for (const file of uploadedFiles) { + if (useIndexedDB) { + try { + console.log('Storing file in IndexedDB:', file.name); + + // Generate thumbnail only during upload + const thumbnail = await generateThumbnailForFile(file); + + const storedFile = await fileStorage.storeFile(file, thumbnail); + console.log('File stored with ID:', storedFile.id); + + const baseFile = fileStorage.createFileFromStored(storedFile); + const enhancedFile = createEnhancedFileFromStored(storedFile, thumbnail); + + // Copy File interface methods from baseFile + enhancedFile.arrayBuffer = baseFile.arrayBuffer.bind(baseFile); + enhancedFile.slice = baseFile.slice.bind(baseFile); + enhancedFile.stream = baseFile.stream.bind(baseFile); + enhancedFile.text = baseFile.text.bind(baseFile); + + newFiles.push(enhancedFile); + } catch (error) { + console.error('Failed to store file in IndexedDB:', error); + // Fallback to RAM storage + const enhancedFile: FileWithUrl = Object.assign(file, { + url: URL.createObjectURL(file), + storedInIndexedDB: false + }); + newFiles.push(enhancedFile); + } + } else { + // IndexedDB disabled - use RAM + const enhancedFile: FileWithUrl = Object.assign(file, { + url: URL.createObjectURL(file), + storedInIndexedDB: false + }); + newFiles.push(enhancedFile); + } + } + + return newFiles; + }, + + /** + * Remove a file from storage + */ + async removeFile(file: FileWithUrl): Promise { + // Clean up blob URL + if (file.url && !file.url.startsWith('indexeddb:')) { + URL.revokeObjectURL(file.url); + } + + // Remove from IndexedDB if stored there + if (file.storedInIndexedDB && file.id) { + try { + await fileStorage.deleteFile(file.id); + } catch (error) { + console.error('Failed to delete file from IndexedDB:', error); + } + } + }, + + /** + * Clear all files from storage + */ + async clearAllFiles(files: FileWithUrl[]): Promise { + // Clean up all blob URLs + cleanupFileUrls(files); + + // Clear IndexedDB + try { + await fileStorage.clearAll(); + } catch (error) { + console.error('Failed to clear IndexedDB:', error); + } + }, + + /** + * Create blob URL for file viewing + */ + async createBlobUrlForFile(file: FileWithUrl): Promise { + // For large files, use IndexedDB direct access to avoid memory issues + const FILE_SIZE_LIMIT 
= 100 * 1024 * 1024; // 100MB + if (file.size > FILE_SIZE_LIMIT) { + console.warn(`File ${file.name} is too large for blob URL. Use direct IndexedDB access.`); + return `indexeddb:${file.id}`; + } + + // For all files, avoid persistent blob URLs + if (file.storedInIndexedDB && file.id) { + const storedFile = await fileStorage.getFile(file.id); + if (storedFile) { + return fileStorage.createBlobUrl(storedFile); + } + } + + // Fallback for files not in IndexedDB + return URL.createObjectURL(file); + }, + + /** + * Check for IndexedDB purge + */ + async checkForPurge(currentFiles: FileWithUrl[]): Promise { + if (currentFiles.length === 0) return false; + + try { + await fileStorage.init(); + const storedFiles = await fileStorage.getAllFileMetadata(); + return storedFiles.length === 0; // Purge detected if no files in storage but UI shows files + } catch (error) { + console.error('Error checking for purge:', error); + return true; // Assume purged if can't access IndexedDB + } + }, + + /** + * Update storage stats incrementally (re-export utility for convenience) + */ + updateStorageStatsIncremental +}; \ No newline at end of file diff --git a/frontend/src/services/fileStorage.ts b/frontend/src/services/fileStorage.ts new file mode 100644 index 000000000..9ba2e7def --- /dev/null +++ b/frontend/src/services/fileStorage.ts @@ -0,0 +1,576 @@ +/** + * IndexedDB File Storage Service + * Provides high-capacity file storage for PDF processing + */ + +export interface StoredFile { + id: string; + name: string; + type: string; + size: number; + lastModified: number; + data: ArrayBuffer; + thumbnail?: string; + url?: string; // For compatibility with existing components +} + +export interface StorageStats { + used: number; + available: number; + fileCount: number; + quota?: number; +} + +class FileStorageService { + private dbName = 'stirling-pdf-files'; + private dbVersion = 2; // Increment version to force schema update + private storeName = 'files'; + private db: IDBDatabase | null = null; + private initPromise: Promise | null = null; + + /** + * Initialize the IndexedDB database (singleton pattern) + */ + async init(): Promise { + if (this.db) { + return Promise.resolve(); + } + + if (this.initPromise) { + return this.initPromise; + } + + this.initPromise = new Promise((resolve, reject) => { + const request = indexedDB.open(this.dbName, this.dbVersion); + + request.onerror = () => { + this.initPromise = null; + reject(request.error); + }; + + request.onsuccess = () => { + this.db = request.result; + console.log('IndexedDB connection established'); + resolve(); + }; + + request.onupgradeneeded = (event) => { + const db = (event.target as IDBOpenDBRequest).result; + const oldVersion = (event as any).oldVersion; + + console.log('IndexedDB upgrade needed from version', oldVersion, 'to', this.dbVersion); + + // Only recreate object store if it doesn't exist or if upgrading from version < 2 + if (!db.objectStoreNames.contains(this.storeName)) { + const store = db.createObjectStore(this.storeName, { keyPath: 'id' }); + store.createIndex('name', 'name', { unique: false }); + store.createIndex('lastModified', 'lastModified', { unique: false }); + console.log('IndexedDB object store created with keyPath: id'); + } else if (oldVersion < 2) { + // Only delete and recreate if upgrading from version 1 to 2 + db.deleteObjectStore(this.storeName); + const store = db.createObjectStore(this.storeName, { keyPath: 'id' }); + store.createIndex('name', 'name', { unique: false }); + store.createIndex('lastModified', 
'lastModified', { unique: false }); + console.log('IndexedDB object store recreated with keyPath: id (version upgrade)'); + } + }; + }); + + return this.initPromise; + } + + /** + * Store a file in IndexedDB + */ + async storeFile(file: File, thumbnail?: string): Promise { + if (!this.db) await this.init(); + + const id = `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`; + const arrayBuffer = await file.arrayBuffer(); + + const storedFile: StoredFile = { + id, + name: file.name, + type: file.type, + size: file.size, + lastModified: file.lastModified, + data: arrayBuffer, + thumbnail + }; + + return new Promise((resolve, reject) => { + try { + const transaction = this.db!.transaction([this.storeName], 'readwrite'); + const store = transaction.objectStore(this.storeName); + + // Debug logging + console.log('Object store keyPath:', store.keyPath); + console.log('Storing file:', { + id: storedFile.id, + name: storedFile.name, + hasData: !!storedFile.data, + dataSize: storedFile.data.byteLength + }); + + const request = store.add(storedFile); + + request.onerror = () => { + console.error('IndexedDB add error:', request.error); + console.error('Failed object:', storedFile); + reject(request.error); + }; + request.onsuccess = () => { + console.log('File stored successfully with ID:', storedFile.id); + resolve(storedFile); + }; + } catch (error) { + console.error('Transaction error:', error); + reject(error); + } + }); + } + + /** + * Retrieve a file from IndexedDB + */ + async getFile(id: string): Promise { + if (!this.db) await this.init(); + + return new Promise((resolve, reject) => { + const transaction = this.db!.transaction([this.storeName], 'readonly'); + const store = transaction.objectStore(this.storeName); + const request = store.get(id); + + request.onerror = () => reject(request.error); + request.onsuccess = () => resolve(request.result || null); + }); + } + + /** + * Get all stored files (WARNING: loads all data into memory) + */ + async getAllFiles(): Promise { + if (!this.db) await this.init(); + + return new Promise((resolve, reject) => { + const transaction = this.db!.transaction([this.storeName], 'readonly'); + const store = transaction.objectStore(this.storeName); + const request = store.getAll(); + + request.onerror = () => reject(request.error); + request.onsuccess = () => { + // Filter out null/corrupted entries + const files = request.result.filter(file => + file && + file.data && + file.name && + typeof file.size === 'number' + ); + resolve(files); + }; + }); + } + + /** + * Get metadata of all stored files (without loading data into memory) + */ + async getAllFileMetadata(): Promise[]> { + if (!this.db) await this.init(); + + return new Promise((resolve, reject) => { + const transaction = this.db!.transaction([this.storeName], 'readonly'); + const store = transaction.objectStore(this.storeName); + const request = store.openCursor(); + const files: Omit[] = []; + + request.onerror = () => reject(request.error); + request.onsuccess = (event) => { + const cursor = (event.target as IDBRequest).result; + if (cursor) { + const storedFile = cursor.value; + // Only extract metadata, skip the data field + if (storedFile && storedFile.name && typeof storedFile.size === 'number') { + files.push({ + id: storedFile.id, + name: storedFile.name, + type: storedFile.type, + size: storedFile.size, + lastModified: storedFile.lastModified, + thumbnail: storedFile.thumbnail + }); + } + cursor.continue(); + } else { + console.log('Loaded metadata for', files.length, 'files without 
loading data'); + resolve(files); + } + }; + }); + } + + /** + * Delete a file from IndexedDB + */ + async deleteFile(id: string): Promise { + if (!this.db) await this.init(); + + return new Promise((resolve, reject) => { + const transaction = this.db!.transaction([this.storeName], 'readwrite'); + const store = transaction.objectStore(this.storeName); + const request = store.delete(id); + + request.onerror = () => reject(request.error); + request.onsuccess = () => resolve(); + }); + } + + /** + * Clear all stored files + */ + async clearAll(): Promise { + if (!this.db) await this.init(); + + return new Promise((resolve, reject) => { + const transaction = this.db!.transaction([this.storeName], 'readwrite'); + const store = transaction.objectStore(this.storeName); + const request = store.clear(); + + request.onerror = () => reject(request.error); + request.onsuccess = () => resolve(); + }); + } + + /** + * Get storage statistics (only our IndexedDB usage) + */ + async getStorageStats(): Promise { + if (!this.db) await this.init(); + + let used = 0; + let available = 0; + let quota: number | undefined; + let fileCount = 0; + + try { + // Get browser quota for context + if ('storage' in navigator && 'estimate' in navigator.storage) { + const estimate = await navigator.storage.estimate(); + quota = estimate.quota; + available = estimate.quota || 0; + } + + // Calculate our actual IndexedDB usage from file metadata + const files = await this.getAllFileMetadata(); + used = files.reduce((total, file) => total + (file?.size || 0), 0); + fileCount = files.length; + + // Adjust available space + if (quota) { + available = quota - used; + } + + } catch (error) { + console.warn('Could not get storage stats:', error); + // If we can't read metadata, database might be purged + used = 0; + fileCount = 0; + } + + return { + used, + available, + fileCount, + quota + }; + } + + /** + * Get file count quickly without loading metadata + */ + async getFileCount(): Promise { + if (!this.db) await this.init(); + + return new Promise((resolve, reject) => { + const transaction = this.db!.transaction([this.storeName], 'readonly'); + const store = transaction.objectStore(this.storeName); + const request = store.count(); + + request.onerror = () => reject(request.error); + request.onsuccess = () => resolve(request.result); + }); + } + + /** + * Check all IndexedDB databases to see if files are in another version + */ + async debugAllDatabases(): Promise { + console.log('=== Checking All IndexedDB Databases ==='); + + if ('databases' in indexedDB) { + try { + const databases = await indexedDB.databases(); + console.log('Found databases:', databases); + + for (const dbInfo of databases) { + if (dbInfo.name?.includes('stirling') || dbInfo.name?.includes('pdf')) { + console.log(`Checking database: ${dbInfo.name} (version: ${dbInfo.version})`); + try { + const db = await new Promise((resolve, reject) => { + const request = indexedDB.open(dbInfo.name!, dbInfo.version); + request.onsuccess = () => resolve(request.result); + request.onerror = () => reject(request.error); + }); + + console.log(`Database ${dbInfo.name} object stores:`, Array.from(db.objectStoreNames)); + db.close(); + } catch (error) { + console.error(`Failed to open database ${dbInfo.name}:`, error); + } + } + } + } catch (error) { + console.error('Failed to list databases:', error); + } + } else { + console.log('indexedDB.databases() not supported'); + } + + // Also check our specific database with different versions + for (let version = 1; version <= 3; 
version++) { + try { + console.log(`Trying to open ${this.dbName} version ${version}...`); + const db = await new Promise((resolve, reject) => { + const request = indexedDB.open(this.dbName, version); + request.onsuccess = () => resolve(request.result); + request.onerror = () => reject(request.error); + request.onupgradeneeded = () => { + // Don't actually upgrade, just check + request.transaction?.abort(); + }; + }); + + console.log(`Version ${version} object stores:`, Array.from(db.objectStoreNames)); + + if (db.objectStoreNames.contains('files')) { + const transaction = db.transaction(['files'], 'readonly'); + const store = transaction.objectStore('files'); + const countRequest = store.count(); + countRequest.onsuccess = () => { + console.log(`Version ${version} files store has ${countRequest.result} entries`); + }; + } + + db.close(); + } catch (error) { + console.log(`Version ${version} not accessible:`, error.message); + } + } + } + + /** + * Debug method to check what's actually in the database + */ + async debugDatabaseContents(): Promise { + if (!this.db) await this.init(); + + return new Promise((resolve, reject) => { + const transaction = this.db!.transaction([this.storeName], 'readonly'); + const store = transaction.objectStore(this.storeName); + + // First try getAll to see if there's anything + const getAllRequest = store.getAll(); + getAllRequest.onsuccess = () => { + console.log('=== Raw getAll() result ==='); + console.log('Raw entries found:', getAllRequest.result.length); + getAllRequest.result.forEach((item, index) => { + console.log(`Raw entry ${index}:`, { + keys: Object.keys(item || {}), + id: item?.id, + name: item?.name, + size: item?.size, + type: item?.type, + hasData: !!item?.data, + dataSize: item?.data?.byteLength, + fullObject: item + }); + }); + }; + + // Then try cursor + const cursorRequest = store.openCursor(); + console.log('=== IndexedDB Cursor Debug ==='); + let count = 0; + + cursorRequest.onerror = () => { + console.error('Cursor error:', cursorRequest.error); + reject(cursorRequest.error); + }; + + cursorRequest.onsuccess = (event) => { + const cursor = (event.target as IDBRequest).result; + if (cursor) { + count++; + const value = cursor.value; + console.log(`Cursor File ${count}:`, { + id: value?.id, + name: value?.name, + size: value?.size, + type: value?.type, + hasData: !!value?.data, + dataSize: value?.data?.byteLength, + hasThumbnail: !!value?.thumbnail, + allKeys: Object.keys(value || {}) + }); + cursor.continue(); + } else { + console.log(`=== End Cursor Debug - Found ${count} files ===`); + resolve(); + } + }; + }); + } + + /** + * Convert StoredFile back to File object for compatibility + */ + createFileFromStored(storedFile: StoredFile): File { + if (!storedFile || !storedFile.data) { + throw new Error('Invalid stored file: missing data'); + } + + if (!storedFile.name || typeof storedFile.size !== 'number') { + throw new Error('Invalid stored file: missing metadata'); + } + + const blob = new Blob([storedFile.data], { type: storedFile.type }); + const file = new File([blob], storedFile.name, { + type: storedFile.type, + lastModified: storedFile.lastModified + }); + + // Add custom properties for compatibility + Object.defineProperty(file, 'id', { value: storedFile.id, writable: false }); + Object.defineProperty(file, 'thumbnail', { value: storedFile.thumbnail, writable: false }); + + return file; + } + + /** + * Create blob URL for stored file + */ + createBlobUrl(storedFile: StoredFile): string { + const blob = new 
Blob([storedFile.data], { type: storedFile.type }); + return URL.createObjectURL(blob); + } + + /** + * Get file data as ArrayBuffer for streaming/chunked processing + */ + async getFileData(id: string): Promise { + try { + const storedFile = await this.getFile(id); + return storedFile ? storedFile.data : null; + } catch (error) { + console.warn(`Failed to get file data for ${id}:`, error); + return null; + } + } + + /** + * Create a temporary blob URL that gets revoked automatically + */ + async createTemporaryBlobUrl(id: string): Promise { + const data = await this.getFileData(id); + if (!data) return null; + + const blob = new Blob([data], { type: 'application/pdf' }); + const url = URL.createObjectURL(blob); + + // Auto-revoke after a short delay to free memory + setTimeout(() => { + URL.revokeObjectURL(url); + }, 10000); // 10 seconds + + return url; + } + + /** + * Update thumbnail for an existing file + */ + async updateThumbnail(id: string, thumbnail: string): Promise { + if (!this.db) await this.init(); + + return new Promise((resolve, reject) => { + try { + const transaction = this.db!.transaction([this.storeName], 'readwrite'); + const store = transaction.objectStore(this.storeName); + const getRequest = store.get(id); + + getRequest.onsuccess = () => { + const storedFile = getRequest.result; + if (storedFile) { + storedFile.thumbnail = thumbnail; + const updateRequest = store.put(storedFile); + + updateRequest.onsuccess = () => { + console.log('Thumbnail updated for file:', id); + resolve(true); + }; + updateRequest.onerror = () => { + console.error('Failed to update thumbnail:', updateRequest.error); + resolve(false); + }; + } else { + resolve(false); + } + }; + + getRequest.onerror = () => { + console.error('Failed to get file for thumbnail update:', getRequest.error); + resolve(false); + }; + } catch (error) { + console.error('Transaction error during thumbnail update:', error); + resolve(false); + } + }); + } + + /** + * Check if storage quota is running low + */ + async isStorageLow(): Promise { + const stats = await this.getStorageStats(); + if (!stats.quota) return false; + + const usagePercent = stats.used / stats.quota; + return usagePercent > 0.8; // Consider low if over 80% used + } + + /** + * Clean up old files if storage is low + */ + async cleanupOldFiles(maxFiles: number = 50): Promise { + const files = await this.getAllFileMetadata(); + + if (files.length <= maxFiles) return; + + // Sort by last modified (oldest first) + files.sort((a, b) => a.lastModified - b.lastModified); + + // Delete oldest files + const filesToDelete = files.slice(0, files.length - maxFiles); + for (const file of filesToDelete) { + await this.deleteFile(file.id); + } + } +} + +// Export singleton instance +export const fileStorage = new FileStorageService(); + +// Helper hook for React components +export function useFileStorage() { + return fileStorage; +} \ No newline at end of file diff --git a/frontend/src/tools/Compress.tsx b/frontend/src/tools/Compress.tsx index 4ad40e63b..23ad4279c 100644 --- a/frontend/src/tools/Compress.tsx +++ b/frontend/src/tools/Compress.tsx @@ -2,9 +2,11 @@ import React, { useState } from "react"; import { useSearchParams } from "react-router-dom"; import { useTranslation } from "react-i18next"; import { Stack, Slider, Group, Text, Button, Checkbox, TextInput, Paper } from "@mantine/core"; +import { FileWithUrl } from "../types/file"; +import { fileStorage } from "../services/fileStorage"; export interface CompressProps { - files?: File[]; + files?: 
FileWithUrl[]; setDownloadUrl?: (url: string) => void; setLoading?: (loading: boolean) => void; } @@ -41,21 +43,39 @@ const CompressPdfPanel: React.FC = ({ setLocalLoading(true); setLoading?.(true); - const formData = new FormData(); - selectedFiles.forEach(file => formData.append("fileInput", file)); - formData.append("compressionLevel", compressionLevel.toString()); - formData.append("grayscale", grayscale.toString()); - formData.append("removeMetadata", removeMetadata.toString()); - formData.append("aggressive", aggressive.toString()); - if (expectedSize) formData.append("expectedSize", expectedSize); - try { + const formData = new FormData(); + + // Handle IndexedDB files + for (const file of selectedFiles) { + if (!file.id) { + continue; // Skip files without an id + } + const storedFile = await fileStorage.getFile(file.id); + if (storedFile) { + const blob = new Blob([storedFile.data], { type: storedFile.type }); + const actualFile = new File([blob], storedFile.name, { + type: storedFile.type, + lastModified: storedFile.lastModified + }); + formData.append("fileInput", actualFile); + } + } + + formData.append("compressionLevel", compressionLevel.toString()); + formData.append("grayscale", grayscale.toString()); + formData.append("removeMetadata", removeMetadata.toString()); + formData.append("aggressive", aggressive.toString()); + if (expectedSize) formData.append("expectedSize", expectedSize); + const res = await fetch("/api/v1/general/compress-pdf", { method: "POST", body: formData, }); const blob = await res.blob(); setDownloadUrl?.(URL.createObjectURL(blob)); + } catch (error) { + console.error('Compression failed:', error); } finally { setLocalLoading(false); setLoading?.(false); diff --git a/frontend/src/tools/Merge.tsx b/frontend/src/tools/Merge.tsx index 1f5630826..e9f528739 100644 --- a/frontend/src/tools/Merge.tsx +++ b/frontend/src/tools/Merge.tsx @@ -2,9 +2,11 @@ import React, { useState, useEffect } from "react"; import { Paper, Button, Checkbox, Stack, Text, Group, Loader, Alert } from "@mantine/core"; import { useSearchParams } from "react-router-dom"; import { useTranslation } from "react-i18next"; +import { FileWithUrl } from "../types/file"; +import { fileStorage } from "../services/fileStorage"; export interface MergePdfPanelProps { - files: File[]; + files: FileWithUrl[]; setDownloadUrl: (url: string) => void; params: { order: string; @@ -38,7 +40,22 @@ const MergePdfPanel: React.FC = ({ } const formData = new FormData(); - filesToMerge.forEach((file) => formData.append("fileInput", file)); + + // Handle IndexedDB files + for (const file of filesToMerge) { + if (!file.id) { + continue; // Skip files without an id + } + const storedFile = await fileStorage.getFile(file?.id); + if (storedFile) { + const blob = new Blob([storedFile.data], { type: storedFile.type }); + const actualFile = new File([blob], storedFile.name, { + type: storedFile.type, + lastModified: storedFile.lastModified + }); + formData.append("fileInput", actualFile); + } + } setIsLoading(true); setErrorMessage(null); diff --git a/frontend/src/tools/Split.tsx b/frontend/src/tools/Split.tsx index d6cc56680..4d44473d1 100644 --- a/frontend/src/tools/Split.tsx +++ b/frontend/src/tools/Split.tsx @@ -12,9 +12,11 @@ import { import { useSearchParams } from "react-router-dom"; import { useTranslation } from "react-i18next"; import DownloadIcon from "@mui/icons-material/Download"; +import { FileWithUrl } from "../types/file"; +import { fileStorage } from "../services/fileStorage"; export interface 
SplitPdfPanelProps { - file: { file: File; url: string } | null; + file: { file: FileWithUrl; url: string } | null; downloadUrl?: string | null; setDownloadUrl: (url: string | null) => void; params: { @@ -68,7 +70,21 @@ const SplitPdfPanel: React.FC = ({ } const formData = new FormData(); - formData.append("fileInput", file.file); + + // Handle IndexedDB files + if (!file.file.id) { + setStatus(t("noFileSelected")); + return; + } + const storedFile = await fileStorage.getFile(file.file.id); + if (storedFile) { + const blob = new Blob([storedFile.data], { type: storedFile.type }); + const actualFile = new File([blob], storedFile.name, { + type: storedFile.type, + lastModified: storedFile.lastModified + }); + formData.append("fileInput", actualFile); + } let endpoint = ""; diff --git a/frontend/src/types/file.ts b/frontend/src/types/file.ts new file mode 100644 index 000000000..f2915e32e --- /dev/null +++ b/frontend/src/types/file.ts @@ -0,0 +1,19 @@ +/** + * Enhanced file types for IndexedDB storage + */ + +export interface FileWithUrl extends File { + id?: string; + url?: string; + thumbnail?: string; + storedInIndexedDB?: boolean; +} + +export interface StorageConfig { + useIndexedDB: boolean; + // Simplified - no thresholds needed, IndexedDB for everything +} + +export const defaultStorageConfig: StorageConfig = { + useIndexedDB: true, +}; \ No newline at end of file diff --git a/frontend/src/utils/fileUtils.ts b/frontend/src/utils/fileUtils.ts new file mode 100644 index 000000000..f9d94eecc --- /dev/null +++ b/frontend/src/utils/fileUtils.ts @@ -0,0 +1,124 @@ +import { FileWithUrl } from "../types/file"; +import { StoredFile, fileStorage } from "../services/fileStorage"; + +/** + * Consolidated file size formatting utility + */ +export function formatFileSize(bytes: number): string { + if (bytes === 0) return '0 B'; + const k = 1024; + const sizes = ['B', 'KB', 'MB', 'GB']; + const i = Math.floor(Math.log(bytes) / Math.log(k)); + return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i]; +} + +/** + * Get file date as string + */ +export function getFileDate(file: File): string { + if (file.lastModified) { + return new Date(file.lastModified).toLocaleString(); + } + return "Unknown"; +} + +/** + * Get file size as string (legacy method for backward compatibility) + */ +export function getFileSize(file: File): string { + if (!file.size) return "Unknown"; + return formatFileSize(file.size); +} + +/** + * Create enhanced file object from stored file metadata + * This eliminates the repeated pattern in FileManager + */ +export function createEnhancedFileFromStored(storedFile: StoredFile, thumbnail?: string): FileWithUrl { + const enhancedFile: FileWithUrl = { + id: storedFile.id, + storedInIndexedDB: true, + url: undefined, // Don't create blob URL immediately to save memory + thumbnail: thumbnail || storedFile.thumbnail, + // File metadata + name: storedFile.name, + size: storedFile.size, + type: storedFile.type, + lastModified: storedFile.lastModified, + // Lazy-loading File interface methods + arrayBuffer: async () => { + const data = await fileStorage.getFileData(storedFile.id); + if (!data) throw new Error(`File ${storedFile.name} not found in IndexedDB - may have been purged`); + return data; + }, + slice: (start?: number, end?: number, contentType?: string) => { + // Return a promise-based slice that loads from IndexedDB + return new Blob([], { type: contentType || storedFile.type }); + }, + stream: () => { + throw new Error('Stream not implemented for IndexedDB 
files'); + }, + text: async () => { + const data = await fileStorage.getFileData(storedFile.id); + if (!data) throw new Error(`File ${storedFile.name} not found in IndexedDB - may have been purged`); + return new TextDecoder().decode(data); + } + } as FileWithUrl; + + return enhancedFile; +} + +/** + * Load files from IndexedDB and convert to enhanced file objects + */ +export async function loadFilesFromIndexedDB(): Promise { + try { + await fileStorage.init(); + const storedFiles = await fileStorage.getAllFileMetadata(); + + if (storedFiles.length === 0) { + return []; + } + + const restoredFiles: FileWithUrl[] = storedFiles + .filter(storedFile => { + // Filter out corrupted entries + return storedFile && + storedFile.name && + typeof storedFile.size === 'number'; + }) + .map(storedFile => { + try { + return createEnhancedFileFromStored(storedFile); + } catch (error) { + console.error('Failed to restore file:', storedFile?.name || 'unknown', error); + return null; + } + }) + .filter((file): file is FileWithUrl => file !== null); + + return restoredFiles; + } catch (error) { + console.error('Failed to load files from IndexedDB:', error); + return []; + } +} + +/** + * Clean up blob URLs from file objects + */ +export function cleanupFileUrls(files: FileWithUrl[]): void { + files.forEach(file => { + if (file.url && !file.url.startsWith('indexeddb:')) { + URL.revokeObjectURL(file.url); + } + }); +} + +/** + * Check if file should use blob URL or IndexedDB direct access + */ +export function shouldUseDirectIndexedDBAccess(file: FileWithUrl): boolean { + const FILE_SIZE_LIMIT = 100 * 1024 * 1024; // 100MB + return file.size > FILE_SIZE_LIMIT; +} \ No newline at end of file diff --git a/frontend/src/utils/storageUtils.ts b/frontend/src/utils/storageUtils.ts new file mode 100644 index 000000000..def05b96d --- /dev/null +++ b/frontend/src/utils/storageUtils.ts @@ -0,0 +1,71 @@ +import { StorageStats } from "../services/fileStorage"; +import { FileWithUrl } from "../types/file"; + +/** + * Storage operation types for incremental updates + */ +export type StorageOperation = 'add' | 'remove' | 'clear'; + +/** + * Update storage stats incrementally based on operation + */ +export function updateStorageStatsIncremental( + currentStats: StorageStats, + operation: StorageOperation, + files: FileWithUrl[] = [] +): StorageStats { + const filesSizeTotal = files.reduce((total, file) => total + file.size, 0); + + switch (operation) { + case 'add': + return { + ...currentStats, + used: currentStats.used + filesSizeTotal, + available: currentStats.available - filesSizeTotal, + fileCount: currentStats.fileCount + files.length + }; + + case 'remove': + return { + ...currentStats, + used: Math.max(0, currentStats.used - filesSizeTotal), + available: currentStats.available + filesSizeTotal, + fileCount: Math.max(0, currentStats.fileCount - files.length) + }; + + case 'clear': + return { + ...currentStats, + used: 0, + available: currentStats.quota || currentStats.available, + fileCount: 0 + }; + + default: + return currentStats; + } +} + +/** + * Check storage usage and return warning message if needed + */ +export function checkStorageWarnings(stats: StorageStats): string | null { + if (!stats.quota || stats.used === 0) return null; + + const usagePercent = (stats.used / stats.quota) * 100; + + if (usagePercent > 90) { + return 'Warning: Storage is nearly full (>90%). Browser may start clearing data.'; + } else if (usagePercent > 80) { + return 'Storage is getting full (>80%). 
Consider removing old files.'; + } + + return null; +} + +/** + * Calculate storage usage percentage + */ +export function getStorageUsagePercent(stats: StorageStats): number { + return stats.quota ? (stats.used / stats.quota) * 100 : 0; +} \ No newline at end of file diff --git a/frontend/src/utils/thumbnailUtils.ts b/frontend/src/utils/thumbnailUtils.ts new file mode 100644 index 000000000..1bc9bf069 --- /dev/null +++ b/frontend/src/utils/thumbnailUtils.ts @@ -0,0 +1,51 @@ +import { getDocument } from "pdfjs-dist"; + +/** + * Generate thumbnail for a PDF file during upload + * Returns base64 data URL or undefined if generation fails + */ +export async function generateThumbnailForFile(file: File): Promise { + // Skip thumbnail generation for large files to avoid memory issues + if (file.size >= 50 * 1024 * 1024) { // 50MB limit + console.log('Skipping thumbnail generation for large file:', file.name); + return undefined; + } + + try { + console.log('Generating thumbnail for', file.name); + + // Only read first 2MB for thumbnail generation to save memory + const chunkSize = 2 * 1024 * 1024; // 2MB + const chunk = file.slice(0, Math.min(chunkSize, file.size)); + const arrayBuffer = await chunk.arrayBuffer(); + + const pdf = await getDocument({ + data: arrayBuffer, + disableAutoFetch: true, + disableStream: true + }).promise; + + const page = await pdf.getPage(1); + const viewport = page.getViewport({ scale: 0.2 }); // Smaller scale for memory efficiency + const canvas = document.createElement("canvas"); + canvas.width = viewport.width; + canvas.height = viewport.height; + const context = canvas.getContext("2d"); + + if (!context) { + throw new Error('Could not get canvas context'); + } + + await page.render({ canvasContext: context, viewport }).promise; + const thumbnail = canvas.toDataURL(); + + // Immediately clean up memory after thumbnail generation + pdf.destroy(); + console.log('Thumbnail generated and PDF destroyed for', file.name); + + return thumbnail; + } catch (error) { + console.warn('Failed to generate thumbnail for', file.name, error); + return undefined; + } +} \ No newline at end of file diff --git a/src/main/resources/messages_en_GB.properties b/src/main/resources/messages_en_GB.properties index 8e7c58a32..48c6fe948 100644 --- a/src/main/resources/messages_en_GB.properties +++ b/src/main/resources/messages_en_GB.properties @@ -1453,6 +1453,12 @@ fileManager.dragDrop=Drag & Drop files here fileManager.clickToUpload=Click to upload files fileManager.selectedFiles=Selected Files fileManager.clearAll=Clear All +fileManager.storage=Storage +fileManager.filesStored=files stored +fileManager.storageError=Storage error occurred +fileManager.storageLow=Storage is running low. Consider removing old files. +fileManager.uploadError=Failed to upload some files. +fileManager.supportMessage=Powered by browser database storage for unlimited capacity # Page Editor pageEditor.title=Page Editor
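
Usage sketch (not part of the patch): a minimal, self-contained example of driving the new fileStorage service that this diff introduces, mirroring what FileManager and the Compress/Merge/Split tools do. The demoRoundTrip helper and the import path are illustrative assumptions; the fileStorage calls themselves are the ones defined in frontend/src/services/fileStorage.ts above.

import { fileStorage } from "../services/fileStorage";

// Store a PDF, list metadata, rebuild a File for an API call, then clean up.
async function demoRoundTrip(samplePdf: File): Promise<void> {
  await fileStorage.init();

  // Persist the file in IndexedDB (thumbnail argument is optional).
  const stored = await fileStorage.storeFile(samplePdf);

  // List metadata without loading ArrayBuffers into memory.
  const metadata = await fileStorage.getAllFileMetadata();
  console.log(`IndexedDB now holds ${metadata.length} file(s)`);

  // Rehydrate a real File object for FormData, as the tool panels do.
  const record = await fileStorage.getFile(stored.id);
  if (record) {
    const file = fileStorage.createFileFromStored(record);
    const formData = new FormData();
    formData.append("fileInput", file);
  }

  // Report usage the same way StorageStatsCard does.
  const stats = await fileStorage.getStorageStats();
  console.log(`Used ${stats.used} bytes across ${stats.fileCount} file(s)`);

  // Remove the stored entry again.
  await fileStorage.deleteFile(stored.id);
}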