Enforce type checking in CI (#4126)

# Description of Changes
Currently, the `tsconfig.json` file enforces strict type checking, but
nothing in CI checks that the code is actually correctly typed. [Vite
only transpiles TypeScript
code](https://vite.dev/guide/features.html#transpile-only), so it doesn't
ensure that the TS code we're running is correct.

This PR adds a type-checking step to CI and fixes the type errors
that have already crept into the codebase.

Note that many of the changes I've made to 'fix the types' are just
using `any` to disable the type checker because the code is under too
much churn to fix anything properly at the moment. I still think
enabling the type checker now is the best course of action though
because otherwise we'll never be able to fix all of them, and it should
at least help us not break things when adding new code.

Co-authored-by: James <james@crosscourtanalytics.com>
This commit is contained in:
James Brunton
2025-08-11 09:16:16 +01:00
committed by GitHub
parent 507ad1dc61
commit af5a9d1ae1
52 changed files with 1141 additions and 919 deletions

View File

@@ -45,32 +45,32 @@ export class EnhancedPDFProcessingService {
*/
async processFile(file: File, customConfig?: Partial<ProcessingConfig>): Promise<ProcessedFile | null> {
const fileKey = await this.generateFileKey(file);
// Check cache first
const cached = this.cache.get(fileKey);
if (cached) {
this.updateMetrics('cacheHit');
return cached;
}
// Check if already processing
if (this.processing.has(fileKey)) {
return null;
}
// Analyze file to determine optimal strategy
const analysis = await FileAnalyzer.analyzeFile(file);
if (analysis.isCorrupted) {
throw new Error(`File ${file.name} appears to be corrupted`);
}
// Create processing config
const config: ProcessingConfig = {
...this.defaultConfig,
strategy: analysis.recommendedStrategy,
...customConfig
};
// Start processing
this.startProcessing(file, fileKey, config, analysis.estimatedProcessingTime);
return null;
@@ -80,14 +80,14 @@ export class EnhancedPDFProcessingService {
* Start processing a file with the specified configuration
*/
private async startProcessing(
file: File,
fileKey: string,
file: File,
fileKey: string,
config: ProcessingConfig,
estimatedTime: number
): Promise<void> {
// Create cancellation token
const cancellationToken = new AbortController();
// Set initial state
const state: ProcessingState = {
fileKey,
@@ -99,7 +99,7 @@ export class EnhancedPDFProcessingService {
estimatedTimeRemaining: estimatedTime,
cancellationToken
};
this.processing.set(fileKey, state);
this.notifyListeners();
this.updateMetrics('started');
@@ -117,14 +117,14 @@ export class EnhancedPDFProcessingService {
// Cache the result
this.cache.set(fileKey, processedFile);
// Update state to completed
state.status = 'completed';
state.progress = 100;
state.completedAt = Date.now();
this.notifyListeners();
this.updateMetrics('completed', Date.now() - state.startedAt);
// Remove from processing map after brief delay
setTimeout(() => {
this.processing.delete(fileKey);
@@ -133,13 +133,13 @@ export class EnhancedPDFProcessingService {
} catch (error) {
console.error('Processing failed for', file.name, ':', error);
const processingError = ProcessingErrorHandler.createProcessingError(error);
state.status = 'error';
state.error = processingError;
this.notifyListeners();
this.updateMetrics('failed');
// Remove failed processing after delay
setTimeout(() => {
this.processing.delete(fileKey);
@@ -152,23 +152,23 @@ export class EnhancedPDFProcessingService {
* Execute the actual processing based on strategy
*/
private async executeProcessingStrategy(
file: File,
config: ProcessingConfig,
file: File,
config: ProcessingConfig,
state: ProcessingState
): Promise<ProcessedFile> {
switch (config.strategy) {
case 'immediate_full':
return this.processImmediateFull(file, config, state);
case 'priority_pages':
return this.processPriorityPages(file, config, state);
case 'progressive_chunked':
return this.processProgressiveChunked(file, config, state);
case 'metadata_only':
return this.processMetadataOnly(file, config, state);
default:
return this.processImmediateFull(file, config, state);
}
@@ -178,29 +178,29 @@ export class EnhancedPDFProcessingService {
* Process all pages immediately (for small files)
*/
private async processImmediateFull(
file: File,
config: ProcessingConfig,
file: File,
config: ProcessingConfig,
state: ProcessingState
): Promise<ProcessedFile> {
const arrayBuffer = await file.arrayBuffer();
const pdf = await getDocument({ data: arrayBuffer }).promise;
const totalPages = pdf.numPages;
state.progress = 10;
this.notifyListeners();
const pages: PDFPage[] = [];
for (let i = 1; i <= totalPages; i++) {
// Check for cancellation
if (state.cancellationToken?.signal.aborted) {
pdf.destroy();
throw new Error('Processing cancelled');
}
const page = await pdf.getPage(i);
const thumbnail = await this.renderPageThumbnail(page, config.thumbnailQuality);
pages.push({
id: `${file.name}-page-${i}`,
pageNumber: i,
@@ -208,17 +208,17 @@ export class EnhancedPDFProcessingService {
rotation: 0,
selected: false
});
// Update progress
state.progress = 10 + (i / totalPages) * 85;
state.currentPage = i;
this.notifyListeners();
}
pdf.destroy();
state.progress = 100;
this.notifyListeners();
return this.createProcessedFile(file, pages, totalPages);
}
@@ -226,30 +226,30 @@ export class EnhancedPDFProcessingService {
* Process priority pages first, then queue the rest
*/
private async processPriorityPages(
file: File,
config: ProcessingConfig,
file: File,
config: ProcessingConfig,
state: ProcessingState
): Promise<ProcessedFile> {
const arrayBuffer = await file.arrayBuffer();
const pdf = await getDocument({ data: arrayBuffer }).promise;
const totalPages = pdf.numPages;
state.progress = 10;
this.notifyListeners();
const pages: PDFPage[] = [];
const priorityCount = Math.min(config.priorityPageCount, totalPages);
// Process priority pages first
for (let i = 1; i <= priorityCount; i++) {
if (state.cancellationToken?.signal.aborted) {
pdf.destroy();
throw new Error('Processing cancelled');
}
const page = await pdf.getPage(i);
const thumbnail = await this.renderPageThumbnail(page, config.thumbnailQuality);
pages.push({
id: `${file.name}-page-${i}`,
pageNumber: i,
@@ -257,12 +257,12 @@ export class EnhancedPDFProcessingService {
rotation: 0,
selected: false
});
state.progress = 10 + (i / priorityCount) * 60;
state.currentPage = i;
this.notifyListeners();
}
// Create placeholder pages for remaining pages
for (let i = priorityCount + 1; i <= totalPages; i++) {
pages.push({
@@ -273,11 +273,11 @@ export class EnhancedPDFProcessingService {
selected: false
});
}
pdf.destroy();
state.progress = 100;
this.notifyListeners();
return this.createProcessedFile(file, pages, totalPages);
}
@@ -285,33 +285,33 @@ export class EnhancedPDFProcessingService {
* Process in chunks with breaks between chunks
*/
private async processProgressiveChunked(
file: File,
config: ProcessingConfig,
file: File,
config: ProcessingConfig,
state: ProcessingState
): Promise<ProcessedFile> {
const arrayBuffer = await file.arrayBuffer();
const pdf = await getDocument({ data: arrayBuffer }).promise;
const totalPages = pdf.numPages;
state.progress = 10;
this.notifyListeners();
const pages: PDFPage[] = [];
const chunkSize = config.chunkSize;
let processedPages = 0;
// Process first chunk immediately
const firstChunkEnd = Math.min(chunkSize, totalPages);
for (let i = 1; i <= firstChunkEnd; i++) {
if (state.cancellationToken?.signal.aborted) {
pdf.destroy();
throw new Error('Processing cancelled');
}
const page = await pdf.getPage(i);
const thumbnail = await this.renderPageThumbnail(page, config.thumbnailQuality);
pages.push({
id: `${file.name}-page-${i}`,
pageNumber: i,
@@ -319,18 +319,18 @@ export class EnhancedPDFProcessingService {
rotation: 0,
selected: false
});
processedPages++;
state.progress = 10 + (processedPages / totalPages) * 70;
state.currentPage = i;
this.notifyListeners();
// Small delay to prevent UI blocking
if (i % 5 === 0) {
await new Promise(resolve => setTimeout(resolve, 10));
}
}
// Create placeholders for remaining pages
for (let i = firstChunkEnd + 1; i <= totalPages; i++) {
pages.push({
@@ -341,11 +341,11 @@ export class EnhancedPDFProcessingService {
selected: false
});
}
pdf.destroy();
state.progress = 100;
this.notifyListeners();
return this.createProcessedFile(file, pages, totalPages);
}
@@ -353,17 +353,17 @@ export class EnhancedPDFProcessingService {
* Process metadata only (for very large files)
*/
private async processMetadataOnly(
file: File,
config: ProcessingConfig,
file: File,
config: ProcessingConfig,
state: ProcessingState
): Promise<ProcessedFile> {
const arrayBuffer = await file.arrayBuffer();
const pdf = await getDocument({ data: arrayBuffer }).promise;
const totalPages = pdf.numPages;
state.progress = 50;
this.notifyListeners();
// Create placeholder pages without thumbnails
const pages: PDFPage[] = [];
for (let i = 1; i <= totalPages; i++) {
@@ -375,11 +375,11 @@ export class EnhancedPDFProcessingService {
selected: false
});
}
pdf.destroy();
state.progress = 100;
this.notifyListeners();
return this.createProcessedFile(file, pages, totalPages);
}
@@ -389,17 +389,17 @@ export class EnhancedPDFProcessingService {
/**
 * Renders a single PDF.js page to a JPEG data URL for use as a thumbnail.
 * @param page PDF.js page proxy (typed `any` upstream).
 * @param quality Quality tier; each tier maps to a fixed render scale.
 * @returns JPEG data URL of the rendered page.
 * @throws Error when a 2D canvas context cannot be obtained.
 */
private async renderPageThumbnail(page: any, quality: 'low' | 'medium' | 'high'): Promise<string> {
// 'low' is deliberately small to keep page-editor thumbnails cheap.
const scaleByQuality = { low: 0.2, medium: 0.5, high: 0.8 } as const;
const viewport = page.getViewport({ scale: scaleByQuality[quality] });
const canvas = document.createElement('canvas');
canvas.width = viewport.width;
canvas.height = viewport.height;
const ctx = canvas.getContext('2d');
if (!ctx) {
throw new Error('Could not get canvas context');
}
await page.render({ canvasContext: ctx, viewport }).promise;
// JPEG at 0.8 quality compresses thumbnails better than PNG.
return canvas.toDataURL('image/jpeg', 0.8);
}
@@ -513,14 +513,15 @@ export class EnhancedPDFProcessingService {
state.cancellationToken.abort();
}
});
// Clear processing states
this.processing.clear();
this.notifyListeners();
// Force memory cleanup hint
if (typeof window !== 'undefined' && window.gc) {
setTimeout(() => window.gc(), 100);
let gc = window.gc;
setTimeout(() => gc(), 100);
}
}
@@ -542,4 +543,4 @@ export class EnhancedPDFProcessingService {
}
// Export singleton instance
export const enhancedPDFProcessingService = EnhancedPDFProcessingService.getInstance();
export const enhancedPDFProcessingService = EnhancedPDFProcessingService.getInstance();

View File

@@ -36,11 +36,11 @@ export class FileAnalyzer {
// Determine strategy based on file characteristics
analysis.recommendedStrategy = this.determineStrategy(file.size, quickAnalysis.pageCount);
// Estimate processing time
analysis.estimatedProcessingTime = this.estimateProcessingTime(
file.size,
quickAnalysis.pageCount,
file.size,
quickAnalysis.pageCount,
analysis.recommendedStrategy
);
@@ -66,15 +66,15 @@ export class FileAnalyzer {
// For large files, try the whole file first (PDF.js needs the complete structure)
const arrayBuffer = await file.arrayBuffer();
const pdf = await getDocument({
const pdf = await getDocument({
data: arrayBuffer,
stopAtErrors: false, // Don't stop at minor errors
verbosity: 0 // Suppress PDF.js warnings
}).promise;
const pageCount = pdf.numPages;
const isEncrypted = pdf.isEncrypted;
const isEncrypted = (pdf as any).isEncrypted;
// Clean up
pdf.destroy();
@@ -88,7 +88,7 @@ export class FileAnalyzer {
// Try to determine if it's corruption vs encryption
const errorMessage = error instanceof Error ? error.message.toLowerCase() : '';
const isEncrypted = errorMessage.includes('password') || errorMessage.includes('encrypted');
return {
pageCount: 0,
isEncrypted,
@@ -129,8 +129,8 @@ export class FileAnalyzer {
* Estimate processing time based on file characteristics and strategy
*/
private static estimateProcessingTime(
fileSize: number,
pageCount: number = 0,
fileSize: number,
pageCount: number = 0,
strategy: ProcessingStrategy
): number {
const baseTimes = {
@@ -145,20 +145,20 @@ export class FileAnalyzer {
switch (strategy) {
case 'metadata_only':
return baseTime;
case 'immediate_full':
return pageCount * baseTime;
case 'priority_pages':
// Estimate time for priority pages (first 10)
const priorityPages = Math.min(pageCount, 10);
return priorityPages * baseTime;
case 'progressive_chunked':
// Estimate time for first chunk (20 pages)
const firstChunk = Math.min(pageCount, 20);
return firstChunk * baseTime;
default:
return pageCount * baseTime;
}
@@ -209,11 +209,11 @@ export class FileAnalyzer {
if (totalSize > this.SIZE_THRESHOLDS.LARGE) {
return Math.max(1, Math.floor(fileCount / 4));
}
if (totalSize > this.SIZE_THRESHOLDS.MEDIUM) {
return Math.max(2, Math.floor(fileCount / 2));
}
// Process all at once for smaller total sizes
return fileCount;
}
@@ -231,10 +231,10 @@ export class FileAnalyzer {
const header = file.slice(0, 8);
const headerBytes = new Uint8Array(await header.arrayBuffer());
const headerString = String.fromCharCode(...headerBytes);
return headerString.startsWith('%PDF-');
} catch (error) {
return false;
}
}
}
}

View File

@@ -35,11 +35,11 @@ class FileStorageService {
if (this.db) {
return Promise.resolve();
}
if (this.initPromise) {
return this.initPromise;
}
this.initPromise = new Promise((resolve, reject) => {
const request = indexedDB.open(this.dbName, this.dbVersion);
@@ -47,7 +47,7 @@ class FileStorageService {
this.initPromise = null;
reject(request.error);
};
request.onsuccess = () => {
this.db = request.result;
console.log('IndexedDB connection established');
@@ -57,9 +57,9 @@ class FileStorageService {
request.onupgradeneeded = (event) => {
const db = (event.target as IDBOpenDBRequest).result;
const oldVersion = (event as any).oldVersion;
console.log('IndexedDB upgrade needed from version', oldVersion, 'to', this.dbVersion);
// Only recreate object store if it doesn't exist or if upgrading from version < 2
if (!db.objectStoreNames.contains(this.storeName)) {
const store = db.createObjectStore(this.storeName, { keyPath: 'id' });
@@ -76,7 +76,7 @@ class FileStorageService {
}
};
});
return this.initPromise;
}
@@ -88,7 +88,7 @@ class FileStorageService {
const id = `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const arrayBuffer = await file.arrayBuffer();
const storedFile: StoredFile = {
id,
name: file.name,
@@ -103,16 +103,16 @@ class FileStorageService {
try {
const transaction = this.db!.transaction([this.storeName], 'readwrite');
const store = transaction.objectStore(this.storeName);
// Debug logging
console.log('Object store keyPath:', store.keyPath);
console.log('Storing file:', {
id: storedFile.id,
name: storedFile.name,
console.log('Storing file:', {
id: storedFile.id,
name: storedFile.name,
hasData: !!storedFile.data,
dataSize: storedFile.data.byteLength
dataSize: storedFile.data.byteLength
});
const request = store.add(storedFile);
request.onerror = () => {
@@ -161,10 +161,10 @@ class FileStorageService {
request.onerror = () => reject(request.error);
request.onsuccess = () => {
// Filter out null/corrupted entries
const files = request.result.filter(file =>
file &&
file.data &&
file.name &&
const files = request.result.filter(file =>
file &&
file.data &&
file.name &&
typeof file.size === 'number'
);
resolve(files);
@@ -277,7 +277,7 @@ class FileStorageService {
let available = 0;
let quota: number | undefined;
let fileCount = 0;
try {
// Get browser quota for context
if ('storage' in navigator && 'estimate' in navigator.storage) {
@@ -285,17 +285,17 @@ class FileStorageService {
quota = estimate.quota;
available = estimate.quota || 0;
}
// Calculate our actual IndexedDB usage from file metadata
const files = await this.getAllFileMetadata();
used = files.reduce((total, file) => total + (file?.size || 0), 0);
fileCount = files.length;
// Adjust available space
if (quota) {
available = quota - used;
}
} catch (error) {
console.warn('Could not get storage stats:', error);
// If we can't read metadata, database might be purged
@@ -332,12 +332,12 @@ class FileStorageService {
*/
async debugAllDatabases(): Promise<void> {
console.log('=== Checking All IndexedDB Databases ===');
if ('databases' in indexedDB) {
try {
const databases = await indexedDB.databases();
console.log('Found databases:', databases);
for (const dbInfo of databases) {
if (dbInfo.name?.includes('stirling') || dbInfo.name?.includes('pdf')) {
console.log(`Checking database: ${dbInfo.name} (version: ${dbInfo.version})`);
@@ -347,7 +347,7 @@ class FileStorageService {
request.onsuccess = () => resolve(request.result);
request.onerror = () => reject(request.error);
});
console.log(`Database ${dbInfo.name} object stores:`, Array.from(db.objectStoreNames));
db.close();
} catch (error) {
@@ -361,7 +361,7 @@ class FileStorageService {
} else {
console.log('indexedDB.databases() not supported');
}
// Also check our specific database with different versions
for (let version = 1; version <= 3; version++) {
try {
@@ -375,9 +375,9 @@ class FileStorageService {
request.transaction?.abort();
};
});
console.log(`Version ${version} object stores:`, Array.from(db.objectStoreNames));
if (db.objectStoreNames.contains('files')) {
const transaction = db.transaction(['files'], 'readonly');
const store = transaction.objectStore('files');
@@ -386,10 +386,12 @@ class FileStorageService {
console.log(`Version ${version} files store has ${countRequest.result} entries`);
};
}
db.close();
} catch (error) {
console.log(`Version ${version} not accessible:`, error.message);
if (error instanceof Error) {
console.log(`Version ${version} not accessible:`, error.message);
}
}
}
}
@@ -403,7 +405,7 @@ class FileStorageService {
return new Promise((resolve, reject) => {
const transaction = this.db!.transaction([this.storeName], 'readonly');
const store = transaction.objectStore(this.storeName);
// First try getAll to see if there's anything
const getAllRequest = store.getAll();
getAllRequest.onsuccess = () => {
@@ -422,7 +424,7 @@ class FileStorageService {
});
});
};
// Then try cursor
const cursorRequest = store.openCursor();
console.log('=== IndexedDB Cursor Debug ===');
@@ -432,7 +434,7 @@ class FileStorageService {
console.error('Cursor error:', cursorRequest.error);
reject(cursorRequest.error);
};
cursorRequest.onsuccess = (event) => {
const cursor = (event.target as IDBRequest).result;
if (cursor) {
@@ -464,21 +466,21 @@ class FileStorageService {
if (!storedFile || !storedFile.data) {
throw new Error('Invalid stored file: missing data');
}
if (!storedFile.name || typeof storedFile.size !== 'number') {
throw new Error('Invalid stored file: missing metadata');
}
const blob = new Blob([storedFile.data], { type: storedFile.type });
const file = new File([blob], storedFile.name, {
type: storedFile.type,
lastModified: storedFile.lastModified
});
// Add custom properties for compatibility
Object.defineProperty(file, 'id', { value: storedFile.id, writable: false });
Object.defineProperty(file, 'thumbnail', { value: storedFile.thumbnail, writable: false });
return file;
}
@@ -509,15 +511,15 @@ class FileStorageService {
/**
 * Creates a short-lived object URL for the stored PDF identified by `id`.
 * The URL is revoked automatically after 10 seconds so the backing blob
 * can be garbage-collected; callers must consume the URL promptly.
 * @returns the object URL, or null when no data exists for the id.
 */
async createTemporaryBlobUrl(id: string): Promise<string | null> {
const data = await this.getFileData(id);
if (!data) return null;
const pdfBlob = new Blob([data], { type: 'application/pdf' });
const objectUrl = URL.createObjectURL(pdfBlob);
// Auto-revoke after 10 seconds to free the blob's memory.
setTimeout(() => {
URL.revokeObjectURL(objectUrl);
}, 10000);
return objectUrl;
}
@@ -538,7 +540,7 @@ class FileStorageService {
if (storedFile) {
storedFile.thumbnail = thumbnail;
const updateRequest = store.put(storedFile);
updateRequest.onsuccess = () => {
console.log('Thumbnail updated for file:', id);
resolve(true);
@@ -569,7 +571,7 @@ class FileStorageService {
async isStorageLow(): Promise<boolean> {
const stats = await this.getStorageStats();
if (!stats.quota) return false;
const usagePercent = stats.used / stats.quota;
return usagePercent > 0.8; // Consider low if over 80% used
}
@@ -579,12 +581,12 @@ class FileStorageService {
*/
async cleanupOldFiles(maxFiles: number = 50): Promise<void> {
const files = await this.getAllFileMetadata();
if (files.length <= maxFiles) return;
// Sort by last modified (oldest first)
files.sort((a, b) => a.lastModified - b.lastModified);
// Delete oldest files
const filesToDelete = files.slice(0, files.length - maxFiles);
for (const file of filesToDelete) {
@@ -599,4 +601,4 @@ export const fileStorage = new FileStorageService();
// Helper hook for React components
/**
 * React hook exposing the module-level FileStorageService singleton,
 * so components depend on the hook rather than the raw export.
 */
export function useFileStorage() {
return fileStorage;
}
}

View File

@@ -22,20 +22,20 @@ export class PDFProcessingService {
async getProcessedFile(file: File): Promise<ProcessedFile | null> {
const fileKey = this.generateFileKey(file);
// Check cache first
const cached = this.cache.get(fileKey);
if (cached) {
console.log('Cache hit for:', file.name);
return cached;
}
// Check if already processing
if (this.processing.has(fileKey)) {
console.log('Already processing:', file.name);
return null; // Will be available when processing completes
}
// Start processing
this.startProcessing(file, fileKey);
return null;
@@ -48,9 +48,10 @@ export class PDFProcessingService {
fileName: file.name,
status: 'processing',
progress: 0,
startedAt: Date.now()
startedAt: Date.now(),
strategy: 'immediate_full'
};
this.processing.set(fileKey, state);
this.notifyListeners();
@@ -63,13 +64,13 @@ export class PDFProcessingService {
// Cache the result
this.cache.set(fileKey, processedFile);
// Update state to completed
state.status = 'completed';
state.progress = 100;
state.completedAt = Date.now();
this.notifyListeners();
// Remove from processing map after brief delay
setTimeout(() => {
this.processing.delete(fileKey);
@@ -79,9 +80,9 @@ export class PDFProcessingService {
} catch (error) {
console.error('Processing failed for', file.name, ':', error);
state.status = 'error';
state.error = error instanceof Error ? error.message : 'Unknown error';
state.error = (error instanceof Error ? error.message : 'Unknown error') as any;
this.notifyListeners();
// Remove failed processing after delay
setTimeout(() => {
this.processing.delete(fileKey);
@@ -91,29 +92,29 @@ export class PDFProcessingService {
}
private async processFileWithProgress(
file: File,
file: File,
onProgress: (progress: number) => void
): Promise<ProcessedFile> {
const arrayBuffer = await file.arrayBuffer();
const pdf = await getDocument({ data: arrayBuffer }).promise;
const totalPages = pdf.numPages;
onProgress(10); // PDF loaded
const pages: PDFPage[] = [];
for (let i = 1; i <= totalPages; i++) {
const page = await pdf.getPage(i);
const viewport = page.getViewport({ scale: 0.5 });
const canvas = document.createElement('canvas');
canvas.width = viewport.width;
canvas.height = viewport.height;
const context = canvas.getContext('2d');
if (context) {
await page.render({ canvasContext: context, viewport }).promise;
const thumbnail = canvas.toDataURL();
pages.push({
id: `${file.name}-page-${i}`,
pageNumber: i,
@@ -122,15 +123,15 @@ export class PDFProcessingService {
selected: false
});
}
// Update progress
const progress = 10 + (i / totalPages) * 85; // 10-95%
onProgress(progress);
}
pdf.destroy();
onProgress(100);
return {
id: `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
pages,
@@ -185,4 +186,4 @@ export class PDFProcessingService {
}
// Export singleton instance
export const pdfProcessingService = PDFProcessingService.getInstance();
export const pdfProcessingService = PDFProcessingService.getInstance();

View File

@@ -1,4 +1,17 @@
import JSZip from 'jszip';
import JSZip, { JSZipObject } from 'jszip';
// Undocumented interface in JSZip for JSZipObject._data
// NOTE(review): this shape mirrors JSZip's internal CompressedObject — it is
// not part of JSZip's public API, so re-verify these fields when upgrading.
interface CompressedObject {
compressedSize: number;
uncompressedSize: number;
crc32: number;
compression: object;
compressedContent: string|ArrayBuffer|Uint8Array|Buffer;
}
/**
 * Accessor for JSZip's undocumented internal `_data` field on a zip entry.
 * Cast goes through `unknown` to a structural type instead of `any`, so the
 * expected shape is at least spelled out to the compiler.
 * @returns the internal compressed-object record, or undefined when JSZip
 *          has not populated `_data` on this entry.
 */
const getData = (zipEntry: JSZipObject): CompressedObject | undefined => {
return (zipEntry as unknown as { _data?: CompressedObject })._data;
}
export interface ZipExtractionResult {
success: boolean;
@@ -68,7 +81,7 @@ export class ZipFileService {
}
fileCount++;
const uncompressedSize = zipEntry._data?.uncompressedSize || 0;
const uncompressedSize = getData(zipEntry)?.uncompressedSize || 0;
totalSize += uncompressedSize;
// Check if file is a PDF
@@ -109,25 +122,25 @@ export class ZipFileService {
async createZipFromFiles(files: File[], zipFilename: string): Promise<{ zipFile: File; size: number }> {
try {
const zip = new JSZip();
// Add each file to the ZIP
for (const file of files) {
const content = await file.arrayBuffer();
zip.file(file.name, content);
}
// Generate ZIP blob
const zipBlob = await zip.generateAsync({
const zipBlob = await zip.generateAsync({
type: 'blob',
compression: 'DEFLATE',
compressionOptions: { level: 6 }
});
const zipFile = new File([zipBlob], zipFilename, {
const zipFile = new File([zipBlob], zipFilename, {
type: 'application/zip',
lastModified: Date.now()
});
return { zipFile, size: zipFile.size };
} catch (error) {
throw new Error(`Failed to create ZIP file: ${error instanceof Error ? error.message : 'Unknown error'}`);
@@ -162,7 +175,7 @@ export class ZipFileService {
const zipContents = await zip.loadAsync(file);
// Get all PDF files
const pdfFiles = Object.entries(zipContents.files).filter(([filename, zipEntry]) =>
const pdfFiles = Object.entries(zipContents.files).filter(([filename, zipEntry]) =>
!zipEntry.dir && this.isPdfFile(filename)
);
@@ -171,7 +184,7 @@ export class ZipFileService {
// Extract each PDF file
for (let i = 0; i < pdfFiles.length; i++) {
const [filename, zipEntry] = pdfFiles[i];
try {
// Report progress
if (onProgress) {
@@ -185,9 +198,9 @@ export class ZipFileService {
// Extract file content
const content = await zipEntry.async('uint8array');
// Create File object
const extractedFile = new File([content], this.sanitizeFilename(filename), {
const extractedFile = new File([content as any], this.sanitizeFilename(filename), {
type: 'application/pdf',
lastModified: zipEntry.date?.getTime() || Date.now()
});
@@ -235,7 +248,7 @@ export class ZipFileService {
const validExtensions = ['.zip'];
const hasValidType = validTypes.includes(file.type);
const hasValidExtension = validExtensions.some(ext =>
const hasValidExtension = validExtensions.some(ext =>
file.name.toLowerCase().endsWith(ext)
);
@@ -257,7 +270,7 @@ export class ZipFileService {
// Read first few bytes to check PDF header
const buffer = await file.slice(0, 8).arrayBuffer();
const bytes = new Uint8Array(buffer);
// Check for PDF header: %PDF-
return bytes[0] === 0x25 && // %
bytes[1] === 0x50 && // P
@@ -275,7 +288,7 @@ export class ZipFileService {
private sanitizeFilename(filename: string): string {
// Remove directory path and get just the filename
const basename = filename.split('/').pop() || filename;
// Remove or replace unsafe characters
return basename
.replace(/[<>:"/\\|?*]/g, '_') // Replace unsafe chars with underscore
@@ -309,15 +322,15 @@ export class ZipFileService {
try {
const zip = new JSZip();
await zip.loadAsync(file);
// Check if any files are encrypted
for (const [filename, zipEntry] of Object.entries(zip.files)) {
if (zipEntry.options?.compression === 'STORE' && zipEntry._data?.compressedSize === 0) {
if (zipEntry.options?.compression === 'STORE' && getData(zipEntry)?.compressedSize === 0) {
// This might indicate encryption, but JSZip doesn't provide direct encryption detection
// We'll handle this in the extraction phase
}
}
return false; // JSZip will throw an error if password is required
} catch (error) {
// If we can't load the ZIP, it might be password protected
@@ -328,4 +341,4 @@ export class ZipFileService {
}
// Export singleton instance
export const zipFileService = new ZipFileService();
export const zipFileService = new ZipFileService();