diff --git a/frontend/eslint.config.mjs b/frontend/eslint.config.mjs
index 1405d64c2..7d0e65171 100644
--- a/frontend/eslint.config.mjs
+++ b/frontend/eslint.config.mjs
@@ -16,7 +16,6 @@ export default defineConfig(
   {
     rules: {
       "no-undef": "off", // Temporarily disabled until codebase conformant
-      "no-case-declarations": "off", // Temporarily disabled until codebase conformant
       "@typescript-eslint/ban-ts-comment": "off", // Temporarily disabled until codebase conformant
       "@typescript-eslint/no-empty-object-type": "off", // Temporarily disabled until codebase conformant
       "@typescript-eslint/no-explicit-any": "off", // Temporarily disabled until codebase conformant
diff --git a/frontend/src/hooks/tools/shared/useToolOperation.ts b/frontend/src/hooks/tools/shared/useToolOperation.ts
index d8d35176d..3005d5c35 100644
--- a/frontend/src/hooks/tools/shared/useToolOperation.ts
+++ b/frontend/src/hooks/tools/shared/useToolOperation.ts
@@ -174,7 +174,7 @@ export const useToolOperation = (
       let processedFiles: File[];

       switch (config.toolType) {
-        case ToolType.singleFile:
+        case ToolType.singleFile: {
           // Individual file processing - separate API call per file
           const apiCallsConfig: ApiCallsConfig = {
             endpoint: config.endpoint,
@@ -190,8 +190,9 @@
             actions.setStatus
           );
           break;
+        }

-        case ToolType.multiFile:
+        case ToolType.multiFile: {
           // Multi-file processing - single API call with all files
           actions.setStatus('Processing files...');
           const formData = config.buildFormData(params, validFiles);
@@ -204,7 +205,7 @@
             // Use custom responseHandler for multi-file (handles ZIP extraction)
             processedFiles = await config.responseHandler(response.data, validFiles);
           } else if (response.data.type === 'application/pdf' ||
-            (response.headers && response.headers['content-type'] === 'application/pdf')) {
+                     (response.headers && response.headers['content-type'] === 'application/pdf')) {
             // Single PDF response (e.g. split with merge option) - use original filename
             const originalFileName = validFiles[0]?.name || 'document.pdf';
             const singleFile = new File([response.data], originalFileName, { type: 'application/pdf' });
@@ -219,6 +220,7 @@
             }
           }
           break;
+        }

         case ToolType.custom:
           actions.setStatus('Processing files...');
@@ -243,7 +245,7 @@
       // Replace input files with processed files (consumeFiles handles pinning)
       const inputFileIds: FileId[] = [];
       const inputFileRecords: FileRecord[] = [];
-      
+
       // Build parallel arrays of IDs and records for undo tracking
       for (const file of validFiles) {
         const fileId = findFileId(file);
@@ -259,9 +261,9 @@
           console.warn(`No file ID found for file: ${file.name}`);
         }
       }
-      
+
       const outputFileIds = await consumeFiles(inputFileIds, processedFiles);
-      
+
       // Store operation data for undo (only store what we need to avoid memory bloat)
       lastOperationRef.current = {
         inputFiles: validFiles, // Keep original File objects for undo
@@ -326,17 +328,17 @@
     try {
       // Undo the consume operation
       await undoConsumeFiles(inputFiles, inputFileRecords, outputFileIds);
-      
+
       // Clear results and operation tracking
       resetResults();
       lastOperationRef.current = null;
-      
+
       // Show success message
       actions.setStatus(t('undoSuccess', 'Operation undone successfully'));
-      
+
     } catch (error: any) {
       let errorMessage = extractErrorMessage(error);
-      
+
       // Provide more specific error messages based on error type
       if (error.message?.includes('Mismatch between input files')) {
         errorMessage = t('undoDataMismatch', 'Cannot undo: operation data is corrupted');
@@ -345,9 +347,9 @@
       } else if (error.name === 'QuotaExceededError') {
         errorMessage = t('undoQuotaError', 'Cannot undo: insufficient storage space');
       }
-      
+
       actions.setError(`${t('undoFailed', 'Failed to undo operation')}: ${errorMessage}`);
-      
+
       // Don't clear the operation data if undo failed - user might want to try again
     }
   }, [undoConsumeFiles, resetResults, actions, t]);
diff --git a/frontend/src/services/enhancedPDFProcessingService.ts b/frontend/src/services/enhancedPDFProcessingService.ts
index 65d0c6519..11d534a4f 100644
--- a/frontend/src/services/enhancedPDFProcessingService.ts
+++ b/frontend/src/services/enhancedPDFProcessingService.ts
@@ -459,11 +459,12 @@ export class EnhancedPDFProcessingService {
       case 'failed':
         this.metrics.failedFiles++;
         break;
-      case 'cacheHit':
+      case 'cacheHit': {
        // Update cache hit rate
        const totalAttempts = this.metrics.totalFiles + 1;
        this.metrics.cacheHitRate = (this.metrics.cacheHitRate * this.metrics.totalFiles + 1) / totalAttempts;
        break;
+      }
     }
   }

diff --git a/frontend/src/services/fileAnalyzer.ts b/frontend/src/services/fileAnalyzer.ts
index 537692600..1705bb54e 100644
--- a/frontend/src/services/fileAnalyzer.ts
+++ b/frontend/src/services/fileAnalyzer.ts
@@ -148,15 +148,17 @@ export class FileAnalyzer {
       case 'immediate_full':
         return pageCount * baseTime;

-      case 'priority_pages':
+      case 'priority_pages': {
         // Estimate time for priority pages (first 10)
         const priorityPages = Math.min(pageCount, 10);
         return priorityPages * baseTime;
+      }

-      case 'progressive_chunked':
+      case 'progressive_chunked': {
         // Estimate time for first chunk (20 pages)
         const firstChunk = Math.min(pageCount, 20);
         return firstChunk * baseTime;
+      }

       default:
         return pageCount * baseTime;
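
Note on the pattern: every change in this diff is the same mechanical fix for ESLint's `no-case-declarations` rule, which the first hunk re-enables by deleting its "off" override. The rule fires because a `case` clause is not a scope of its own: a `const` or `let` declared there is scoped to the entire `switch` body, so it is visible to every other clause, and a clause that reads it before the declaring clause has run throws a temporal-dead-zone ReferenceError. Wrapping each declaring clause in braces gives the declaration a proper block scope. A minimal standalone sketch of the fix (hypothetical names, not from this codebase):

    // Without the braces, each `label` would be scoped to the whole switch,
    // leak into the other clause, and trigger no-case-declarations.
    // The braced form below is the same fix applied to singleFile/multiFile,
    // 'cacheHit', 'priority_pages', and 'progressive_chunked' above.
    function describeToolType(kind: 'single' | 'multi'): string {
      switch (kind) {
        case 'single': {
          const label = 'one request per file'; // confined to this clause
          return label;
        }
        case 'multi': {
          const label = 'all files in one request'; // no clash with 'single'
          return label;
        }
      }
    }

The rule also accepts hoisting the declaration above the `switch`, but block-scoping each clause keeps the fix local and leaves the surrounding control flow untouched.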