Enable ESLint no-case-declarations rule

James Brunton 2025-09-02 17:48:25 +01:00
parent 65e331ae26
commit 4c39979460
4 changed files with 21 additions and 17 deletions
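Background, with an illustrative sketch (hypothetical code, not from this repository): no-case-declarations reports let, const, function, and class declarations written directly inside a case clause, because such declarations are scoped to the entire switch block rather than to the clause that contains them, which invites temporal-dead-zone errors and accidental sharing between cases. The fix applied throughout this commit is the usual one: wrap each offending case body in braces so its declarations become block-scoped.

// Hypothetical example - not code from this repository.
// Without braces, `label` belongs to the entire switch block: ESLint's
// no-case-declarations rule reports it, and reading `label` from another
// case first would throw a ReferenceError (temporal dead zone).
function describeUnsafe(kind: string): string {
  switch (kind) {
    case 'pdf':
      const label = 'PDF document'; // reported by no-case-declarations
      return label;
    default:
      return 'unknown';
  }
}

// The pattern applied in this commit: braces give each case its own scope.
function describeSafe(kind: string): string {
  switch (kind) {
    case 'pdf': {
      const label = 'PDF document';
      return label;
    }
    default:
      return 'unknown';
  }
}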

View File

@@ -19,7 +19,6 @@ export default defineConfig(
       "no-empty-pattern": "off", // Temporarily disabled until codebase conformant
       "no-undef": "off", // Temporarily disabled until codebase conformant
       "no-useless-escape": "off", // Temporarily disabled until codebase conformant
-      "no-case-declarations": "off", // Temporarily disabled until codebase conformant
       "prefer-const": "off", // Temporarily disabled until codebase conformant
       "@typescript-eslint/ban-ts-comment": "off", // Temporarily disabled until codebase conformant
       "@typescript-eslint/no-empty-object-type": "off", // Temporarily disabled until codebase conformant

View File

@@ -174,7 +174,7 @@ export const useToolOperation = <TParams>(
       let processedFiles: File[];
       switch (config.toolType) {
-        case ToolType.singleFile:
+        case ToolType.singleFile: {
           // Individual file processing - separate API call per file
           const apiCallsConfig: ApiCallsConfig<TParams> = {
             endpoint: config.endpoint,
@@ -190,8 +190,9 @@ export const useToolOperation = <TParams>(
             actions.setStatus
           );
           break;
-        case ToolType.multiFile:
+        }
+        case ToolType.multiFile: {
           // Multi-file processing - single API call with all files
           actions.setStatus('Processing files...');
           const formData = config.buildFormData(params, validFiles);
@@ -204,7 +205,7 @@ export const useToolOperation = <TParams>(
             // Use custom responseHandler for multi-file (handles ZIP extraction)
             processedFiles = await config.responseHandler(response.data, validFiles);
           } else if (response.data.type === 'application/pdf' ||
                      (response.headers && response.headers['content-type'] === 'application/pdf')) {
             // Single PDF response (e.g. split with merge option) - use original filename
             const originalFileName = validFiles[0]?.name || 'document.pdf';
             const singleFile = new File([response.data], originalFileName, { type: 'application/pdf' });
@@ -219,6 +220,7 @@ export const useToolOperation = <TParams>(
             }
           }
           break;
+        }
         case ToolType.custom:
           actions.setStatus('Processing files...');
@@ -243,7 +245,7 @@ export const useToolOperation = <TParams>(
       // Replace input files with processed files (consumeFiles handles pinning)
       const inputFileIds: FileId[] = [];
       const inputFileRecords: FileRecord[] = [];
       // Build parallel arrays of IDs and records for undo tracking
       for (const file of validFiles) {
         const fileId = findFileId(file);
@@ -259,9 +261,9 @@ export const useToolOperation = <TParams>(
           console.warn(`No file ID found for file: ${file.name}`);
         }
       }
       const outputFileIds = await consumeFiles(inputFileIds, processedFiles);
       // Store operation data for undo (only store what we need to avoid memory bloat)
       lastOperationRef.current = {
         inputFiles: validFiles, // Keep original File objects for undo
@@ -326,17 +328,17 @@ export const useToolOperation = <TParams>(
     try {
       // Undo the consume operation
       await undoConsumeFiles(inputFiles, inputFileRecords, outputFileIds);
       // Clear results and operation tracking
       resetResults();
       lastOperationRef.current = null;
       // Show success message
       actions.setStatus(t('undoSuccess', 'Operation undone successfully'));
     } catch (error: any) {
       let errorMessage = extractErrorMessage(error);
       // Provide more specific error messages based on error type
       if (error.message?.includes('Mismatch between input files')) {
         errorMessage = t('undoDataMismatch', 'Cannot undo: operation data is corrupted');
@@ -345,9 +347,9 @@ export const useToolOperation = <TParams>(
       } else if (error.name === 'QuotaExceededError') {
         errorMessage = t('undoQuotaError', 'Cannot undo: insufficient storage space');
       }
       actions.setError(`${t('undoFailed', 'Failed to undo operation')}: ${errorMessage}`);
       // Don't clear the operation data if undo failed - user might want to try again
     }
   }, [undoConsumeFiles, resetResults, actions, t]);
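A reduced sketch of the shape this leaves the hook in (simplified, hypothetical names; the real cases build API-call configs and form data): the shared result variable stays declared with let ahead of the switch and is assigned in every branch, while per-case temporaries now live inside the new braces and cannot leak into the other branches.

// Simplified, hypothetical sketch of the pattern; ToolType and the
// per-case work are stand-ins for the real hook's logic.
enum ToolType { singleFile, multiFile, custom }

function dispatch(toolType: ToolType, files: File[]): File[] {
  let processedFiles: File[]; // shared result, assigned in every branch
  switch (toolType) {
    case ToolType.singleFile: {
      const perFileLabel = 'per-file'; // block-scoped temporary
      processedFiles = files.map((f) => new File([f], `${perFileLabel}-${f.name}`));
      break;
    }
    case ToolType.multiFile: {
      const batchLabel = 'batched'; // block-scoped temporary
      processedFiles = files.map((f) => new File([f], `${batchLabel}-${f.name}`));
      break;
    }
    default:
      processedFiles = files;
      break;
  }
  return processedFiles;
}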

View File

@@ -182,7 +182,7 @@ export class EnhancedPDFProcessingService {
   ): Promise<ProcessedFile> {
     const arrayBuffer = await file.arrayBuffer();
     const pdf = await pdfWorkerManager.createDocument(arrayBuffer);
     try {
       const totalPages = pdf.numPages;
@@ -459,11 +459,12 @@ export class EnhancedPDFProcessingService {
       case 'failed':
         this.metrics.failedFiles++;
         break;
-      case 'cacheHit':
+      case 'cacheHit': {
         // Update cache hit rate
        const totalAttempts = this.metrics.totalFiles + 1;
        this.metrics.cacheHitRate = (this.metrics.cacheHitRate * this.metrics.totalFiles + 1) / totalAttempts;
        break;
+      }
     }
   }
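The cacheHit branch updates the hit rate as a running average rather than storing every attempt; a standalone sketch of that incremental update (hypothetical helper, assuming cacheHitRate is hits divided by attempts over totalFiles prior attempts):

// Hypothetical standalone version of the running-average update used above.
// `rate` is hits / attempts over `attempts` prior observations; a cache hit
// contributes 1, a miss would contribute 0.
function updateHitRate(rate: number, attempts: number, hit: boolean): number {
  const value = hit ? 1 : 0;
  return (rate * attempts + value) / (attempts + 1);
}

// e.g. after 3 attempts with a rate of 1/3, a new hit gives 2/4 = 0.5:
// updateHitRate(1 / 3, 3, true) === 0.5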

View File

@@ -148,15 +148,17 @@ export class FileAnalyzer {
       case 'immediate_full':
         return pageCount * baseTime;
-      case 'priority_pages':
+      case 'priority_pages': {
         // Estimate time for priority pages (first 10)
         const priorityPages = Math.min(pageCount, 10);
         return priorityPages * baseTime;
-      case 'progressive_chunked':
+      }
+      case 'progressive_chunked': {
         // Estimate time for first chunk (20 pages)
         const firstChunk = Math.min(pageCount, 20);
         return firstChunk * baseTime;
+      }
       default:
         return pageCount * baseTime;
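Reduced to a compact, hypothetical sketch (the strategy names and the 10- and 20-page caps come from the diff above; the function name, signature, and union type are assumptions), the estimator now scopes each per-strategy temporary to its own braced case:

// Hypothetical sketch; strategy names and page caps follow the diff above,
// the signature is an assumption.
type ProcessingStrategy = 'immediate_full' | 'priority_pages' | 'progressive_chunked';

function estimateProcessingTime(strategy: ProcessingStrategy, pageCount: number, baseTime: number): number {
  switch (strategy) {
    case 'immediate_full':
      return pageCount * baseTime;
    case 'priority_pages': {
      // Only the first 10 (priority) pages are processed up front.
      const priorityPages = Math.min(pageCount, 10);
      return priorityPages * baseTime;
    }
    case 'progressive_chunked': {
      // Only the first chunk of 20 pages is processed up front.
      const firstChunk = Math.min(pageCount, 20);
      return firstChunk * baseTime;
    }
    default:
      return pageCount * baseTime;
  }
}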