diff --git a/frontend/src/core/hooks/tools/convert/useConvertOperation.ts b/frontend/src/core/hooks/tools/convert/useConvertOperation.ts
index 24b4b5fbb..e16fd78d4 100644
--- a/frontend/src/core/hooks/tools/convert/useConvertOperation.ts
+++ b/frontend/src/core/hooks/tools/convert/useConvertOperation.ts
@@ -172,13 +172,22 @@ export const useConvertOperation = () => {
   return useToolOperation({
     ...convertOperationConfig,
     customProcessor: customConvertProcessor, // Use instance-specific processor for translation support
-    getErrorMessage: (error) => {
-      if (error.response?.data && typeof error.response.data === 'string') {
-        return error.response.data;
+    getErrorMessage: (error: unknown) => {
+      const response = (typeof error === 'object' && error !== null && 'response' in error)
+        ? (error as { response?: { data?: unknown } }).response
+        : undefined;
+      if (typeof response?.data === 'string') {
+        return response.data;
       }
-      if (error.message) {
+      if (error instanceof Error && error.message) {
         return error.message;
       }
+      if (typeof error === 'object' && error !== null && 'message' in error) {
+        const potentialMessage = (error as { message?: unknown }).message;
+        if (typeof potentialMessage === 'string') {
+          return potentialMessage;
+        }
+      }
       return t("convert.errorConversion", "An error occurred while converting the file.");
     },
   });
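The getErrorMessage change above narrows the unknown error before reading response.data or message. The same narrowing pattern as a standalone sketch, with an illustrative name (readErrorMessage is not part of the diff):

// Sketch only: mirrors the narrowing used in getErrorMessage above.
const readErrorMessage = (error: unknown): string | undefined => {
  // Axios-style errors expose a response payload; plain Errors expose message.
  if (typeof error === 'object' && error !== null && 'response' in error) {
    const data = (error as { response?: { data?: unknown } }).response?.data;
    if (typeof data === 'string') return data;
  }
  if (error instanceof Error && error.message) return error.message;
  if (typeof error === 'object' && error !== null && 'message' in error) {
    const message = (error as { message?: unknown }).message;
    if (typeof message === 'string') return message;
  }
  return undefined;
};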
diff --git a/frontend/src/core/hooks/tools/ocr/useOCROperation.ts b/frontend/src/core/hooks/tools/ocr/useOCROperation.ts
index 6e86a083e..3c47af293 100644
--- a/frontend/src/core/hooks/tools/ocr/useOCROperation.ts
+++ b/frontend/src/core/hooks/tools/ocr/useOCROperation.ts
@@ -115,10 +115,19 @@ export const useOCROperation = () => {
   const ocrConfig: ToolOperationConfig = {
     ...ocrOperationConfig,
     responseHandler,
-    getErrorMessage: (error) =>
-      error.message?.includes('OCR tools') && error.message?.includes('not installed')
-        ? 'OCR tools (OCRmyPDF or Tesseract) are not installed on the server. Use the standard or fat Docker image instead of ultra-lite, or install OCR tools manually.'
-        : createStandardErrorHandler(t('ocr.error.failed', 'OCR operation failed'))(error),
+    getErrorMessage: (error: unknown) => {
+      const message = error instanceof Error
+        ? error.message
+        : (typeof error === 'object' && error !== null && 'message' in error && typeof (error as { message?: unknown }).message === 'string'
+          ? (error as { message: string }).message
+          : undefined);
+
+      if (message && message.includes('OCR tools') && message.includes('not installed')) {
+        return 'OCR tools (OCRmyPDF or Tesseract) are not installed on the server. Use the standard or fat Docker image instead of ultra-lite, or install OCR tools manually.';
+      }
+
+      return createStandardErrorHandler(t('ocr.error.failed', 'OCR operation failed'))(error);
+    },
   };
 
   return useToolOperation(ocrConfig);
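createStandardErrorHandler is invoked curried here, createStandardErrorHandler(fallback)(error), but its implementation is outside this excerpt. A plausible shape, assumed for context only:

// Assumed shape; the real helper is not shown in this diff and may differ.
const createStandardErrorHandler = (fallbackMessage: string) => (error: unknown): string => {
  if (error instanceof Error && error.message) {
    return error.message;
  }
  return fallbackMessage;
};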
diff --git a/frontend/src/core/hooks/tools/shared/useToolApiCalls.ts b/frontend/src/core/hooks/tools/shared/useToolApiCalls.ts
index a8e6a88a0..ad1ba95fc 100644
--- a/frontend/src/core/hooks/tools/shared/useToolApiCalls.ts
+++ b/frontend/src/core/hooks/tools/shared/useToolApiCalls.ts
@@ -4,6 +4,7 @@ import apiClient from '@app/services/apiClient'; // Our configured instance
 import { processResponse, ResponseHandler } from '@app/utils/toolResponseProcessor';
 import { isEmptyOutput } from '@app/services/errorUtils';
 import type { ProcessingProgress } from '@app/hooks/tools/shared/useToolState';
+import { isStirlingFile, type FileId } from '@app/types/fileContext';
 
 export interface ApiCallsConfig<TParams> {
   endpoint: string | ((params: TParams) => string);
@@ -22,10 +23,10 @@ export const useToolApiCalls = () => {
     config: ApiCallsConfig<TParams>,
     onProgress: (progress: ProcessingProgress) => void,
     onStatus: (status: string) => void,
-    markFileError?: (fileId: string) => void,
-  ): Promise<{ outputFiles: File[]; successSourceIds: string[] }> => {
+    markFileError?: (fileId: FileId) => void,
+  ): Promise<{ outputFiles: File[]; successSourceIds: FileId[] }> => {
     const processedFiles: File[] = [];
-    const successSourceIds: string[] = [];
+    const successSourceIds: FileId[] = [];
     const failedFiles: string[] = [];
     const total = validFiles.length;
 
@@ -35,7 +36,9 @@
     for (let i = 0; i < validFiles.length; i++) {
       const file = validFiles[i];
 
-      console.debug('[processFiles] Start', { index: i, total, name: file.name, fileId: (file as any).fileId });
+      const fileId = isStirlingFile(file) ? file.fileId : undefined;
+
+      console.debug('[processFiles] Start', { index: i, total, name: file.name, fileId });
       onProgress({ current: i + 1, total, currentFileName: file.name });
       onStatus(`Processing ${file.name} (${i + 1}/${total})`);
 
@@ -47,7 +50,7 @@
           responseType: 'blob',
           cancelToken: cancelTokenRef.current?.token,
         });
-        console.debug('[processFiles] Response OK', { name: file.name, status: (response as any)?.status });
+        console.debug('[processFiles] Response OK', { name: file.name, status: response.status });
 
         // Forward to shared response processor (uses tool-specific responseHandler if provided)
        const responseFiles = await processResponse(
@@ -62,16 +65,20 @@
         if (empty) {
           console.warn('[processFiles] Empty output treated as failure', { name: file.name });
           failedFiles.push(file.name);
-          try {
-            (markFileError as any)?.((file as any).fileId);
-          } catch (e) {
-            console.debug('markFileError', e);
+          if (fileId && markFileError) {
+            try {
+              markFileError(fileId);
+            } catch (e) {
+              console.debug('markFileError', e);
+            }
           }
           continue;
         }
         processedFiles.push(...responseFiles);
         // record source id as successful
-        successSourceIds.push((file as any).fileId);
+        if (fileId) {
+          successSourceIds.push(fileId);
+        }
 
         console.debug('[processFiles] Success', { name: file.name, produced: responseFiles.length });
       } catch (error) {
@@ -81,10 +88,12 @@
         console.error('[processFiles] Failed', { name: file.name, error });
         failedFiles.push(file.name);
         // mark errored file so UI can highlight
-        try {
-          (markFileError as any)?.((file as any).fileId);
-        } catch (e) {
-          console.debug('markFileError', e);
+        if (fileId && markFileError) {
+          try {
+            markFileError(fileId);
+          } catch (e) {
+            console.debug('markFileError', e);
+          }
         }
       }
     }
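isStirlingFile and the branded FileId type are imported from @app/types/fileContext and are not shown in this excerpt. Assumed shapes, only to make the hunks above easier to read:

// Assumed sketches only; the real definitions in @app/types/fileContext may differ.
type FileId = string & { readonly __brand: 'FileId' };

interface StirlingFile extends File {
  readonly fileId: FileId;
}

// A type guard in the spirit of the isStirlingFile() calls used above.
const isStirlingFile = (file: File): file is StirlingFile =>
  typeof (file as Partial<StirlingFile>).fileId === 'string';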
diff --git a/frontend/src/core/hooks/tools/shared/useToolOperation.ts b/frontend/src/core/hooks/tools/shared/useToolOperation.ts
index 3d8e1406e..0f18543de 100644
--- a/frontend/src/core/hooks/tools/shared/useToolOperation.ts
+++ b/frontend/src/core/hooks/tools/shared/useToolOperation.ts
@@ -7,7 +7,7 @@ import { useToolApiCalls, type ApiCallsConfig } from '@app/hooks/tools/shared/us
 import { useToolResources } from '@app/hooks/tools/shared/useToolResources';
 import { extractErrorMessage } from '@app/utils/toolErrorHandler';
 import { StirlingFile, extractFiles, FileId, StirlingFileStub, createStirlingFile } from '@app/types/fileContext';
-import { FILE_EVENTS } from '@app/services/errorUtils';
+import { FILE_EVENTS, extractErrorFileIds, normalizeAxiosErrorData } from '@app/services/errorUtils';
 import { ResponseHandler } from '@app/utils/toolResponseProcessor';
 import { createChildStub, generateProcessedFileMetadata } from '@app/contexts/file/fileActions';
 import { ToolOperation } from '@app/types/file';
@@ -51,7 +51,7 @@ interface BaseToolOperationConfig<TParams> {
   responseHandler?: ResponseHandler;
 
   /** Extract user-friendly error messages from API errors */
-  getErrorMessage?: (error: any) => string;
+  getErrorMessage?: (error: unknown) => string;
 
   /** Default parameter values for automation */
   defaultParameters?: TParams;
@@ -145,7 +145,7 @@ export const useToolOperation = (
   config: ToolOperationConfig
 ): ToolOperationHook => {
   const { t } = useTranslation();
-  const { addFiles, consumeFiles, undoConsumeFiles, selectors } = useFileContext();
+  const { consumeFiles, undoConsumeFiles, selectors } = useFileContext();
 
   // Composed hooks
   const { state, actions } = useToolState();
@@ -171,17 +171,17 @@ export const useToolOperation = (
       }
 
       // Handle zero-byte inputs explicitly: mark as error and continue with others
-      const zeroByteFiles = selectedFiles.filter(file => (file as any)?.size === 0);
+      const zeroByteFiles = selectedFiles.filter(file => file.size === 0);
       if (zeroByteFiles.length > 0) {
-        try {
-          for (const f of zeroByteFiles) {
-            (fileActions.markFileError as any)((f as any).fileId);
+        for (const f of zeroByteFiles) {
+          try {
+            fileActions.markFileError(f.fileId);
+          } catch (error) {
+            console.warn('Failed to mark zero-byte file as error', error);
           }
-        } catch (e) {
-          console.log('markFileError', e);
         }
       }
-      const validFiles = selectedFiles.filter(file => (file as any)?.size > 0);
+      const validFiles = selectedFiles.filter(file => file.size > 0);
       if (validFiles.length === 0) {
         actions.setError(t('noValidFiles', 'No valid files to process'));
         return;
@@ -197,18 +197,22 @@
       actions.setStatus('Processing files...');
 
       // Listen for global error file id events from HTTP interceptor during this run
-      let externalErrorFileIds: string[] = [];
-      const errorListener = (e: Event) => {
-        const detail = (e as CustomEvent)?.detail as any;
-        if (detail?.fileIds) {
-          externalErrorFileIds = Array.isArray(detail.fileIds) ? detail.fileIds : [];
+      let externalErrorFileIds: FileId[] = [];
+      const errorListener: EventListener = (event) => {
+        const detail = (event as CustomEvent<{ fileIds?: unknown }>).detail;
+        if (!detail || !Array.isArray(detail.fileIds)) {
+          externalErrorFileIds = [];
+          return;
         }
+        externalErrorFileIds = detail.fileIds
+          .filter((id): id is string => typeof id === 'string')
+          .map(id => id as FileId);
       };
-      window.addEventListener(FILE_EVENTS.markError, errorListener as EventListener);
+      window.addEventListener(FILE_EVENTS.markError, errorListener);
 
-      try {
+      try {
        let processedFiles: File[];
-        let successSourceIds: string[] = [];
+        let successSourceIds: FileId[] = [];
 
        // Use original files directly (no PDF metadata injection - history stored in IndexedDB)
        const filesForAPI = extractFiles(validFiles);
@@ -230,10 +234,10 @@
             apiCallsConfig,
             actions.setProgress,
             actions.setStatus,
-            fileActions.markFileError as any
+            fileActions.markFileError
           );
           processedFiles = result.outputFiles;
-          successSourceIds = result.successSourceIds as any;
+          successSourceIds = result.successSourceIds;
           console.debug('[useToolOperation] Multi-file results', { outputFiles: processedFiles.length, successSources: result.successSourceIds.length });
           break;
         }
@@ -261,7 +265,7 @@
             processedFiles = await extractZipFiles(response.data);
           }
           // Assume all inputs succeeded together unless server provided an error earlier
-          successSourceIds = validFiles.map(f => (f as any).fileId) as any;
+          successSourceIds = validFiles.map(f => f.fileId);
 
           break;
         }
@@ -269,12 +273,12 @@
           actions.setStatus('Processing files...');
           processedFiles = await config.customProcessor(params, filesForAPI);
           // Try to map outputs back to inputs by filename (before extension)
-          const inputBaseNames = new Map<string, string>();
+          const inputBaseNames = new Map<string, FileId>();
           for (const f of validFiles) {
             const base = (f.name || '').replace(/\.[^.]+$/, '').toLowerCase();
-            inputBaseNames.set(base, (f as any).fileId);
+            inputBaseNames.set(base, f.fileId);
           }
-          const mappedSuccess: string[] = [];
+          const mappedSuccess: FileId[] = [];
           for (const out of processedFiles) {
             const base = (out.name || '').replace(/\.[^.]+$/, '').toLowerCase();
             const id = inputBaseNames.get(base);
@@ -282,9 +286,9 @@
           }
           // Fallback to naive alignment if names don't match
           if (mappedSuccess.length === 0) {
-            successSourceIds = validFiles.slice(0, processedFiles.length).map(f => (f as any).fileId) as any;
+            successSourceIds = validFiles.slice(0, processedFiles.length).map(f => f.fileId);
           } else {
-            successSourceIds = mappedSuccess as any;
+            successSourceIds = mappedSuccess;
           }
           break;
         }
@@ -292,31 +296,43 @@
 
       // Normalize error flags across tool types: mark failures, clear successes
       try {
-        const allInputIds = validFiles.map(f => (f as any).fileId) as unknown as string[];
-        const okSet = new Set((successSourceIds as unknown as string[]) || []);
+        const allInputIds = validFiles.map(f => f.fileId);
+        const okSet = new Set(successSourceIds);
         // Clear errors on successes
         for (const okId of okSet) {
-          try { (fileActions.clearFileError as any)(okId); } catch (_e) { void _e; }
+          try {
+            fileActions.clearFileError(okId);
+          } catch (clearError) {
+            console.debug('clearFileError', clearError);
+          }
         }
         // Mark errors on inputs that didn't succeed
         for (const id of allInputIds) {
           if (!okSet.has(id)) {
-            try { (fileActions.markFileError as any)(id); } catch (_e) { void _e; }
+            try {
+              fileActions.markFileError(id);
+            } catch (markError) {
+              console.debug('markFileError', markError);
+            }
           }
         }
-      } catch (_e) { void _e; }
+      } catch (normalizationError) {
+        console.debug('Failed to normalize file error state', normalizationError);
+      }
 
       if (externalErrorFileIds.length > 0) {
         // If backend told us which sources failed, prefer that mapping
         successSourceIds = validFiles
-          .map(f => (f as any).fileId)
-          .filter(id => !externalErrorFileIds.includes(id)) as any;
+          .map(f => f.fileId)
+          .filter(id => !externalErrorFileIds.includes(id));
         // Also mark failed IDs immediately
         try {
           for (const badId of externalErrorFileIds) {
-            (fileActions.markFileError as any)(badId);
+            fileActions.markFileError(badId);
           }
-        } catch (_e) { void _e; }
+        } catch (markError) {
+          console.debug('markFileError', markError);
+        }
       }
 
       if (processedFiles.length > 0) {
@@ -363,8 +379,8 @@
         );
         // Always create child stubs linking back to the successful source inputs
         const successInputStubs = successSourceIds
-          .map((id) => selectors.getStirlingFileStub(id as any))
-          .filter(Boolean) as StirlingFileStub[];
+          .map(id => selectors.getStirlingFileStub(id))
+          .filter((stub): stub is StirlingFileStub => Boolean(stub));
 
         if (successInputStubs.length !== processedFiles.length) {
           console.warn('[useToolOperation] Mismatch successInputStubs vs outputs', {
@@ -389,7 +405,7 @@
           return createStirlingFile(file, childStub.id);
         });
         // Build consumption arrays aligned to the successful source IDs
-        const toConsumeInputIds = successSourceIds.filter((id: string) => inputFileIds.includes(id as any)) as unknown as FileId[];
+        const toConsumeInputIds = successSourceIds.filter(id => inputFileIds.includes(id));
        // Outputs and stubs are already ordered by success sequence
        console.debug('[useToolOperation] Consuming files', { inputCount: inputFileIds.length, toConsume: toConsumeInputIds.length });
        const outputFileIds = await consumeFiles(toConsumeInputIds, outputStirlingFiles, outputStirlingFileStubs);
@@ -403,45 +419,41 @@
       }
 
-    } catch (error: any) {
+    } catch (error: unknown) {
       // Centralized 422 handler: mark provided IDs in errorFileIds
       try {
-        const status = (error?.response?.status as number | undefined);
-        if (status === 422) {
-          const payload = error?.response?.data;
-          let parsed: any = payload;
-          if (typeof payload === 'string') {
-            try { parsed = JSON.parse(payload); } catch { parsed = payload; }
-          } else if (payload && typeof (payload as any).text === 'function') {
-            // Blob or Response-like object from axios when responseType='blob'
-            const text = await (payload as Blob).text();
-            try { parsed = JSON.parse(text); } catch { parsed = text; }
-          }
-          let ids: string[] | undefined = Array.isArray(parsed?.errorFileIds) ? parsed.errorFileIds : undefined;
-          if (!ids && typeof parsed === 'string') {
-            const match = parsed.match(/[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}/g);
-            if (match && match.length > 0) ids = Array.from(new Set(match));
-          }
+        const axiosResponse = (typeof error === 'object' && error !== null && 'response' in error)
+          ? (error as { response?: { status?: number; data?: unknown } }).response
+          : undefined;
+        if (axiosResponse?.status === 422) {
+          const normalized = await normalizeAxiosErrorData(axiosResponse.data);
+          const ids = extractErrorFileIds(normalized);
           if (ids && ids.length > 0) {
             for (const badId of ids) {
-              try { (fileActions.markFileError as any)(badId); } catch (_e) { void _e; }
+              try {
+                fileActions.markFileError(badId as FileId);
+              } catch (markError) {
+                console.debug('markFileError', markError);
+              }
             }
             actions.setStatus('Process failed due to invalid/corrupted file(s)');
             // Avoid duplicating toast messaging here
             return;
           }
         }
-      } catch (_e) { void _e; }
+      } catch (handlerError) {
+        console.debug('Failed to extract error file IDs', handlerError);
+      }
 
       const errorMessage = config.getErrorMessage?.(error) || extractErrorMessage(error);
       actions.setError(errorMessage);
       actions.setStatus('');
     } finally {
-      window.removeEventListener(FILE_EVENTS.markError, errorListener as EventListener);
+      window.removeEventListener(FILE_EVENTS.markError, errorListener);
       actions.setLoading(false);
       actions.setProgress(null);
     }
-  }, [t, config, actions, addFiles, consumeFiles, processFiles, generateThumbnails, createDownloadInfo, cleanupBlobUrls, extractZipFiles]);
+  }, [t, config, actions, consumeFiles, processFiles, generateThumbnails, createDownloadInfo, cleanupBlobUrls, extractZipFiles, fileActions, selectors]);
 
   const cancelOperation = useCallback(() => {
     cancelApiCalls();
@@ -495,16 +507,18 @@
 
       // Show success message
       actions.setStatus(t('undoSuccess', 'Operation undone successfully'));
 
-    } catch (error: any) {
+    } catch (error: unknown) {
       let errorMessage = extractErrorMessage(error);
 
       // Provide more specific error messages based on error type
-      if (error.message?.includes('Mismatch between input files')) {
-        errorMessage = t('undoDataMismatch', 'Cannot undo: operation data is corrupted');
-      } else if (error.message?.includes('IndexedDB')) {
-        errorMessage = t('undoStorageError', 'Undo completed but some files could not be saved to storage');
-      } else if (error.name === 'QuotaExceededError') {
-        errorMessage = t('undoQuotaError', 'Cannot undo: insufficient storage space');
+      if (error instanceof Error) {
+        if (error.message.includes('Mismatch between input files')) {
+          errorMessage = t('undoDataMismatch', 'Cannot undo: operation data is corrupted');
+        } else if (error.message.includes('IndexedDB')) {
+          errorMessage = t('undoStorageError', 'Undo completed but some files could not be saved to storage');
+        } else if (error.name === 'QuotaExceededError') {
+          errorMessage = t('undoQuotaError', 'Cannot undo: insufficient storage space');
+        }
       }
       actions.setError(`${t('undoFailed', 'Failed to undo operation')}: ${errorMessage}`);
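normalizeAxiosErrorData and extractErrorFileIds replace the inline 422 parsing removed in the catch block above; they are imported from @app/services/errorUtils but their implementations are not part of this excerpt. A sketch reconstructed from the removed inline logic, assumptions only:

// Assumed sketches; the real helpers in @app/services/errorUtils may differ in detail.
export const normalizeAxiosErrorData = async (payload: unknown): Promise<unknown> => {
  // String payloads may be JSON; blob payloads (responseType: 'blob') must be read as text first.
  if (typeof payload === 'string') {
    try { return JSON.parse(payload); } catch { return payload; }
  }
  if (payload instanceof Blob) {
    const text = await payload.text();
    try { return JSON.parse(text); } catch { return text; }
  }
  return payload;
};

export const extractErrorFileIds = (parsed: unknown): string[] | undefined => {
  // Prefer an explicit errorFileIds array; otherwise scan plain text for UUIDs.
  if (typeof parsed === 'object' && parsed !== null) {
    const ids = (parsed as { errorFileIds?: unknown }).errorFileIds;
    if (Array.isArray(ids)) {
      return ids.filter((id): id is string => typeof id === 'string');
    }
  }
  if (typeof parsed === 'string') {
    const match = parsed.match(/[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}/g);
    if (match && match.length > 0) return Array.from(new Set(match));
  }
  return undefined;
};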
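The errorListener hunk in useToolOperation.ts expects the HTTP interceptor to dispatch a CustomEvent whose detail carries a fileIds array; the interceptor itself is not in this diff. An illustrative dispatch matching what the listener reads (the id value is a placeholder):

// Illustrative only; the real interceptor code is elsewhere.
window.dispatchEvent(
  new CustomEvent(FILE_EVENTS.markError, { detail: { fileIds: ['failed-file-id'] } })
);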