mirror of
https://github.com/Frooodle/Stirling-PDF.git
synced 2026-03-04 02:20:19 +01:00
Stirling 2.0 (#3928)
# Description of Changes <!-- File context for managing files between tools and views Optimisation for large files Updated Split to work with new file system and match Matts stepped design closer --> --- ## Checklist ### General - [ ] I have read the [Contribution Guidelines](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/CONTRIBUTING.md) - [ ] I have read the [Stirling-PDF Developer Guide](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/devGuide/DeveloperGuide.md) (if applicable) - [ ] I have read the [How to add new languages to Stirling-PDF](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/devGuide/HowToAddNewLanguage.md) (if applicable) - [ ] I have performed a self-review of my own code - [ ] My changes generate no new warnings ### Documentation - [ ] I have updated relevant docs on [Stirling-PDF's doc repo](https://github.com/Stirling-Tools/Stirling-Tools.github.io/blob/main/docs/) (if functionality has heavily changed) - [ ] I have read the section [Add New Translation Tags](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/devGuide/HowToAddNewLanguage.md#add-new-translation-tags) (for new translation tags only) ### UI Changes (if applicable) - [ ] Screenshots or videos demonstrating the UI changes are attached (e.g., as comments or direct attachments in the PR) ### Testing (if applicable) - [ ] I have tested my changes locally. Refer to the [Testing Guide](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/devGuide/DeveloperGuide.md#6-testing) for more details. --------- Co-authored-by: Anthony Stirling <77850077+Frooodle@users.noreply.github.com>
This commit is contained in:
67
frontend/src/hooks/tools/shared/useOperationResults.ts
Normal file
67
frontend/src/hooks/tools/shared/useOperationResults.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { useState, useCallback } from 'react';
|
||||
|
||||
/**
 * Result payload of a tool operation: the files it produced plus the
 * thumbnail previews generated for them.
 */
export interface OperationResult {
  /** Files produced by the operation (empty until one succeeds). */
  files: File[];
  /** Thumbnail data for the produced files — presumably one entry per file; confirm against consumers. */
  thumbnails: string[];
  /** True while thumbnails are still being generated for `files`. */
  isGeneratingThumbnails: boolean;
}
|
||||
|
||||
/**
 * Public surface of {@link useOperationResults}: read-only result state plus
 * the setters and reset helpers a tool needs to manage it.
 */
export interface OperationResultsHook {
  /** Current operation result (files + thumbnails). */
  results: OperationResult;
  /** URL for downloading the operation output, or null when none is available. */
  downloadUrl: string | null;
  /** Human-readable status message ('' when idle). */
  status: string;
  /** Error text from the last failure, or null. */
  errorMessage: string | null;
  /** True while an operation is in progress. */
  isLoading: boolean;

  /** Replace the whole result object. */
  setResults: (results: OperationResult) => void;
  /** Set or clear the download URL. */
  setDownloadUrl: (url: string | null) => void;
  /** Set the status message. */
  setStatus: (status: string) => void;
  /** Set or clear the error message. */
  setErrorMessage: (error: string | null) => void;
  /** Toggle the loading flag. */
  setIsLoading: (loading: boolean) => void;

  /** Reset every field above back to its initial value. */
  resetResults: () => void;
  /** Clear only the error message, leaving other state intact. */
  clearError: () => void;
}
|
||||
|
||||
// Pristine result state: no files, no thumbnails, nothing generating.
// Shared by the initial useState value and resetResults.
const initialResults: OperationResult = {
  files: [],
  thumbnails: [],
  isGeneratingThumbnails: false,
};
|
||||
|
||||
export const useOperationResults = (): OperationResultsHook => {
|
||||
const [results, setResults] = useState<OperationResult>(initialResults);
|
||||
const [downloadUrl, setDownloadUrl] = useState<string | null>(null);
|
||||
const [status, setStatus] = useState('');
|
||||
const [errorMessage, setErrorMessage] = useState<string | null>(null);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
|
||||
const resetResults = useCallback(() => {
|
||||
setResults(initialResults);
|
||||
setDownloadUrl(null);
|
||||
setStatus('');
|
||||
setErrorMessage(null);
|
||||
setIsLoading(false);
|
||||
}, []);
|
||||
|
||||
const clearError = useCallback(() => {
|
||||
setErrorMessage(null);
|
||||
}, []);
|
||||
|
||||
return {
|
||||
results,
|
||||
downloadUrl,
|
||||
status,
|
||||
errorMessage,
|
||||
isLoading,
|
||||
|
||||
setResults,
|
||||
setDownloadUrl,
|
||||
setStatus,
|
||||
setErrorMessage,
|
||||
setIsLoading,
|
||||
|
||||
resetResults,
|
||||
clearError,
|
||||
};
|
||||
};
|
||||
242
frontend/src/hooks/tools/split/useSplitOperation.ts
Normal file
242
frontend/src/hooks/tools/split/useSplitOperation.ts
Normal file
@@ -0,0 +1,242 @@
|
||||
import { useCallback, useState } from 'react';
|
||||
import axios from 'axios';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useFileContext } from '../../../contexts/FileContext';
|
||||
import { FileOperation } from '../../../types/fileContext';
|
||||
import { zipFileService } from '../../../services/zipFileService';
|
||||
import { generateThumbnailForFile } from '../../../utils/thumbnailUtils';
|
||||
import { SplitParameters } from '../../../components/tools/split/SplitSettings';
|
||||
import { SPLIT_MODES, ENDPOINTS, type SplitMode } from '../../../constants/splitConstants';
|
||||
|
||||
/**
 * Public surface of {@link useSplitOperation}.
 */
export interface SplitOperationHook {
  /** Run the split for the given mode/parameters against the selected files. */
  executeOperation: (
    mode: SplitMode | '',
    parameters: SplitParameters,
    selectedFiles: File[]
  ) => Promise<void>;

  // Flattened result properties for cleaner access
  /** PDFs extracted from the result ZIP (empty until a split succeeds). */
  files: File[];
  /** Thumbnails for `files`; a '' entry marks a thumbnail that failed to render. */
  thumbnails: string[];
  /** True while thumbnails for the extracted files are being generated. */
  isGeneratingThumbnails: boolean;
  /** Object URL for the result ZIP, or null before a successful run. */
  downloadUrl: string | null;
  /** Translated status message ('' when idle). */
  status: string;
  /** Error text from the last failed run, or null. */
  errorMessage: string | null;
  /** True while the split request is in flight. */
  isLoading: boolean;

  // Result management functions
  /** Reset all result state back to initial values. */
  resetResults: () => void;
  /** Clear only the error message. */
  clearError: () => void;
}
|
||||
|
||||
export const useSplitOperation = (): SplitOperationHook => {
|
||||
const { t } = useTranslation();
|
||||
const {
|
||||
recordOperation,
|
||||
markOperationApplied,
|
||||
markOperationFailed,
|
||||
addFiles
|
||||
} = useFileContext();
|
||||
|
||||
// Internal state management (replacing useOperationResults)
|
||||
const [files, setFiles] = useState<File[]>([]);
|
||||
const [thumbnails, setThumbnails] = useState<string[]>([]);
|
||||
const [isGeneratingThumbnails, setIsGeneratingThumbnails] = useState(false);
|
||||
const [downloadUrl, setDownloadUrl] = useState<string | null>(null);
|
||||
const [status, setStatus] = useState('');
|
||||
const [errorMessage, setErrorMessage] = useState<string | null>(null);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
|
||||
const buildFormData = useCallback((
|
||||
mode: SplitMode | '',
|
||||
parameters: SplitParameters,
|
||||
selectedFiles: File[]
|
||||
) => {
|
||||
const formData = new FormData();
|
||||
|
||||
selectedFiles.forEach(file => {
|
||||
formData.append("fileInput", file);
|
||||
});
|
||||
|
||||
if (!mode) {
|
||||
throw new Error('Split mode is required');
|
||||
}
|
||||
|
||||
let endpoint = "";
|
||||
|
||||
switch (mode) {
|
||||
case SPLIT_MODES.BY_PAGES:
|
||||
formData.append("pageNumbers", parameters.pages);
|
||||
endpoint = "/api/v1/general/split-pages";
|
||||
break;
|
||||
case SPLIT_MODES.BY_SECTIONS:
|
||||
formData.append("horizontalDivisions", parameters.hDiv);
|
||||
formData.append("verticalDivisions", parameters.vDiv);
|
||||
formData.append("merge", parameters.merge.toString());
|
||||
endpoint = "/api/v1/general/split-pdf-by-sections";
|
||||
break;
|
||||
case SPLIT_MODES.BY_SIZE_OR_COUNT:
|
||||
formData.append(
|
||||
"splitType",
|
||||
parameters.splitType === "size" ? "0" : parameters.splitType === "pages" ? "1" : "2"
|
||||
);
|
||||
formData.append("splitValue", parameters.splitValue);
|
||||
endpoint = "/api/v1/general/split-by-size-or-count";
|
||||
break;
|
||||
case SPLIT_MODES.BY_CHAPTERS:
|
||||
formData.append("bookmarkLevel", parameters.bookmarkLevel);
|
||||
formData.append("includeMetadata", parameters.includeMetadata.toString());
|
||||
formData.append("allowDuplicates", parameters.allowDuplicates.toString());
|
||||
endpoint = "/api/v1/general/split-pdf-by-chapters";
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unknown split mode: ${mode}`);
|
||||
}
|
||||
|
||||
return { formData, endpoint };
|
||||
}, []);
|
||||
|
||||
const createOperation = useCallback((
|
||||
mode: SplitMode | '',
|
||||
parameters: SplitParameters,
|
||||
selectedFiles: File[]
|
||||
): { operation: FileOperation; operationId: string; fileId: string } => {
|
||||
const operationId = `split-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
||||
const fileId = selectedFiles[0].name;
|
||||
|
||||
const operation: FileOperation = {
|
||||
id: operationId,
|
||||
type: 'split',
|
||||
timestamp: Date.now(),
|
||||
fileIds: selectedFiles.map(f => f.name),
|
||||
status: 'pending',
|
||||
metadata: {
|
||||
originalFileName: selectedFiles[0].name,
|
||||
parameters: {
|
||||
mode,
|
||||
pages: mode === SPLIT_MODES.BY_PAGES ? parameters.pages : undefined,
|
||||
hDiv: mode === SPLIT_MODES.BY_SECTIONS ? parameters.hDiv : undefined,
|
||||
vDiv: mode === SPLIT_MODES.BY_SECTIONS ? parameters.vDiv : undefined,
|
||||
merge: mode === SPLIT_MODES.BY_SECTIONS ? parameters.merge : undefined,
|
||||
splitType: mode === SPLIT_MODES.BY_SIZE_OR_COUNT ? parameters.splitType : undefined,
|
||||
splitValue: mode === SPLIT_MODES.BY_SIZE_OR_COUNT ? parameters.splitValue : undefined,
|
||||
bookmarkLevel: mode === SPLIT_MODES.BY_CHAPTERS ? parameters.bookmarkLevel : undefined,
|
||||
includeMetadata: mode === SPLIT_MODES.BY_CHAPTERS ? parameters.includeMetadata : undefined,
|
||||
allowDuplicates: mode === SPLIT_MODES.BY_CHAPTERS ? parameters.allowDuplicates : undefined,
|
||||
},
|
||||
fileSize: selectedFiles[0].size
|
||||
}
|
||||
};
|
||||
|
||||
return { operation, operationId, fileId };
|
||||
}, []);
|
||||
|
||||
const processResults = useCallback(async (blob: Blob) => {
|
||||
try {
|
||||
const zipFile = new File([blob], "split_result.zip", { type: "application/zip" });
|
||||
const extractionResult = await zipFileService.extractPdfFiles(zipFile);
|
||||
|
||||
if (extractionResult.success && extractionResult.extractedFiles.length > 0) {
|
||||
// Set local state for preview
|
||||
setFiles(extractionResult.extractedFiles);
|
||||
setThumbnails([]);
|
||||
setIsGeneratingThumbnails(true);
|
||||
|
||||
// Add extracted files to FileContext for future use
|
||||
await addFiles(extractionResult.extractedFiles);
|
||||
|
||||
const thumbnails = await Promise.all(
|
||||
extractionResult.extractedFiles.map(async (file) => {
|
||||
try {
|
||||
return await generateThumbnailForFile(file);
|
||||
} catch (error) {
|
||||
console.warn(`Failed to generate thumbnail for ${file.name}:`, error);
|
||||
return '';
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
setThumbnails(thumbnails);
|
||||
setIsGeneratingThumbnails(false);
|
||||
}
|
||||
} catch (extractError) {
|
||||
console.warn('Failed to extract files for preview:', extractError);
|
||||
}
|
||||
}, [addFiles]);
|
||||
|
||||
const executeOperation = useCallback(async (
|
||||
mode: SplitMode | '',
|
||||
parameters: SplitParameters,
|
||||
selectedFiles: File[]
|
||||
) => {
|
||||
if (selectedFiles.length === 0) {
|
||||
setStatus(t("noFileSelected"));
|
||||
return;
|
||||
}
|
||||
|
||||
const { operation, operationId, fileId } = createOperation(mode, parameters, selectedFiles);
|
||||
const { formData, endpoint } = buildFormData(mode, parameters, selectedFiles);
|
||||
|
||||
recordOperation(fileId, operation);
|
||||
|
||||
setStatus(t("loading"));
|
||||
setIsLoading(true);
|
||||
setErrorMessage(null);
|
||||
|
||||
try {
|
||||
const response = await axios.post(endpoint, formData, { responseType: "blob" });
|
||||
const blob = new Blob([response.data], { type: "application/zip" });
|
||||
const url = window.URL.createObjectURL(blob);
|
||||
|
||||
setDownloadUrl(url);
|
||||
setStatus(t("downloadComplete"));
|
||||
|
||||
await processResults(blob);
|
||||
markOperationApplied(fileId, operationId);
|
||||
} catch (error: any) {
|
||||
console.error(error);
|
||||
let errorMsg = t("error.pdfPassword", "An error occurred while splitting the PDF.");
|
||||
if (error.response?.data && typeof error.response.data === 'string') {
|
||||
errorMsg = error.response.data;
|
||||
} else if (error.message) {
|
||||
errorMsg = error.message;
|
||||
}
|
||||
setErrorMessage(errorMsg);
|
||||
setStatus(t("error._value", "Split failed."));
|
||||
markOperationFailed(fileId, operationId, errorMsg);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, [t, createOperation, buildFormData, recordOperation, markOperationApplied, markOperationFailed, processResults]);
|
||||
|
||||
const resetResults = useCallback(() => {
|
||||
setFiles([]);
|
||||
setThumbnails([]);
|
||||
setIsGeneratingThumbnails(false);
|
||||
setDownloadUrl(null);
|
||||
setStatus('');
|
||||
setErrorMessage(null);
|
||||
setIsLoading(false);
|
||||
}, []);
|
||||
|
||||
const clearError = useCallback(() => {
|
||||
setErrorMessage(null);
|
||||
}, []);
|
||||
|
||||
return {
|
||||
executeOperation,
|
||||
|
||||
// Flattened result properties for cleaner access
|
||||
files,
|
||||
thumbnails,
|
||||
isGeneratingThumbnails,
|
||||
downloadUrl,
|
||||
status,
|
||||
errorMessage,
|
||||
isLoading,
|
||||
|
||||
// Result management functions
|
||||
resetResults,
|
||||
clearError,
|
||||
};
|
||||
};
|
||||
71
frontend/src/hooks/tools/split/useSplitParameters.ts
Normal file
71
frontend/src/hooks/tools/split/useSplitParameters.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { useState } from 'react';
|
||||
import { SPLIT_MODES, SPLIT_TYPES, ENDPOINTS, type SplitMode, type SplitType } from '../../../constants/splitConstants';
|
||||
import { SplitParameters } from '../../../components/tools/split/SplitSettings';
|
||||
|
||||
/**
 * Public surface of {@link useSplitParameters}: the selected split mode, its
 * parameter set, and helpers for mutation, validation, and endpoint lookup.
 */
export interface SplitParametersHook {
  /** Currently selected split mode ('' when none chosen yet). */
  mode: SplitMode | '';
  /** Parameter values for all modes; only the active mode's fields matter. */
  parameters: SplitParameters;
  /** Select (or clear) the split mode. */
  setMode: (mode: SplitMode | '') => void;
  /** Update a single parameter field by name. */
  updateParameter: (parameter: keyof SplitParameters, value: string | boolean) => void;
  /** Restore default parameters and clear the mode. */
  resetParameters: () => void;
  /** True when the active mode's required fields are filled in. */
  validateParameters: () => boolean;
  /** Endpoint name for the active mode (falls back to the by-pages endpoint when unset). */
  getEndpointName: () => string;
}
|
||||
|
||||
// Default values for every split mode's parameters (numeric fields are kept
// as strings — presumably bound to text inputs; confirm against SplitSettings).
const initialParameters: SplitParameters = {
  pages: '',                      // BY_PAGES: page-number expression
  hDiv: '2',                      // BY_SECTIONS: horizontal divisions
  vDiv: '2',                      // BY_SECTIONS: vertical divisions
  merge: false,                   // BY_SECTIONS: merge sections back together
  splitType: SPLIT_TYPES.SIZE,    // BY_SIZE_OR_COUNT: what to split by
  splitValue: '',                 // BY_SIZE_OR_COUNT: threshold value
  bookmarkLevel: '1',             // BY_CHAPTERS: bookmark depth
  includeMetadata: false,         // BY_CHAPTERS
  allowDuplicates: false,         // BY_CHAPTERS
};
|
||||
|
||||
export const useSplitParameters = (): SplitParametersHook => {
|
||||
const [mode, setMode] = useState<SplitMode | ''>('');
|
||||
const [parameters, setParameters] = useState<SplitParameters>(initialParameters);
|
||||
|
||||
const updateParameter = (parameter: keyof SplitParameters, value: string | boolean) => {
|
||||
setParameters(prev => ({ ...prev, [parameter]: value }));
|
||||
};
|
||||
|
||||
const resetParameters = () => {
|
||||
setParameters(initialParameters);
|
||||
setMode('');
|
||||
};
|
||||
|
||||
const validateParameters = () => {
|
||||
if (!mode) return false;
|
||||
|
||||
switch (mode) {
|
||||
case SPLIT_MODES.BY_PAGES:
|
||||
return parameters.pages.trim() !== "";
|
||||
case SPLIT_MODES.BY_SECTIONS:
|
||||
return parameters.hDiv !== "" && parameters.vDiv !== "";
|
||||
case SPLIT_MODES.BY_SIZE_OR_COUNT:
|
||||
return parameters.splitValue.trim() !== "";
|
||||
case SPLIT_MODES.BY_CHAPTERS:
|
||||
return parameters.bookmarkLevel !== "";
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
const getEndpointName = () => {
|
||||
if (!mode) return ENDPOINTS[SPLIT_MODES.BY_PAGES];
|
||||
return ENDPOINTS[mode as SplitMode];
|
||||
};
|
||||
|
||||
return {
|
||||
mode,
|
||||
parameters,
|
||||
setMode,
|
||||
updateParameter,
|
||||
resetParameters,
|
||||
validateParameters,
|
||||
getEndpointName,
|
||||
};
|
||||
};
|
||||
312
frontend/src/hooks/useEnhancedProcessedFiles.ts
Normal file
312
frontend/src/hooks/useEnhancedProcessedFiles.ts
Normal file
@@ -0,0 +1,312 @@
|
||||
import { useState, useEffect, useRef } from 'react';
|
||||
import { ProcessedFile, ProcessingState, ProcessingConfig } from '../types/processing';
|
||||
import { enhancedPDFProcessingService } from '../services/enhancedPDFProcessingService';
|
||||
import { FileHasher } from '../utils/fileHash';
|
||||
|
||||
/**
 * Return shape of {@link useEnhancedProcessedFiles}.
 */
interface UseEnhancedProcessedFilesResult {
  /** Processed result per File object currently active. */
  processedFiles: Map<File, ProcessedFile>;
  /** In-flight processing state keyed by file hash (see FileHasher). */
  processingStates: Map<string, ProcessingState>;
  /** True while any file has a processing state entry. */
  isProcessing: boolean;
  /** True when any processing state reports status 'error'. */
  hasProcessingErrors: boolean;
  /** Aggregated progress across all in-flight files. */
  processingProgress: {
    /** Mean per-file progress; 100 when nothing is processing. */
    overall: number;
    /** Progress per file key. */
    fileProgress: Map<string, number>;
    /** Sum of per-file time estimates. */
    estimatedTimeRemaining: number;
  };
  /** Cache statistics reported by the processing service. */
  cacheStats: {
    entries: number;
    totalSizeBytes: number;
    maxSizeBytes: number;
  };
  /** Service-level metrics (counts, timings, hit rate). */
  metrics: {
    totalFiles: number;
    completedFiles: number;
    failedFiles: number;
    averageProcessingTime: number;
    cacheHitRate: number;
  };
  /** Imperative controls forwarded to the processing service. */
  actions: {
    cancelProcessing: (fileKey: string) => void;
    retryProcessing: (file: File) => void;
    clearCache: () => void;
  };
}
|
||||
|
||||
/**
 * Drive background processing of `activeFiles` through
 * `enhancedPDFProcessingService`, caching results per File object and — via a
 * content hash — per file identity, so a re-created File object can reuse the
 * result of its byte-identical predecessor.
 *
 * @param activeFiles Files currently active in the UI.
 * @param config Optional overrides forwarded to the processing service.
 */
export function useEnhancedProcessedFiles(
  activeFiles: File[],
  config?: Partial<ProcessingConfig>
): UseEnhancedProcessedFilesResult {
  const [processedFiles, setProcessedFiles] = useState<Map<File, ProcessedFile>>(new Map());
  const fileHashMapRef = useRef<Map<File, string>>(new Map()); // Use ref to avoid state update loops
  const [processingStates, setProcessingStates] = useState<Map<string, ProcessingState>>(new Map());

  // Subscribe to processing state changes once
  useEffect(() => {
    const unsubscribe = enhancedPDFProcessingService.onProcessingChange(setProcessingStates);
    return unsubscribe;
  }, []);

  // Process files when activeFiles changes.
  // NOTE(review): this effect reads `processedFiles` but deliberately omits it
  // from the dependency list (see trailing comment) — a stale snapshot may be
  // read between renders; the hasChanged guard below limits churn.
  useEffect(() => {
    console.log('useEnhancedProcessedFiles: activeFiles changed', activeFiles.length, 'files');

    if (activeFiles.length === 0) {
      console.log('useEnhancedProcessedFiles: No active files, clearing processed cache');
      setProcessedFiles(new Map());
      // Clear any ongoing processing when no files
      enhancedPDFProcessingService.clearAllProcessing();
      return;
    }

    const processFiles = async () => {
      const newProcessedFiles = new Map<File, ProcessedFile>();

      for (const file of activeFiles) {
        // Generate hash for this file
        const fileHash = await FileHasher.generateHybridHash(file);
        fileHashMapRef.current.set(file, fileHash);

        // First, check if we have this exact File object cached
        let existing = processedFiles.get(file);

        // If not found by File object, try to find by hash in case File was recreated
        if (!existing) {
          for (const [cachedFile, processed] of processedFiles.entries()) {
            const cachedHash = fileHashMapRef.current.get(cachedFile);
            if (cachedHash === fileHash) {
              existing = processed;
              break;
            }
          }
        }

        if (existing) {
          newProcessedFiles.set(file, existing);
          continue;
        }

        try {
          // May return the finished result or kick off async processing;
          // depends on the service's contract — see enhancedPDFProcessingService.
          const processed = await enhancedPDFProcessingService.processFile(file, config);
          if (processed) {
            newProcessedFiles.set(file, processed);
          }
        } catch (error) {
          console.error(`Failed to start processing for ${file.name}:`, error);
        }
      }

      // Only update if the content actually changed
      const hasChanged = newProcessedFiles.size !== processedFiles.size ||
        Array.from(newProcessedFiles.keys()).some(file => !processedFiles.has(file));

      if (hasChanged) {
        setProcessedFiles(newProcessedFiles);
      }
    };

    processFiles();
  }, [activeFiles]); // Only depend on activeFiles to avoid infinite loops

  // Listen for processing completion by polling: files without results whose
  // state is 'processing'/'completed' are re-requested from the service.
  useEffect(() => {
    const checkForCompletedFiles = async () => {
      let hasNewFiles = false;
      const updatedFiles = new Map(processedFiles);

      // Generate file keys for all files first
      const fileKeyPromises = activeFiles.map(async (file) => ({
        file,
        key: await FileHasher.generateHybridHash(file)
      }));

      const fileKeyPairs = await Promise.all(fileKeyPromises);

      for (const { file, key } of fileKeyPairs) {
        // Only check files that don't have processed results yet
        if (!updatedFiles.has(file)) {
          const processingState = processingStates.get(key);

          // Check for both processing and recently completed files
          // This ensures we catch completed files before they're cleaned up
          if (processingState?.status === 'processing' || processingState?.status === 'completed') {
            try {
              const processed = await enhancedPDFProcessingService.processFile(file, config);
              if (processed) {
                updatedFiles.set(file, processed);
                hasNewFiles = true;
              }
            } catch (error) {
              // Ignore errors in completion check
            }
          }
        }
      }

      if (hasNewFiles) {
        setProcessedFiles(updatedFiles);
      }
    };

    // Check every 500ms for completed processing
    const interval = setInterval(checkForCompletedFiles, 500);
    return () => clearInterval(interval);
  }, [activeFiles, processingStates]);


  // Cleanup when activeFiles changes: evict results for files no longer active.
  useEffect(() => {
    const currentFiles = new Set(activeFiles);
    const previousFiles = Array.from(processedFiles.keys());
    const removedFiles = previousFiles.filter(file => !currentFiles.has(file));

    if (removedFiles.length > 0) {
      // Clean up processing service cache
      enhancedPDFProcessingService.cleanup(removedFiles);

      // Update local state
      setProcessedFiles(prev => {
        const updated = new Map();
        for (const [file, processed] of prev) {
          if (currentFiles.has(file)) {
            updated.set(file, processed);
          }
        }
        return updated;
      });
    }
  }, [activeFiles]);

  // Calculate derived state
  const isProcessing = processingStates.size > 0;
  const hasProcessingErrors = Array.from(processingStates.values()).some(state => state.status === 'error');

  // Calculate overall progress
  const processingProgress = calculateProcessingProgress(processingStates);

  // Get cache stats and metrics (read fresh from the service every render)
  const cacheStats = enhancedPDFProcessingService.getCacheStats();
  const metrics = enhancedPDFProcessingService.getMetrics();

  // Action handlers forwarding to the processing service
  const actions = {
    cancelProcessing: (fileKey: string) => {
      enhancedPDFProcessingService.cancelProcessing(fileKey);
    },

    retryProcessing: async (file: File) => {
      try {
        await enhancedPDFProcessingService.processFile(file, config);
      } catch (error) {
        console.error(`Failed to retry processing for ${file.name}:`, error);
      }
    },

    clearCache: () => {
      enhancedPDFProcessingService.clearAll();
    }
  };

  // Cleanup on unmount
  useEffect(() => {
    return () => {
      enhancedPDFProcessingService.clearAllProcessing();
    };
  }, []);

  return {
    processedFiles,
    processingStates,
    isProcessing,
    hasProcessingErrors,
    processingProgress,
    cacheStats,
    metrics,
    actions
  };
}
|
||||
|
||||
/**
|
||||
* Calculate overall processing progress from individual file states
|
||||
*/
|
||||
function calculateProcessingProgress(states: Map<string, ProcessingState>): {
|
||||
overall: number;
|
||||
fileProgress: Map<string, number>;
|
||||
estimatedTimeRemaining: number;
|
||||
} {
|
||||
if (states.size === 0) {
|
||||
return {
|
||||
overall: 100,
|
||||
fileProgress: new Map(),
|
||||
estimatedTimeRemaining: 0
|
||||
};
|
||||
}
|
||||
|
||||
const fileProgress = new Map<string, number>();
|
||||
let totalProgress = 0;
|
||||
let totalEstimatedTime = 0;
|
||||
|
||||
for (const [fileKey, state] of states) {
|
||||
fileProgress.set(fileKey, state.progress);
|
||||
totalProgress += state.progress;
|
||||
totalEstimatedTime += state.estimatedTimeRemaining || 0;
|
||||
}
|
||||
|
||||
const overall = totalProgress / states.size;
|
||||
const estimatedTimeRemaining = totalEstimatedTime;
|
||||
|
||||
return {
|
||||
overall,
|
||||
fileProgress,
|
||||
estimatedTimeRemaining
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook for getting a single processed file with enhanced features
|
||||
*/
|
||||
export function useEnhancedProcessedFile(
|
||||
file: File | null,
|
||||
config?: Partial<ProcessingConfig>
|
||||
): {
|
||||
processedFile: ProcessedFile | null;
|
||||
isProcessing: boolean;
|
||||
processingState: ProcessingState | null;
|
||||
error: string | null;
|
||||
canRetry: boolean;
|
||||
actions: {
|
||||
cancel: () => void;
|
||||
retry: () => void;
|
||||
};
|
||||
} {
|
||||
const result = useEnhancedProcessedFiles(file ? [file] : [], config);
|
||||
|
||||
const processedFile = file ? result.processedFiles.get(file) || null : null;
|
||||
// Note: This is async but we can't await in hook return - consider refactoring if needed
|
||||
const fileKey = file ? '' : '';
|
||||
const processingState = fileKey ? result.processingStates.get(fileKey) || null : null;
|
||||
const isProcessing = !!processingState;
|
||||
const error = processingState?.error?.message || null;
|
||||
const canRetry = processingState?.error?.recoverable || false;
|
||||
|
||||
const actions = {
|
||||
cancel: () => {
|
||||
if (fileKey) {
|
||||
result.actions.cancelProcessing(fileKey);
|
||||
}
|
||||
},
|
||||
retry: () => {
|
||||
if (file) {
|
||||
result.actions.retryProcessing(file);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
processedFile,
|
||||
isProcessing,
|
||||
processingState,
|
||||
error,
|
||||
canRetry,
|
||||
actions
|
||||
};
|
||||
}
|
||||
122
frontend/src/hooks/useFileManager.ts
Normal file
122
frontend/src/hooks/useFileManager.ts
Normal file
@@ -0,0 +1,122 @@
|
||||
import { useState, useCallback } from 'react';
|
||||
import { fileStorage } from '../services/fileStorage';
|
||||
import { FileWithUrl } from '../types/file';
|
||||
|
||||
export const useFileManager = () => {
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
const convertToFile = useCallback(async (fileWithUrl: FileWithUrl): Promise<File> => {
|
||||
if (fileWithUrl.url && fileWithUrl.url.startsWith('blob:')) {
|
||||
const response = await fetch(fileWithUrl.url);
|
||||
const data = await response.arrayBuffer();
|
||||
const file = new File([data], fileWithUrl.name, {
|
||||
type: fileWithUrl.type || 'application/pdf',
|
||||
lastModified: fileWithUrl.lastModified || Date.now()
|
||||
});
|
||||
// Preserve the ID if it exists
|
||||
if (fileWithUrl.id) {
|
||||
Object.defineProperty(file, 'id', { value: fileWithUrl.id, writable: false });
|
||||
}
|
||||
return file;
|
||||
}
|
||||
|
||||
// Always use ID first, fallback to name only if ID doesn't exist
|
||||
const lookupKey = fileWithUrl.id || fileWithUrl.name;
|
||||
const storedFile = await fileStorage.getFile(lookupKey);
|
||||
if (storedFile) {
|
||||
const file = new File([storedFile.data], storedFile.name, {
|
||||
type: storedFile.type,
|
||||
lastModified: storedFile.lastModified
|
||||
});
|
||||
// Add the ID to the file object
|
||||
Object.defineProperty(file, 'id', { value: storedFile.id, writable: false });
|
||||
return file;
|
||||
}
|
||||
|
||||
throw new Error('File not found in storage');
|
||||
}, []);
|
||||
|
||||
const loadRecentFiles = useCallback(async (): Promise<FileWithUrl[]> => {
|
||||
setLoading(true);
|
||||
try {
|
||||
const files = await fileStorage.getAllFiles();
|
||||
const sortedFiles = files.sort((a, b) => (b.lastModified || 0) - (a.lastModified || 0));
|
||||
return sortedFiles;
|
||||
} catch (error) {
|
||||
console.error('Failed to load recent files:', error);
|
||||
return [];
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const handleRemoveFile = useCallback(async (index: number, files: FileWithUrl[], setFiles: (files: FileWithUrl[]) => void) => {
|
||||
const file = files[index];
|
||||
try {
|
||||
await fileStorage.deleteFile(file.id || file.name);
|
||||
setFiles(files.filter((_, i) => i !== index));
|
||||
} catch (error) {
|
||||
console.error('Failed to remove file:', error);
|
||||
throw error;
|
||||
}
|
||||
}, []);
|
||||
|
||||
const storeFile = useCallback(async (file: File) => {
|
||||
try {
|
||||
const storedFile = await fileStorage.storeFile(file);
|
||||
// Add the ID to the file object
|
||||
Object.defineProperty(file, 'id', { value: storedFile.id, writable: false });
|
||||
return storedFile;
|
||||
} catch (error) {
|
||||
console.error('Failed to store file:', error);
|
||||
throw error;
|
||||
}
|
||||
}, []);
|
||||
|
||||
const createFileSelectionHandlers = useCallback((
|
||||
selectedFiles: string[],
|
||||
setSelectedFiles: (files: string[]) => void
|
||||
) => {
|
||||
const toggleSelection = (fileId: string) => {
|
||||
setSelectedFiles(
|
||||
selectedFiles.includes(fileId)
|
||||
? selectedFiles.filter(id => id !== fileId)
|
||||
: [...selectedFiles, fileId]
|
||||
);
|
||||
};
|
||||
|
||||
const clearSelection = () => {
|
||||
setSelectedFiles([]);
|
||||
};
|
||||
|
||||
const selectMultipleFiles = async (files: FileWithUrl[], onFilesSelect: (files: File[]) => void) => {
|
||||
if (selectedFiles.length === 0) return;
|
||||
|
||||
try {
|
||||
const selectedFileObjects = files.filter(f => selectedFiles.includes(f.id || f.name));
|
||||
const filePromises = selectedFileObjects.map(convertToFile);
|
||||
const convertedFiles = await Promise.all(filePromises);
|
||||
onFilesSelect(convertedFiles);
|
||||
clearSelection();
|
||||
} catch (error) {
|
||||
console.error('Failed to load selected files:', error);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
toggleSelection,
|
||||
clearSelection,
|
||||
selectMultipleFiles
|
||||
};
|
||||
}, [convertToFile]);
|
||||
|
||||
return {
|
||||
loading,
|
||||
convertToFile,
|
||||
loadRecentFiles,
|
||||
handleRemoveFile,
|
||||
storeFile,
|
||||
createFileSelectionHandlers
|
||||
};
|
||||
};
|
||||
30
frontend/src/hooks/useMemoryManagement.ts
Normal file
30
frontend/src/hooks/useMemoryManagement.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import { useCallback } from 'react';
|
||||
import { useFileContext } from '../contexts/FileContext';
|
||||
|
||||
/**
|
||||
* Hook for components that need to register resources with centralized memory management
|
||||
*/
|
||||
export function useMemoryManagement() {
|
||||
const { trackBlobUrl, trackPdfDocument, scheduleCleanup } = useFileContext();
|
||||
|
||||
const registerBlobUrl = useCallback((url: string) => {
|
||||
trackBlobUrl(url);
|
||||
return url;
|
||||
}, [trackBlobUrl]);
|
||||
|
||||
const registerPdfDocument = useCallback((fileId: string, pdfDoc: any) => {
|
||||
trackPdfDocument(fileId, pdfDoc);
|
||||
return pdfDoc;
|
||||
}, [trackPdfDocument]);
|
||||
|
||||
const cancelCleanup = useCallback((fileId: string) => {
|
||||
// Cancel scheduled cleanup (user is actively using the file)
|
||||
scheduleCleanup(fileId, -1); // -1 cancels the timer
|
||||
}, [scheduleCleanup]);
|
||||
|
||||
return {
|
||||
registerBlobUrl,
|
||||
registerPdfDocument,
|
||||
cancelCleanup
|
||||
};
|
||||
}
|
||||
@@ -50,18 +50,28 @@ export function usePDFProcessor() {
|
||||
|
||||
const pages: PDFPage[] = [];
|
||||
|
||||
// Generate thumbnails for all pages
|
||||
// Create pages without thumbnails initially - load them lazily
|
||||
for (let i = 1; i <= totalPages; i++) {
|
||||
const thumbnail = await generatePageThumbnail(file, i);
|
||||
pages.push({
|
||||
id: `${file.name}-page-${i}`,
|
||||
pageNumber: i,
|
||||
thumbnail,
|
||||
thumbnail: null, // Will be loaded lazily
|
||||
rotation: 0,
|
||||
selected: false
|
||||
});
|
||||
}
|
||||
|
||||
// Generate thumbnails for first 10 pages immediately for better UX
|
||||
const priorityPages = Math.min(10, totalPages);
|
||||
for (let i = 1; i <= priorityPages; i++) {
|
||||
try {
|
||||
const thumbnail = await generatePageThumbnail(file, i);
|
||||
pages[i - 1].thumbnail = thumbnail;
|
||||
} catch (error) {
|
||||
console.warn(`Failed to generate thumbnail for page ${i}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up
|
||||
pdf.destroy();
|
||||
|
||||
|
||||
125
frontend/src/hooks/useProcessedFiles.ts
Normal file
125
frontend/src/hooks/useProcessedFiles.ts
Normal file
@@ -0,0 +1,125 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { ProcessedFile, ProcessingState } from '../types/processing';
|
||||
import { pdfProcessingService } from '../services/pdfProcessingService';
|
||||
|
||||
interface UseProcessedFilesResult {
|
||||
processedFiles: Map<File, ProcessedFile>;
|
||||
processingStates: Map<string, ProcessingState>;
|
||||
isProcessing: boolean;
|
||||
hasProcessingErrors: boolean;
|
||||
cacheStats: {
|
||||
entries: number;
|
||||
totalSizeBytes: number;
|
||||
maxSizeBytes: number;
|
||||
};
|
||||
}
|
||||
|
||||
export function useProcessedFiles(activeFiles: File[]): UseProcessedFilesResult {
|
||||
const [processedFiles, setProcessedFiles] = useState<Map<File, ProcessedFile>>(new Map());
|
||||
const [processingStates, setProcessingStates] = useState<Map<string, ProcessingState>>(new Map());
|
||||
|
||||
useEffect(() => {
|
||||
// Subscribe to processing state changes
|
||||
const unsubscribe = pdfProcessingService.onProcessingChange(setProcessingStates);
|
||||
|
||||
// Check/start processing for each active file
|
||||
const checkProcessing = async () => {
|
||||
const newProcessedFiles = new Map<File, ProcessedFile>();
|
||||
|
||||
for (const file of activeFiles) {
|
||||
const processed = await pdfProcessingService.getProcessedFile(file);
|
||||
if (processed) {
|
||||
newProcessedFiles.set(file, processed);
|
||||
}
|
||||
}
|
||||
|
||||
setProcessedFiles(newProcessedFiles);
|
||||
};
|
||||
|
||||
checkProcessing();
|
||||
|
||||
return unsubscribe;
|
||||
}, [activeFiles]);
|
||||
|
||||
// Listen for processing completion and update processed files
|
||||
useEffect(() => {
|
||||
const updateProcessedFiles = async () => {
|
||||
const updated = new Map<File, ProcessedFile>();
|
||||
|
||||
for (const file of activeFiles) {
|
||||
const existing = processedFiles.get(file);
|
||||
if (existing) {
|
||||
updated.set(file, existing);
|
||||
} else {
|
||||
// Check if processing just completed
|
||||
const processed = await pdfProcessingService.getProcessedFile(file);
|
||||
if (processed) {
|
||||
updated.set(file, processed);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setProcessedFiles(updated);
|
||||
};
|
||||
|
||||
// Small delay to allow processing state to settle
|
||||
const timeoutId = setTimeout(updateProcessedFiles, 100);
|
||||
return () => clearTimeout(timeoutId);
|
||||
}, [processingStates, activeFiles]);
|
||||
|
||||
// Cleanup when activeFiles changes
|
||||
useEffect(() => {
|
||||
const currentFiles = new Set(activeFiles);
|
||||
const previousFiles = Array.from(processedFiles.keys());
|
||||
const removedFiles = previousFiles.filter(file => !currentFiles.has(file));
|
||||
|
||||
if (removedFiles.length > 0) {
|
||||
// Clean up processing service cache
|
||||
pdfProcessingService.cleanup(removedFiles);
|
||||
|
||||
// Update local state
|
||||
setProcessedFiles(prev => {
|
||||
const updated = new Map();
|
||||
for (const [file, processed] of prev) {
|
||||
if (currentFiles.has(file)) {
|
||||
updated.set(file, processed);
|
||||
}
|
||||
}
|
||||
return updated;
|
||||
});
|
||||
}
|
||||
}, [activeFiles]);
|
||||
|
||||
// Derived state
|
||||
const isProcessing = processingStates.size > 0;
|
||||
const hasProcessingErrors = Array.from(processingStates.values()).some(state => state.status === 'error');
|
||||
const cacheStats = pdfProcessingService.getCacheStats();
|
||||
|
||||
return {
|
||||
processedFiles,
|
||||
processingStates,
|
||||
isProcessing,
|
||||
hasProcessingErrors,
|
||||
cacheStats
|
||||
};
|
||||
}
|
||||
|
||||
// Hook for getting a single processed file
|
||||
export function useProcessedFile(file: File | null): {
|
||||
processedFile: ProcessedFile | null;
|
||||
isProcessing: boolean;
|
||||
processingState: ProcessingState | null;
|
||||
} {
|
||||
const result = useProcessedFiles(file ? [file] : []);
|
||||
|
||||
const processedFile = file ? result.processedFiles.get(file) || null : null;
|
||||
const fileKey = file ? pdfProcessingService.generateFileKey(file) : '';
|
||||
const processingState = fileKey ? result.processingStates.get(fileKey) || null : null;
|
||||
const isProcessing = !!processingState;
|
||||
|
||||
return {
|
||||
processedFile,
|
||||
isProcessing,
|
||||
processingState
|
||||
};
|
||||
}
|
||||
56
frontend/src/hooks/useThumbnailGeneration.ts
Normal file
56
frontend/src/hooks/useThumbnailGeneration.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import { useCallback } from 'react';
|
||||
import { thumbnailGenerationService } from '../services/thumbnailGenerationService';
|
||||
|
||||
/**
|
||||
* Hook for tools that want to use thumbnail generation
|
||||
* Tools can choose whether to include visual features
|
||||
*/
|
||||
export function useThumbnailGeneration() {
|
||||
const generateThumbnails = useCallback(async (
|
||||
pdfArrayBuffer: ArrayBuffer,
|
||||
pageNumbers: number[],
|
||||
options: {
|
||||
scale?: number;
|
||||
quality?: number;
|
||||
batchSize?: number;
|
||||
parallelBatches?: number;
|
||||
} = {},
|
||||
onProgress?: (progress: { completed: number; total: number; thumbnails: any[] }) => void
|
||||
) => {
|
||||
return thumbnailGenerationService.generateThumbnails(
|
||||
pdfArrayBuffer,
|
||||
pageNumbers,
|
||||
options,
|
||||
onProgress
|
||||
);
|
||||
}, []);
|
||||
|
||||
const addThumbnailToCache = useCallback((pageId: string, thumbnail: string) => {
|
||||
thumbnailGenerationService.addThumbnailToCache(pageId, thumbnail);
|
||||
}, []);
|
||||
|
||||
const getThumbnailFromCache = useCallback((pageId: string): string | null => {
|
||||
return thumbnailGenerationService.getThumbnailFromCache(pageId);
|
||||
}, []);
|
||||
|
||||
const getCacheStats = useCallback(() => {
|
||||
return thumbnailGenerationService.getCacheStats();
|
||||
}, []);
|
||||
|
||||
const stopGeneration = useCallback(() => {
|
||||
thumbnailGenerationService.stopGeneration();
|
||||
}, []);
|
||||
|
||||
const destroyThumbnails = useCallback(() => {
|
||||
thumbnailGenerationService.destroy();
|
||||
}, []);
|
||||
|
||||
return {
|
||||
generateThumbnails,
|
||||
addThumbnailToCache,
|
||||
getThumbnailFromCache,
|
||||
getCacheStats,
|
||||
stopGeneration,
|
||||
destroyThumbnails
|
||||
};
|
||||
}
|
||||
96
frontend/src/hooks/useToolManagement.tsx
Normal file
96
frontend/src/hooks/useToolManagement.tsx
Normal file
@@ -0,0 +1,96 @@
|
||||
import React, { useState, useCallback, useMemo, useEffect } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import AddToPhotosIcon from "@mui/icons-material/AddToPhotos";
|
||||
import ContentCutIcon from "@mui/icons-material/ContentCut";
|
||||
import ZoomInMapIcon from "@mui/icons-material/ZoomInMap";
|
||||
import SplitPdfPanel from "../tools/Split";
|
||||
import CompressPdfPanel from "../tools/Compress";
|
||||
import MergePdfPanel from "../tools/Merge";
|
||||
import { useMultipleEndpointsEnabled } from "./useEndpointConfig";
|
||||
|
||||
type ToolRegistryEntry = {
|
||||
icon: React.ReactNode;
|
||||
name: string;
|
||||
component: React.ComponentType<any>;
|
||||
view: string;
|
||||
};
|
||||
|
||||
type ToolRegistry = {
|
||||
[key: string]: ToolRegistryEntry;
|
||||
};
|
||||
|
||||
const baseToolRegistry = {
|
||||
split: { icon: <ContentCutIcon />, component: SplitPdfPanel, view: "split" },
|
||||
compress: { icon: <ZoomInMapIcon />, component: CompressPdfPanel, view: "viewer" },
|
||||
merge: { icon: <AddToPhotosIcon />, component: MergePdfPanel, view: "pageEditor" },
|
||||
};
|
||||
|
||||
// Tool endpoint mappings
|
||||
const toolEndpoints: Record<string, string[]> = {
|
||||
split: ["split-pages", "split-pdf-by-sections", "split-by-size-or-count", "split-pdf-by-chapters"],
|
||||
compress: ["compress-pdf"],
|
||||
merge: ["merge-pdfs"],
|
||||
};
|
||||
|
||||
|
||||
export const useToolManagement = () => {
|
||||
const { t } = useTranslation();
|
||||
|
||||
const [selectedToolKey, setSelectedToolKey] = useState<string | null>(null);
|
||||
const [toolSelectedFileIds, setToolSelectedFileIds] = useState<string[]>([]);
|
||||
|
||||
const allEndpoints = Array.from(new Set(Object.values(toolEndpoints).flat()));
|
||||
const { endpointStatus, loading: endpointsLoading } = useMultipleEndpointsEnabled(allEndpoints);
|
||||
|
||||
const isToolAvailable = useCallback((toolKey: string): boolean => {
|
||||
if (endpointsLoading) return true;
|
||||
const endpoints = toolEndpoints[toolKey] || [];
|
||||
return endpoints.some(endpoint => endpointStatus[endpoint] === true);
|
||||
}, [endpointsLoading, endpointStatus]);
|
||||
|
||||
const toolRegistry: ToolRegistry = useMemo(() => {
|
||||
const availableToolRegistry: ToolRegistry = {};
|
||||
Object.keys(baseToolRegistry).forEach(toolKey => {
|
||||
if (isToolAvailable(toolKey)) {
|
||||
availableToolRegistry[toolKey] = {
|
||||
...baseToolRegistry[toolKey as keyof typeof baseToolRegistry],
|
||||
name: t(`home.${toolKey}.title`, toolKey.charAt(0).toUpperCase() + toolKey.slice(1))
|
||||
};
|
||||
}
|
||||
});
|
||||
return availableToolRegistry;
|
||||
}, [t, isToolAvailable]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!endpointsLoading && selectedToolKey && !toolRegistry[selectedToolKey]) {
|
||||
const firstAvailableTool = Object.keys(toolRegistry)[0];
|
||||
if (firstAvailableTool) {
|
||||
setSelectedToolKey(firstAvailableTool);
|
||||
} else {
|
||||
setSelectedToolKey(null);
|
||||
}
|
||||
}
|
||||
}, [endpointsLoading, selectedToolKey, toolRegistry]);
|
||||
|
||||
const selectTool = useCallback((toolKey: string) => {
|
||||
setSelectedToolKey(toolKey);
|
||||
}, []);
|
||||
|
||||
const clearToolSelection = useCallback(() => {
|
||||
setSelectedToolKey(null);
|
||||
}, []);
|
||||
|
||||
const selectedTool = selectedToolKey ? toolRegistry[selectedToolKey] : null;
|
||||
|
||||
return {
|
||||
selectedToolKey,
|
||||
selectedTool,
|
||||
toolSelectedFileIds,
|
||||
toolRegistry,
|
||||
|
||||
selectTool,
|
||||
clearToolSelection,
|
||||
setToolSelectedFileIds,
|
||||
|
||||
};
|
||||
};
|
||||
51
frontend/src/hooks/useToolParameters.ts
Normal file
51
frontend/src/hooks/useToolParameters.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
/**
|
||||
* React hooks for tool parameter management (URL logic removed)
|
||||
*/
|
||||
|
||||
import { useCallback, useMemo } from 'react';
|
||||
|
||||
type ToolParameterValues = Record<string, any>;
|
||||
|
||||
/**
|
||||
* Register tool parameters and get current values
|
||||
*/
|
||||
export function useToolParameters(
|
||||
toolName: string,
|
||||
parameters: Record<string, any>
|
||||
): [ToolParameterValues, (updates: Partial<ToolParameterValues>) => void] {
|
||||
|
||||
// Return empty values and noop updater
|
||||
const currentValues = useMemo(() => ({}), []);
|
||||
const updateParameters = useCallback(() => {}, []);
|
||||
|
||||
return [currentValues, updateParameters];
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook for managing a single tool parameter
|
||||
*/
|
||||
export function useToolParameter<T = any>(
|
||||
toolName: string,
|
||||
paramName: string,
|
||||
definition: any
|
||||
): [T, (value: T) => void] {
|
||||
const [allParams, updateParams] = useToolParameters(toolName, { [paramName]: definition });
|
||||
|
||||
const value = allParams[paramName] as T;
|
||||
|
||||
const setValue = useCallback((newValue: T) => {
|
||||
updateParams({ [paramName]: newValue });
|
||||
}, [paramName, updateParams]);
|
||||
|
||||
return [value, setValue];
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook for getting/setting global parameters (zoom, page, etc.)
|
||||
*/
|
||||
export function useGlobalParameters() {
|
||||
const currentValues = useMemo(() => ({}), []);
|
||||
const updateParameters = useCallback(() => {}, []);
|
||||
|
||||
return [currentValues, updateParameters];
|
||||
}
|
||||
@@ -1,130 +0,0 @@
|
||||
import { useSearchParams } from "react-router-dom";
|
||||
import { useEffect } from "react";
|
||||
|
||||
// Tool parameter definitions (shortened URLs)
|
||||
const TOOL_PARAMS = {
|
||||
split: [
|
||||
"mode", "p", "hd", "vd", "m",
|
||||
"type", "val", "level", "meta", "dupes"
|
||||
],
|
||||
compress: [
|
||||
"level", "gray", "rmeta", "size", "agg"
|
||||
],
|
||||
merge: [
|
||||
"order", "rdupes"
|
||||
]
|
||||
};
|
||||
|
||||
// Extract params for a specific tool from URL
|
||||
function getToolParams(toolKey: string, searchParams: URLSearchParams) {
|
||||
switch (toolKey) {
|
||||
case "split":
|
||||
return {
|
||||
mode: searchParams.get("mode") || "byPages",
|
||||
pages: searchParams.get("p") || "",
|
||||
hDiv: searchParams.get("hd") || "",
|
||||
vDiv: searchParams.get("vd") || "",
|
||||
merge: searchParams.get("m") === "true",
|
||||
splitType: searchParams.get("type") || "size",
|
||||
splitValue: searchParams.get("val") || "",
|
||||
bookmarkLevel: searchParams.get("level") || "0",
|
||||
includeMetadata: searchParams.get("meta") === "true",
|
||||
allowDuplicates: searchParams.get("dupes") === "true",
|
||||
};
|
||||
case "compress":
|
||||
return {
|
||||
compressionLevel: parseInt(searchParams.get("level") || "5"),
|
||||
grayscale: searchParams.get("gray") === "true",
|
||||
removeMetadata: searchParams.get("rmeta") === "true",
|
||||
expectedSize: searchParams.get("size") || "",
|
||||
aggressive: searchParams.get("agg") === "true",
|
||||
};
|
||||
case "merge":
|
||||
return {
|
||||
order: searchParams.get("order") || "default",
|
||||
removeDuplicates: searchParams.get("rdupes") === "true",
|
||||
};
|
||||
default:
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
// Update tool-specific params in URL
|
||||
function updateToolParams(toolKey: string, searchParams: URLSearchParams, setSearchParams: any, newParams: any) {
|
||||
const params = new URLSearchParams(searchParams);
|
||||
|
||||
// Clear tool-specific params
|
||||
if (toolKey === "split") {
|
||||
["mode", "p", "hd", "vd", "m", "type", "val", "level", "meta", "dupes"].forEach((k) => params.delete(k));
|
||||
// Set new split params
|
||||
const merged = { ...getToolParams("split", searchParams), ...newParams };
|
||||
params.set("mode", merged.mode);
|
||||
if (merged.mode === "byPages") params.set("p", merged.pages);
|
||||
else if (merged.mode === "bySections") {
|
||||
params.set("hd", merged.hDiv);
|
||||
params.set("vd", merged.vDiv);
|
||||
params.set("m", String(merged.merge));
|
||||
} else if (merged.mode === "bySizeOrCount") {
|
||||
params.set("type", merged.splitType);
|
||||
params.set("val", merged.splitValue);
|
||||
} else if (merged.mode === "byChapters") {
|
||||
params.set("level", merged.bookmarkLevel);
|
||||
params.set("meta", String(merged.includeMetadata));
|
||||
params.set("dupes", String(merged.allowDuplicates));
|
||||
}
|
||||
} else if (toolKey === "compress") {
|
||||
["level", "gray", "rmeta", "size", "agg"].forEach((k) => params.delete(k));
|
||||
const merged = { ...getToolParams("compress", searchParams), ...newParams };
|
||||
params.set("level", String(merged.compressionLevel));
|
||||
params.set("gray", String(merged.grayscale));
|
||||
params.set("rmeta", String(merged.removeMetadata));
|
||||
if (merged.expectedSize) params.set("size", merged.expectedSize);
|
||||
params.set("agg", String(merged.aggressive));
|
||||
} else if (toolKey === "merge") {
|
||||
["order", "rdupes"].forEach((k) => params.delete(k));
|
||||
const merged = { ...getToolParams("merge", searchParams), ...newParams };
|
||||
params.set("order", merged.order);
|
||||
params.set("rdupes", String(merged.removeDuplicates));
|
||||
}
|
||||
|
||||
setSearchParams(params, { replace: true });
|
||||
}
|
||||
|
||||
export function useToolParams(selectedToolKey: string, currentView: string) {
|
||||
const [searchParams, setSearchParams] = useSearchParams();
|
||||
|
||||
const toolParams = getToolParams(selectedToolKey, searchParams);
|
||||
|
||||
const updateParams = (newParams: any) =>
|
||||
updateToolParams(selectedToolKey, searchParams, setSearchParams, newParams);
|
||||
|
||||
// Update URL when core state changes
|
||||
useEffect(() => {
|
||||
const params = new URLSearchParams(searchParams);
|
||||
|
||||
// Remove all tool-specific params except for the current tool
|
||||
Object.entries(TOOL_PARAMS).forEach(([tool, keys]) => {
|
||||
if (tool !== selectedToolKey) {
|
||||
keys.forEach((k) => params.delete(k));
|
||||
}
|
||||
});
|
||||
|
||||
// Collect all params except 'v'
|
||||
const entries = Array.from(params.entries()).filter(([key]) => key !== "v");
|
||||
|
||||
// Rebuild params with 'v' first
|
||||
const newParams = new URLSearchParams();
|
||||
newParams.set("v", currentView);
|
||||
newParams.set("t", selectedToolKey);
|
||||
entries.forEach(([key, value]) => {
|
||||
if (key !== "t") newParams.set(key, value);
|
||||
});
|
||||
|
||||
setSearchParams(newParams, { replace: true });
|
||||
}, [selectedToolKey, currentView, setSearchParams]);
|
||||
|
||||
return {
|
||||
toolParams,
|
||||
updateParams,
|
||||
};
|
||||
}
|
||||
Reference in New Issue
Block a user