mirror of
https://github.com/Frooodle/Stirling-PDF.git
synced 2026-03-19 02:22:11 +01:00
Feature/v2/automate (#4248)
* automate feature * Moved all providers to app level to simplify homepage * Circular dependency fixes * You will see that now toolRegistry gets a tool config and a tool settings object. These enable automate to run the tools using as much static code as possible. --------- Co-authored-by: Connor Yoh <connor@stirlingpdf.com>
This commit is contained in:
157
frontend/src/utils/automationExecutor.ts
Normal file
157
frontend/src/utils/automationExecutor.ts
Normal file
@@ -0,0 +1,157 @@
|
||||
import axios from 'axios';
|
||||
import { ToolRegistry } from '../data/toolsTaxonomy';
|
||||
import { AutomationConfig, AutomationExecutionCallbacks } from '../types/automation';
|
||||
import { AUTOMATION_CONSTANTS } from '../constants/automation';
|
||||
import { AutomationFileProcessor } from './automationFileProcessor';
|
||||
import { ResourceManager } from './resourceManager';
|
||||
|
||||
|
||||
/**
|
||||
* Execute a tool operation directly without using React hooks
|
||||
*/
|
||||
export const executeToolOperation = async (
|
||||
operationName: string,
|
||||
parameters: any,
|
||||
files: File[],
|
||||
toolRegistry: ToolRegistry
|
||||
): Promise<File[]> => {
|
||||
console.log(`🔧 Executing tool: ${operationName}`, { parameters, fileCount: files.length });
|
||||
|
||||
const config = toolRegistry[operationName]?.operationConfig;
|
||||
if (!config) {
|
||||
console.error(`❌ Tool operation not supported: ${operationName}`);
|
||||
throw new Error(`Tool operation not supported: ${operationName}`);
|
||||
}
|
||||
|
||||
console.log(`📋 Using config:`, config);
|
||||
|
||||
try {
|
||||
// Check if tool uses custom processor (like Convert tool)
|
||||
if (config.customProcessor) {
|
||||
console.log(`🎯 Using custom processor for ${config.operationType}`);
|
||||
const resultFiles = await config.customProcessor(parameters, files);
|
||||
console.log(`✅ Custom processor returned ${resultFiles.length} files`);
|
||||
return resultFiles;
|
||||
}
|
||||
|
||||
if (config.multiFileEndpoint) {
|
||||
// Multi-file processing - single API call with all files
|
||||
const endpoint = typeof config.endpoint === 'function'
|
||||
? config.endpoint(parameters)
|
||||
: config.endpoint;
|
||||
|
||||
console.log(`🌐 Making multi-file request to: ${endpoint}`);
|
||||
const formData = (config.buildFormData as (params: any, files: File[]) => FormData)(parameters, files);
|
||||
console.log(`📤 FormData entries:`, Array.from(formData.entries()));
|
||||
|
||||
const response = await axios.post(endpoint, formData, {
|
||||
responseType: 'blob',
|
||||
timeout: AUTOMATION_CONSTANTS.OPERATION_TIMEOUT
|
||||
});
|
||||
|
||||
console.log(`📥 Response status: ${response.status}, size: ${response.data.size} bytes`);
|
||||
|
||||
// Multi-file responses are typically ZIP files, but may be single files
|
||||
const result = await AutomationFileProcessor.extractAutomationZipFiles(response.data);
|
||||
|
||||
if (result.errors.length > 0) {
|
||||
console.warn(`⚠️ File processing warnings:`, result.errors);
|
||||
}
|
||||
|
||||
console.log(`📁 Processed ${result.files.length} files from response`);
|
||||
return result.files;
|
||||
|
||||
} else {
|
||||
// Single-file processing - separate API call per file
|
||||
console.log(`🔄 Processing ${files.length} files individually`);
|
||||
const resultFiles: File[] = [];
|
||||
|
||||
for (let i = 0; i < files.length; i++) {
|
||||
const file = files[i];
|
||||
const endpoint = typeof config.endpoint === 'function'
|
||||
? config.endpoint(parameters)
|
||||
: config.endpoint;
|
||||
|
||||
console.log(`🌐 Making single-file request ${i+1}/${files.length} to: ${endpoint} for file: ${file.name}`);
|
||||
const formData = (config.buildFormData as (params: any, file: File) => FormData)(parameters, file);
|
||||
console.log(`📤 FormData entries:`, Array.from(formData.entries()));
|
||||
|
||||
const response = await axios.post(endpoint, formData, {
|
||||
responseType: 'blob',
|
||||
timeout: AUTOMATION_CONSTANTS.OPERATION_TIMEOUT
|
||||
});
|
||||
|
||||
console.log(`📥 Response ${i+1} status: ${response.status}, size: ${response.data.size} bytes`);
|
||||
|
||||
// Create result file
|
||||
const resultFile = ResourceManager.createResultFile(
|
||||
response.data,
|
||||
file.name,
|
||||
AUTOMATION_CONSTANTS.FILE_PREFIX
|
||||
);
|
||||
resultFiles.push(resultFile);
|
||||
console.log(`✅ Created result file: ${resultFile.name}`);
|
||||
}
|
||||
|
||||
console.log(`🎉 Single-file processing complete: ${resultFiles.length} files`);
|
||||
return resultFiles;
|
||||
}
|
||||
|
||||
} catch (error: any) {
|
||||
console.error(`Tool operation ${operationName} failed:`, error);
|
||||
throw new Error(`${operationName} operation failed: ${error.response?.data || error.message}`);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute an entire automation sequence
|
||||
*/
|
||||
export const executeAutomationSequence = async (
|
||||
automation: any,
|
||||
initialFiles: File[],
|
||||
toolRegistry: ToolRegistry,
|
||||
onStepStart?: (stepIndex: number, operationName: string) => void,
|
||||
onStepComplete?: (stepIndex: number, resultFiles: File[]) => void,
|
||||
onStepError?: (stepIndex: number, error: string) => void
|
||||
): Promise<File[]> => {
|
||||
console.log(`🚀 Starting automation sequence: ${automation.name || 'Unnamed'}`);
|
||||
console.log(`📁 Initial files: ${initialFiles.length}`);
|
||||
console.log(`🔧 Operations: ${automation.operations?.length || 0}`);
|
||||
|
||||
if (!automation?.operations || automation.operations.length === 0) {
|
||||
throw new Error('No operations in automation');
|
||||
}
|
||||
|
||||
let currentFiles = [...initialFiles];
|
||||
|
||||
for (let i = 0; i < automation.operations.length; i++) {
|
||||
const operation = automation.operations[i];
|
||||
|
||||
console.log(`📋 Step ${i + 1}/${automation.operations.length}: ${operation.operation}`);
|
||||
console.log(`📄 Input files: ${currentFiles.length}`);
|
||||
console.log(`⚙️ Parameters:`, operation.parameters || {});
|
||||
|
||||
try {
|
||||
onStepStart?.(i, operation.operation);
|
||||
|
||||
const resultFiles = await executeToolOperation(
|
||||
operation.operation,
|
||||
operation.parameters || {},
|
||||
currentFiles,
|
||||
toolRegistry
|
||||
);
|
||||
|
||||
console.log(`✅ Step ${i + 1} completed: ${resultFiles.length} result files`);
|
||||
currentFiles = resultFiles;
|
||||
onStepComplete?.(i, resultFiles);
|
||||
|
||||
} catch (error: any) {
|
||||
console.error(`❌ Step ${i + 1} failed:`, error);
|
||||
onStepError?.(i, error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`🎉 Automation sequence completed: ${currentFiles.length} final files`);
|
||||
return currentFiles;
|
||||
};
|
||||
186
frontend/src/utils/automationFileProcessor.ts
Normal file
186
frontend/src/utils/automationFileProcessor.ts
Normal file
@@ -0,0 +1,186 @@
|
||||
/**
|
||||
* File processing utilities specifically for automation workflows
|
||||
*/
|
||||
|
||||
import axios, { AxiosResponse } from 'axios';
|
||||
import { zipFileService } from '../services/zipFileService';
|
||||
import { ResourceManager } from './resourceManager';
|
||||
import { AUTOMATION_CONSTANTS } from '../constants/automation';
|
||||
|
||||
/** Options controlling an automation HTTP request. */
export interface AutomationProcessingOptions {
  // Request timeout in ms; callers default to AUTOMATION_CONSTANTS.OPERATION_TIMEOUT.
  timeout?: number;
  // Axios response type; callers default to 'blob'.
  responseType?: 'blob' | 'json';
}

/** Outcome of one automation processing step. */
export interface AutomationProcessingResult {
  // True when files were produced (including via the single-file fallback).
  success: boolean;
  // Files produced by the step; empty on failure.
  files: File[];
  // Warnings (on success) or failure messages (on failure).
  errors: string[];
}
|
||||
|
||||
export class AutomationFileProcessor {
|
||||
/**
|
||||
* Check if a blob is a ZIP file by examining its header
|
||||
*/
|
||||
static isZipFile(blob: Blob): boolean {
|
||||
// This is a simple check - in a real implementation you might want to read the first few bytes
|
||||
// For now, we'll rely on the extraction attempt and fallback
|
||||
return blob.type === 'application/zip' || blob.type === 'application/x-zip-compressed';
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract files from a ZIP blob during automation execution, with fallback for non-ZIP files
|
||||
*/
|
||||
static async extractAutomationZipFiles(blob: Blob): Promise<AutomationProcessingResult> {
|
||||
try {
|
||||
const zipFile = ResourceManager.createTimestampedFile(
|
||||
blob,
|
||||
AUTOMATION_CONSTANTS.RESPONSE_ZIP_PREFIX,
|
||||
'.zip',
|
||||
'application/zip'
|
||||
);
|
||||
|
||||
const result = await zipFileService.extractPdfFiles(zipFile);
|
||||
|
||||
if (!result.success || result.extractedFiles.length === 0) {
|
||||
// Fallback: treat as single PDF file
|
||||
const fallbackFile = ResourceManager.createTimestampedFile(
|
||||
blob,
|
||||
AUTOMATION_CONSTANTS.RESULT_FILE_PREFIX,
|
||||
'.pdf'
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
files: [fallbackFile],
|
||||
errors: [`ZIP extraction failed, treated as single file: ${result.errors?.join(', ') || 'Unknown error'}`]
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
files: result.extractedFiles,
|
||||
errors: []
|
||||
};
|
||||
} catch (error) {
|
||||
console.warn('Failed to extract automation ZIP files, falling back to single file:', error);
|
||||
// Fallback: treat as single PDF file
|
||||
const fallbackFile = ResourceManager.createTimestampedFile(
|
||||
blob,
|
||||
AUTOMATION_CONSTANTS.RESULT_FILE_PREFIX,
|
||||
'.pdf'
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
files: [fallbackFile],
|
||||
errors: [`ZIP extraction failed, treated as single file: ${error}`]
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a single file through an automation step
|
||||
*/
|
||||
static async processAutomationSingleFile(
|
||||
endpoint: string,
|
||||
formData: FormData,
|
||||
originalFileName: string,
|
||||
options: AutomationProcessingOptions = {}
|
||||
): Promise<AutomationProcessingResult> {
|
||||
try {
|
||||
const response = await axios.post(endpoint, formData, {
|
||||
responseType: options.responseType || 'blob',
|
||||
timeout: options.timeout || AUTOMATION_CONSTANTS.OPERATION_TIMEOUT
|
||||
});
|
||||
|
||||
if (response.status !== 200) {
|
||||
return {
|
||||
success: false,
|
||||
files: [],
|
||||
errors: [`Automation step failed - HTTP ${response.status}: ${response.statusText}`]
|
||||
};
|
||||
}
|
||||
|
||||
const resultFile = ResourceManager.createResultFile(
|
||||
response.data,
|
||||
originalFileName,
|
||||
AUTOMATION_CONSTANTS.FILE_PREFIX
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
files: [resultFile],
|
||||
errors: []
|
||||
};
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
files: [],
|
||||
errors: [`Automation step failed: ${error.response?.data || error.message}`]
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process multiple files through an automation step
|
||||
*/
|
||||
static async processAutomationMultipleFiles(
|
||||
endpoint: string,
|
||||
formData: FormData,
|
||||
options: AutomationProcessingOptions = {}
|
||||
): Promise<AutomationProcessingResult> {
|
||||
try {
|
||||
const response = await axios.post(endpoint, formData, {
|
||||
responseType: options.responseType || 'blob',
|
||||
timeout: options.timeout || AUTOMATION_CONSTANTS.OPERATION_TIMEOUT
|
||||
});
|
||||
|
||||
if (response.status !== 200) {
|
||||
return {
|
||||
success: false,
|
||||
files: [],
|
||||
errors: [`Automation step failed - HTTP ${response.status}: ${response.statusText}`]
|
||||
};
|
||||
}
|
||||
|
||||
// Multi-file responses are typically ZIP files
|
||||
return await this.extractAutomationZipFiles(response.data);
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
files: [],
|
||||
errors: [`Automation step failed: ${error.response?.data || error.message}`]
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build form data for automation tool operations
|
||||
*/
|
||||
static buildAutomationFormData(
|
||||
parameters: Record<string, any>,
|
||||
files: File | File[],
|
||||
fileFieldName: string = 'fileInput'
|
||||
): FormData {
|
||||
const formData = new FormData();
|
||||
|
||||
// Add files
|
||||
if (Array.isArray(files)) {
|
||||
files.forEach(file => formData.append(fileFieldName, file));
|
||||
} else {
|
||||
formData.append(fileFieldName, files);
|
||||
}
|
||||
|
||||
// Add parameters
|
||||
Object.entries(parameters).forEach(([key, value]) => {
|
||||
if (Array.isArray(value)) {
|
||||
value.forEach(item => formData.append(key, item));
|
||||
} else if (value !== undefined && value !== null) {
|
||||
formData.append(key, value);
|
||||
}
|
||||
});
|
||||
|
||||
return formData;
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,9 @@
|
||||
import {
|
||||
CONVERSION_ENDPOINTS,
|
||||
ENDPOINT_NAMES,
|
||||
EXTENSION_TO_ENDPOINT
|
||||
EXTENSION_TO_ENDPOINT,
|
||||
CONVERSION_MATRIX,
|
||||
TO_FORMAT_OPTIONS
|
||||
} from '../constants/convertConstants';
|
||||
|
||||
/**
|
||||
@@ -56,4 +58,33 @@ export const isImageFormat = (extension: string): boolean => {
|
||||
*/
|
||||
export const isWebFormat = (extension: string): boolean => {
|
||||
return ['html', 'zip'].includes(extension.toLowerCase());
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets available target extensions for a given source extension
|
||||
* Extracted from useConvertParameters to be reusable in automation settings
|
||||
*/
|
||||
export const getAvailableToExtensions = (fromExtension: string): Array<{value: string, label: string, group: string}> => {
|
||||
if (!fromExtension) return [];
|
||||
|
||||
// Handle dynamic format identifiers (file-<extension>)
|
||||
if (fromExtension.startsWith('file-')) {
|
||||
// Dynamic format - use 'any' conversion options (file-to-pdf)
|
||||
const supportedExtensions = CONVERSION_MATRIX['any'] || [];
|
||||
return TO_FORMAT_OPTIONS.filter(option =>
|
||||
supportedExtensions.includes(option.value)
|
||||
);
|
||||
}
|
||||
|
||||
let supportedExtensions = CONVERSION_MATRIX[fromExtension] || [];
|
||||
|
||||
// If no explicit conversion exists, but file-to-pdf might be available,
|
||||
// fall back to 'any' conversion (which converts unknown files to PDF via file-to-pdf)
|
||||
if (supportedExtensions.length === 0 && fromExtension !== 'any') {
|
||||
supportedExtensions = CONVERSION_MATRIX['any'] || [];
|
||||
}
|
||||
|
||||
return TO_FORMAT_OPTIONS.filter(option =>
|
||||
supportedExtensions.includes(option.value)
|
||||
);
|
||||
};
|
||||
71
frontend/src/utils/resourceManager.ts
Normal file
71
frontend/src/utils/resourceManager.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
/**
|
||||
* Utilities for managing file resources and blob URLs
|
||||
*/
|
||||
|
||||
import { useCallback } from 'react';
|
||||
import { AUTOMATION_CONSTANTS } from '../constants/automation';
|
||||
|
||||
export class ResourceManager {
|
||||
private static blobUrls = new Set<string>();
|
||||
|
||||
/**
|
||||
* Create a blob URL and track it for cleanup
|
||||
*/
|
||||
static createBlobUrl(blob: Blob): string {
|
||||
const url = URL.createObjectURL(blob);
|
||||
this.blobUrls.add(url);
|
||||
return url;
|
||||
}
|
||||
|
||||
/**
|
||||
* Revoke a specific blob URL
|
||||
*/
|
||||
static revokeBlobUrl(url: string): void {
|
||||
if (this.blobUrls.has(url)) {
|
||||
URL.revokeObjectURL(url);
|
||||
this.blobUrls.delete(url);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Revoke all tracked blob URLs
|
||||
*/
|
||||
static revokeAllBlobUrls(): void {
|
||||
this.blobUrls.forEach(url => URL.revokeObjectURL(url));
|
||||
this.blobUrls.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a File with proper naming convention
|
||||
*/
|
||||
static createResultFile(
|
||||
data: BlobPart,
|
||||
originalName: string,
|
||||
prefix: string = AUTOMATION_CONSTANTS.PROCESSED_FILE_PREFIX,
|
||||
type: string = 'application/pdf'
|
||||
): File {
|
||||
return new File([data], `${prefix}${originalName}`, { type });
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a timestamped file for responses
|
||||
*/
|
||||
static createTimestampedFile(
|
||||
data: BlobPart,
|
||||
prefix: string,
|
||||
extension: string = '.pdf',
|
||||
type: string = 'application/pdf'
|
||||
): File {
|
||||
const timestamp = Date.now();
|
||||
return new File([data], `${prefix}${timestamp}${extension}`, { type });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook for automatic cleanup on component unmount
|
||||
*/
|
||||
export function useResourceCleanup(): () => void {
|
||||
return useCallback(() => {
|
||||
ResourceManager.revokeAllBlobUrls();
|
||||
}, []);
|
||||
}
|
||||
@@ -3,12 +3,7 @@
|
||||
* Provides clean URL routing for the V2 tool system
|
||||
*/
|
||||
|
||||
import { ModeType } from '../contexts/NavigationContext';
|
||||
|
||||
export interface ToolRoute {
|
||||
mode: ModeType;
|
||||
toolKey?: string;
|
||||
}
|
||||
import { ModeType, isValidMode as isValidModeType, getDefaultMode, ToolRoute } from '../types/navigation';
|
||||
|
||||
/**
|
||||
* Parse the current URL to extract tool routing information
|
||||
@@ -45,7 +40,7 @@ export function parseToolRoute(): ToolRoute {
|
||||
|
||||
// Check for query parameter fallback (e.g., ?tool=split)
|
||||
const toolParam = searchParams.get('tool');
|
||||
if (toolParam && isValidMode(toolParam)) {
|
||||
if (toolParam && isValidModeType(toolParam)) {
|
||||
return {
|
||||
mode: toolParam as ModeType,
|
||||
toolKey: toolParam
|
||||
@@ -54,7 +49,8 @@ export function parseToolRoute(): ToolRoute {
|
||||
|
||||
// Default to page editor for home page
|
||||
return {
|
||||
mode: 'pageEditor'
|
||||
mode: getDefaultMode(),
|
||||
toolKey: null
|
||||
};
|
||||
}
|
||||
|
||||
@@ -137,16 +133,7 @@ export function getToolDisplayName(toolKey: string): string {
|
||||
return displayNames[toolKey] || toolKey;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a mode is valid
|
||||
*/
|
||||
function isValidMode(mode: string): mode is ModeType {
|
||||
const validModes: ModeType[] = [
|
||||
'viewer', 'pageEditor', 'fileEditor', 'merge', 'split',
|
||||
'compress', 'ocr', 'convert', 'addPassword', 'changePermissions', 'sanitize'
|
||||
];
|
||||
return validModes.includes(mode as ModeType);
|
||||
}
|
||||
// Note: isValidMode is now imported from types/navigation.ts
|
||||
|
||||
/**
|
||||
* Generate shareable URL for current tool state
|
||||
|
||||
Reference in New Issue
Block a user