mirror of
https://github.com/Frooodle/Stirling-PDF.git
synced 2026-03-04 02:20:19 +01:00
Restructure frontend code to allow for extensions (#4721)
# Description of Changes Move frontend code into `core` folder and add infrastructure for `proprietary` folder to include premium, non-OSS features
This commit is contained in:
229
frontend/src/core/utils/automationExecutor.ts
Normal file
229
frontend/src/core/utils/automationExecutor.ts
Normal file
@@ -0,0 +1,229 @@
|
||||
import axios from 'axios';
|
||||
import { ToolRegistry } from '@app/data/toolsTaxonomy';
|
||||
import { ToolId } from '@app/types/toolId';
|
||||
import { AUTOMATION_CONSTANTS } from '@app/constants/automation';
|
||||
import { AutomationFileProcessor } from '@app/utils/automationFileProcessor';
|
||||
import { ToolType } from '@app/hooks/tools/shared/useToolOperation';
|
||||
import { processResponse } from '@app/utils/toolResponseProcessor';
|
||||
|
||||
/**
|
||||
* Process multi-file tool response (handles ZIP or single PDF responses)
|
||||
*/
|
||||
const processMultiFileResponse = async (
|
||||
responseData: Blob,
|
||||
responseHeaders: any,
|
||||
files: File[],
|
||||
filePrefix: string,
|
||||
preserveBackendFilename?: boolean
|
||||
): Promise<File[]> => {
|
||||
// Multi-file responses are typically ZIP files, but may be single files (e.g. split with merge=true)
|
||||
if (responseData.type === 'application/pdf' ||
|
||||
(responseHeaders && responseHeaders['content-type'] === 'application/pdf')) {
|
||||
// Single PDF response - use processResponse to respect preserveBackendFilename
|
||||
const processedFiles = await processResponse(
|
||||
responseData,
|
||||
files,
|
||||
filePrefix,
|
||||
undefined,
|
||||
preserveBackendFilename ? responseHeaders : undefined
|
||||
);
|
||||
return processedFiles;
|
||||
} else {
|
||||
// ZIP response
|
||||
const result = await AutomationFileProcessor.extractAutomationZipFiles(responseData);
|
||||
|
||||
if (result.errors.length > 0) {
|
||||
console.warn(`⚠️ File processing warnings:`, result.errors);
|
||||
}
|
||||
|
||||
// Apply prefix to files, replacing any existing prefix
|
||||
const processedFiles = filePrefix && !preserveBackendFilename
|
||||
? result.files.map(file => {
|
||||
const nameWithoutPrefix = file.name.replace(/^[^_]*_/, '');
|
||||
return new File([file], `${filePrefix}${nameWithoutPrefix}`, { type: file.type });
|
||||
})
|
||||
: result.files;
|
||||
|
||||
return processedFiles;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Core execution function for API requests
|
||||
*/
|
||||
const executeApiRequest = async (
|
||||
endpoint: string,
|
||||
formData: FormData,
|
||||
files: File[],
|
||||
filePrefix: string,
|
||||
preserveBackendFilename?: boolean
|
||||
): Promise<File[]> => {
|
||||
const response = await axios.post(endpoint, formData, {
|
||||
responseType: 'blob',
|
||||
timeout: AUTOMATION_CONSTANTS.OPERATION_TIMEOUT
|
||||
});
|
||||
|
||||
return await processMultiFileResponse(
|
||||
response.data,
|
||||
response.headers,
|
||||
files,
|
||||
filePrefix,
|
||||
preserveBackendFilename
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute single-file tool operation (processes files one at a time)
|
||||
*/
|
||||
const executeSingleFileOperation = async (
|
||||
config: any,
|
||||
parameters: any,
|
||||
files: File[],
|
||||
filePrefix: string
|
||||
): Promise<File[]> => {
|
||||
const resultFiles: File[] = [];
|
||||
|
||||
for (const file of files) {
|
||||
const endpoint = typeof config.endpoint === 'function'
|
||||
? config.endpoint(parameters)
|
||||
: config.endpoint;
|
||||
|
||||
const formData = (config.buildFormData as (params: any, file: File) => FormData)(parameters, file);
|
||||
|
||||
const processedFiles = await executeApiRequest(
|
||||
endpoint,
|
||||
formData,
|
||||
[file],
|
||||
filePrefix,
|
||||
config.preserveBackendFilename
|
||||
);
|
||||
resultFiles.push(...processedFiles);
|
||||
}
|
||||
|
||||
return resultFiles;
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute multi-file tool operation (processes all files in one request)
|
||||
*/
|
||||
const executeMultiFileOperation = async (
|
||||
config: any,
|
||||
parameters: any,
|
||||
files: File[],
|
||||
filePrefix: string
|
||||
): Promise<File[]> => {
|
||||
const endpoint = typeof config.endpoint === 'function'
|
||||
? config.endpoint(parameters)
|
||||
: config.endpoint;
|
||||
|
||||
const formData = (config.buildFormData as (params: any, files: File[]) => FormData)(parameters, files);
|
||||
|
||||
return await executeApiRequest(
|
||||
endpoint,
|
||||
formData,
|
||||
files,
|
||||
filePrefix,
|
||||
config.preserveBackendFilename
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Execute a tool operation directly without using React hooks
|
||||
*/
|
||||
export const executeToolOperation = async (
|
||||
operationName: string,
|
||||
parameters: any,
|
||||
files: File[],
|
||||
toolRegistry: ToolRegistry
|
||||
): Promise<File[]> => {
|
||||
return executeToolOperationWithPrefix(operationName, parameters, files, toolRegistry, AUTOMATION_CONSTANTS.FILE_PREFIX);
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute a tool operation with custom prefix
|
||||
*/
|
||||
export const executeToolOperationWithPrefix = async (
|
||||
operationName: string,
|
||||
parameters: any,
|
||||
files: File[],
|
||||
toolRegistry: ToolRegistry,
|
||||
filePrefix: string = AUTOMATION_CONSTANTS.FILE_PREFIX
|
||||
): Promise<File[]> => {
|
||||
const config = toolRegistry[operationName as ToolId]?.operationConfig;
|
||||
if (!config) {
|
||||
throw new Error(`Tool operation not supported: ${operationName}`);
|
||||
}
|
||||
|
||||
try {
|
||||
// Check if tool uses custom processor (like Convert tool)
|
||||
if (config.customProcessor) {
|
||||
const resultFiles = await config.customProcessor(parameters, files);
|
||||
return resultFiles;
|
||||
}
|
||||
|
||||
// Execute based on tool type
|
||||
if (config.toolType === ToolType.multiFile) {
|
||||
return await executeMultiFileOperation(config, parameters, files, filePrefix);
|
||||
} else {
|
||||
return await executeSingleFileOperation(config, parameters, files, filePrefix);
|
||||
}
|
||||
|
||||
} catch (error: any) {
|
||||
console.error(`❌ ${operationName} failed:`, error);
|
||||
throw new Error(`${operationName} operation failed: ${error.response?.data || error.message}`);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute an entire automation sequence
|
||||
*/
|
||||
export const executeAutomationSequence = async (
|
||||
automation: any,
|
||||
initialFiles: File[],
|
||||
toolRegistry: ToolRegistry,
|
||||
onStepStart?: (stepIndex: number, operationName: string) => void,
|
||||
onStepComplete?: (stepIndex: number, resultFiles: File[]) => void,
|
||||
onStepError?: (stepIndex: number, error: string) => void
|
||||
): Promise<File[]> => {
|
||||
console.log(`🚀 Starting automation: ${automation.name || 'Unnamed'}`);
|
||||
console.log(`📁 Input: ${initialFiles.length} file(s)`);
|
||||
|
||||
if (!automation?.operations || automation.operations.length === 0) {
|
||||
throw new Error('No operations in automation');
|
||||
}
|
||||
|
||||
let currentFiles = [...initialFiles];
|
||||
const automationPrefix = automation.name ? `${automation.name}_` : 'automated_';
|
||||
|
||||
for (let i = 0; i < automation.operations.length; i++) {
|
||||
const operation = automation.operations[i];
|
||||
|
||||
console.log(`\n📋 Step ${i + 1}/${automation.operations.length}: ${operation.operation}`);
|
||||
console.log(` Input: ${currentFiles.length} file(s)`);
|
||||
|
||||
try {
|
||||
onStepStart?.(i, operation.operation);
|
||||
|
||||
const resultFiles = await executeToolOperationWithPrefix(
|
||||
operation.operation,
|
||||
operation.parameters || {},
|
||||
currentFiles,
|
||||
toolRegistry,
|
||||
i === automation.operations.length - 1 ? automationPrefix : '' // Only add prefix to final step
|
||||
);
|
||||
|
||||
console.log(`✅ Step ${i + 1} completed: ${resultFiles.length} result files`);
|
||||
currentFiles = resultFiles;
|
||||
onStepComplete?.(i, resultFiles);
|
||||
|
||||
} catch (error: any) {
|
||||
console.error(`❌ Step ${i + 1} failed:`, error);
|
||||
onStepError?.(i, error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\n🎉 Automation complete: ${currentFiles.length} file(s)`);
|
||||
return currentFiles;
|
||||
};
|
||||
194
frontend/src/core/utils/automationFileProcessor.ts
Normal file
194
frontend/src/core/utils/automationFileProcessor.ts
Normal file
@@ -0,0 +1,194 @@
|
||||
/**
|
||||
* File processing utilities specifically for automation workflows
|
||||
*/
|
||||
|
||||
import axios from 'axios';
|
||||
import { zipFileService } from '@app/services/zipFileService';
|
||||
import { ResourceManager } from '@app/utils/resourceManager';
|
||||
import { AUTOMATION_CONSTANTS } from '@app/constants/automation';
|
||||
|
||||
/**
 * Optional tuning for automation HTTP requests.
 */
export interface AutomationProcessingOptions {
  // Request timeout in ms; defaults to AUTOMATION_CONSTANTS.OPERATION_TIMEOUT.
  timeout?: number;
  // Axios response type; defaults to 'blob'.
  responseType?: 'blob' | 'json';
}

/**
 * Outcome of a single automation processing step.
 */
export interface AutomationProcessingResult {
  // False when the request or extraction failed outright.
  success: boolean;
  // Files produced by the step (may be a single ZIP when extraction is skipped or fails).
  files: File[];
  // Human-readable warnings/errors accumulated during processing.
  errors: string[];
}
|
||||
|
||||
export class AutomationFileProcessor {
|
||||
/**
|
||||
* Check if a blob is a ZIP file by examining its header
|
||||
*/
|
||||
static isZipFile(blob: Blob): boolean {
|
||||
// This is a simple check - in a real implementation you might want to read the first few bytes
|
||||
// For now, we'll rely on the extraction attempt and fallback
|
||||
return blob.type === 'application/zip' || blob.type === 'application/x-zip-compressed';
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract files from a ZIP blob during automation execution, with fallback for non-ZIP files
|
||||
* Extracts all file types (PDFs, images, etc.) except HTML files which stay zipped
|
||||
*/
|
||||
static async extractAutomationZipFiles(blob: Blob): Promise<AutomationProcessingResult> {
|
||||
try {
|
||||
const zipFile = ResourceManager.createTimestampedFile(
|
||||
blob,
|
||||
AUTOMATION_CONSTANTS.RESPONSE_ZIP_PREFIX,
|
||||
'.zip',
|
||||
'application/zip'
|
||||
);
|
||||
|
||||
// Check if ZIP contains HTML files - if so, keep as ZIP
|
||||
const containsHtml = await zipFileService.containsHtmlFiles(zipFile);
|
||||
if (containsHtml) {
|
||||
// HTML files should stay zipped - return ZIP as-is
|
||||
return {
|
||||
success: true,
|
||||
files: [zipFile],
|
||||
errors: []
|
||||
};
|
||||
}
|
||||
|
||||
// Extract all files (not just PDFs) - handles images from scanner-image-split, etc.
|
||||
const result = await zipFileService.extractAllFiles(zipFile);
|
||||
|
||||
if (!result.success || result.extractedFiles.length === 0) {
|
||||
// Fallback: keep as ZIP file (might be valid ZIP with extraction issues)
|
||||
return {
|
||||
success: true,
|
||||
files: [zipFile],
|
||||
errors: [`ZIP extraction failed, kept as ZIP: ${result.errors?.join(', ') || 'Unknown error'}`]
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
files: result.extractedFiles,
|
||||
errors: []
|
||||
};
|
||||
} catch (error) {
|
||||
console.warn('Failed to extract automation ZIP files, keeping as ZIP:', error);
|
||||
// Fallback: keep as ZIP file for next automation step to handle
|
||||
const fallbackFile = ResourceManager.createTimestampedFile(
|
||||
blob,
|
||||
AUTOMATION_CONSTANTS.RESPONSE_ZIP_PREFIX,
|
||||
'.zip',
|
||||
'application/zip'
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
files: [fallbackFile],
|
||||
errors: [`ZIP extraction failed, kept as ZIP: ${error}`]
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a single file through an automation step
|
||||
*/
|
||||
static async processAutomationSingleFile(
|
||||
endpoint: string,
|
||||
formData: FormData,
|
||||
originalFileName: string,
|
||||
options: AutomationProcessingOptions = {}
|
||||
): Promise<AutomationProcessingResult> {
|
||||
try {
|
||||
const response = await axios.post(endpoint, formData, {
|
||||
responseType: options.responseType || 'blob',
|
||||
timeout: options.timeout || AUTOMATION_CONSTANTS.OPERATION_TIMEOUT
|
||||
});
|
||||
|
||||
if (response.status !== 200) {
|
||||
return {
|
||||
success: false,
|
||||
files: [],
|
||||
errors: [`Automation step failed - HTTP ${response.status}: ${response.statusText}`]
|
||||
};
|
||||
}
|
||||
|
||||
const resultFile = ResourceManager.createResultFile(
|
||||
response.data,
|
||||
originalFileName,
|
||||
AUTOMATION_CONSTANTS.FILE_PREFIX
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
files: [resultFile],
|
||||
errors: []
|
||||
};
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
files: [],
|
||||
errors: [`Automation step failed: ${error.response?.data || error.message}`]
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process multiple files through an automation step
|
||||
*/
|
||||
static async processAutomationMultipleFiles(
|
||||
endpoint: string,
|
||||
formData: FormData,
|
||||
options: AutomationProcessingOptions = {}
|
||||
): Promise<AutomationProcessingResult> {
|
||||
try {
|
||||
const response = await axios.post(endpoint, formData, {
|
||||
responseType: options.responseType || 'blob',
|
||||
timeout: options.timeout || AUTOMATION_CONSTANTS.OPERATION_TIMEOUT
|
||||
});
|
||||
|
||||
if (response.status !== 200) {
|
||||
return {
|
||||
success: false,
|
||||
files: [],
|
||||
errors: [`Automation step failed - HTTP ${response.status}: ${response.statusText}`]
|
||||
};
|
||||
}
|
||||
|
||||
// Multi-file responses are typically ZIP files
|
||||
return await this.extractAutomationZipFiles(response.data);
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
files: [],
|
||||
errors: [`Automation step failed: ${error.response?.data || error.message}`]
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build form data for automation tool operations
|
||||
*/
|
||||
static buildAutomationFormData(
|
||||
parameters: Record<string, any>,
|
||||
files: File | File[],
|
||||
fileFieldName: string = 'fileInput'
|
||||
): FormData {
|
||||
const formData = new FormData();
|
||||
|
||||
// Add files
|
||||
if (Array.isArray(files)) {
|
||||
files.forEach(file => formData.append(fileFieldName, file));
|
||||
} else {
|
||||
formData.append(fileFieldName, files);
|
||||
}
|
||||
|
||||
// Add parameters
|
||||
Object.entries(parameters).forEach(([key, value]) => {
|
||||
if (Array.isArray(value)) {
|
||||
value.forEach(item => formData.append(key, item));
|
||||
} else if (value !== undefined && value !== null) {
|
||||
formData.append(key, value);
|
||||
}
|
||||
});
|
||||
|
||||
return formData;
|
||||
}
|
||||
}
|
||||
77
frontend/src/core/utils/bulkselection/README.md
Normal file
77
frontend/src/core/utils/bulkselection/README.md
Normal file
@@ -0,0 +1,77 @@
|
||||
## Bulk Selection Expressions
|
||||
|
||||
### What this does
|
||||
|
||||
- Lets you select pages using compact expressions instead of typing long CSV lists.
|
||||
- Your input expression is preserved exactly as typed; we only expand it under the hood into concrete page numbers based on the current document's page count.
|
||||
- The final selection is always deduplicated, clamped to valid page numbers, and sorted ascending.
|
||||
|
||||
### Basic forms
|
||||
|
||||
- Numbers: `5` selects page 5.
|
||||
- Ranges: `3-7` selects pages 3,4,5,6,7 (inclusive). If the start is greater than the end, it is swapped automatically (e.g., `7-3` → `3-7`).
|
||||
- Lists (OR): `1,3-5,10` selects pages 1,3,4,5,10.
|
||||
|
||||
You can still use the original CSV format. For example, `1,2,3,4,5` (first five pages) continues to work.
|
||||
|
||||
### Logical operators
|
||||
|
||||
- OR (union): `,` or `|` or the word `or`
|
||||
- AND (intersection): `&` or the word `and`
|
||||
- NOT (complement within 1..max): `!term` or `!(group)` or the word `not term` / `not (group)`
|
||||
|
||||
Operator precedence (from highest to lowest):
|
||||
1) `!` (NOT)
|
||||
2) `&` / `and` (AND)
|
||||
3) `,` / `|` / `or` (OR)
|
||||
|
||||
Use parentheses `(...)` to override precedence where needed.
|
||||
|
||||
### Keywords and progressions
|
||||
|
||||
- Keywords (case-insensitive):
|
||||
- `even`: all even pages (2, 4, 6, ...)
|
||||
- `odd`: all odd pages (1, 3, 5, ...)
|
||||
|
||||
- Arithmetic progressions: `k n ± c`, e.g. `2n`, `3n+1`, `4n-1`
|
||||
- `n` starts at 0 (CSS-style: `:nth-child`), then increases by 1 (n = 0,1,2,...). Non-positive results are discarded.
|
||||
- `k` must be a positive integer (≥ 1). `c` can be any integer (including negative).
|
||||
- Examples:
|
||||
- `2n` → 0,2,4,6,... → becomes 2,4,6,... after discarding non-positive
|
||||
- `2n-1` → -1,1,3,5,... → becomes 1,3,5,... (odd)
|
||||
- `3n+1` → 1,4,7,10,13,...
|
||||
|
||||
All selections are automatically limited to the current document's valid page numbers `[1..maxPages]`.
|
||||
|
||||
### Parentheses
|
||||
|
||||
- Group with parentheses to control evaluation order and combine NOT with groups.
|
||||
- Examples:
|
||||
- `1-10 & (even, 15)` → even pages 2,4,6,8,10 (15 is outside 1-10)
|
||||
- `!(1-5, odd)` → remove pages 1..5 and all odd pages; for a 10-page doc this yields 6,8,10
|
||||
- `!(10-20 & !2n)` → removes the odd pages 11..19; note the complement is taken over the whole universe `[1..maxPages]`, not just within 10..20
|
||||
- `(2n | 3n+1) & 1-20` → union of even numbers and 3n+1 numbers, intersected with 1..20
|
||||
|
||||
### Whitespace and case
|
||||
|
||||
- Whitespace is ignored: ` odd & 1 - 7` is valid.
|
||||
- Keywords are case-insensitive: `ODD`, `Odd`, `odd` all work.
|
||||
|
||||
### Universe, clamping, deduplication
|
||||
|
||||
- The selection universe is the document's pages `[1..maxPages]`.
|
||||
- Numbers outside the universe are discarded.
|
||||
- Ranges are clamped to `[1..maxPages]` (e.g., `0-5` → `1-5`, `9-999` in a 10-page doc → `9-10`).
|
||||
- Duplicates are removed; the final result is sorted ascending.
|
||||
|
||||
### Examples
|
||||
|
||||
- `1-10 & 2n & !5-7` → 2,4,8,10
|
||||
- `odd` → 1,3,5,7,9,...
|
||||
- `even` → 2,4,6,8,10,...
|
||||
- `2n-1` → 1,3,5,7,9,...
|
||||
- `3n+1` → 4,7,10,13,16,... (up to max pages)
|
||||
- `1-3, 8-9` → 1,2,3,8,9
|
||||
- `1-2 | 9-10 or 5` → 1,2,5,9,10
|
||||
- `!(1-5)` → remove the first five pages from the universe
|
||||
- `!(10-20 & !2n)` → everything except the odd pages 11..19 (the complement spans the whole universe `[1..maxPages]`)
|
||||
253
frontend/src/core/utils/bulkselection/parseSelection.test.ts
Normal file
253
frontend/src/core/utils/bulkselection/parseSelection.test.ts
Normal file
@@ -0,0 +1,253 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { parseSelection } from '@app/utils/bulkselection/parseSelection';
|
||||
|
||||
// Test suite for the bulk-selection expression parser. Covers basic numbers
// and ranges, keywords (even/odd), kn±c progressions, the !/&/, operators and
// their precedence, parentheses, the CSV fallback on malformed input, and
// clamping/deduplication behaviour.
describe('parseSelection', () => {
  const max = 120;

  it('1) parses single numbers', () => {
    expect(parseSelection('5', max)).toEqual([5]);
  });

  it('2) parses simple range', () => {
    expect(parseSelection('3-7', max)).toEqual([3,4,5,6,7]);
  });

  it('3) parses multiple numbers and ranges via comma OR', () => {
    expect(parseSelection('1,3-5,10', max)).toEqual([1,3,4,5,10]);
  });

  it('4) respects bounds (clamps to 1..max and filters invalid)', () => {
    expect(parseSelection('0, -2, 1-2, 9999', max)).toEqual([1,2]);
  });

  it('5) supports even keyword', () => {
    expect(parseSelection('even', 10)).toEqual([2,4,6,8,10]);
  });

  it('6) supports odd keyword', () => {
    expect(parseSelection('odd', 10)).toEqual([1,3,5,7,9]);
  });

  it('7) supports 2n progression', () => {
    expect(parseSelection('2n', 12)).toEqual([2,4,6,8,10,12]);
  });

  it('8) supports kn±c progression (3n+1)', () => {
    expect(parseSelection('3n+1', 10)).toEqual([1,4,7,10]);
  });

  it('9) supports kn±c progression (4n-1)', () => {
    expect(parseSelection('4n-1', 15)).toEqual([3,7,11,15]);
  });

  it('10) supports logical AND (&) intersection', () => {
    // even AND 1-10 => even numbers within 1..10
    expect(parseSelection('even & 1-10', 20)).toEqual([2,4,6,8,10]);
  });

  it('11) supports logical OR with comma', () => {
    expect(parseSelection('1-3, 8-9', 20)).toEqual([1,2,3,8,9]);
  });

  it('12) supports logical OR with | and word or', () => {
    expect(parseSelection('1-2 | 9-10 or 5', 20)).toEqual([1,2,5,9,10]);
  });

  it('13) supports NOT operator !', () => {
    // !1-5 within max=10 -> 6..10
    expect(parseSelection('!1-5', 10)).toEqual([6,7,8,9,10]);
  });

  it('14) supports combination: 1-10 & 2n & !5-7', () => {
    expect(parseSelection('1-10 & 2n & !5-7', 20)).toEqual([2,4,8,10]);
  });

  it('15) preserves precedence: AND over OR', () => {
    // 1-10 & even, 15 OR => ( (1-10 & even) , 15 )
    expect(parseSelection('1-10 & even, 15', 20)).toEqual([2,4,6,8,10,15]);
  });

  it('16) handles whitespace and case-insensitive keywords', () => {
    expect(parseSelection(' OdD & 1-7 ', 10)).toEqual([1,3,5,7]);
  });

  it('17) progression plus range: 2n | 9-11 within 12', () => {
    expect(parseSelection('2n | 9-11', 12)).toEqual([2,4,6,8,9,10,11,12]);
  });

  it('18) complex: (2n-1 & 1-20) & ! (5-7)', () => {
    expect(parseSelection('2n-1 & 1-20 & !5-7', 20)).toEqual([1,3,9,11,13,15,17,19]);
  });

  it('19) falls back to CSV when expression malformed', () => {
    // malformed: "2x" -> fallback should treat as CSV tokens -> only 2 ignored -> result empty
    expect(parseSelection('2x', 10)).toEqual([]);
    // malformed middle; still fallback handles CSV bits
    expect(parseSelection('1, 3-5, foo, 9', 10)).toEqual([1,3,4,5,9]);
  });

  it('20) clamps ranges that exceed bounds', () => {
    expect(parseSelection('0-5, 9-10', 10)).toEqual([1,2,3,4,5,9,10]);
  });

  it('21) supports parentheses to override precedence', () => {
    // Without parentheses: 1-10 & even, 15 => [2,4,6,8,10,15]
    // With parentheses around OR: 1-10 & (even, 15) => [2,4,6,8,10]
    expect(parseSelection('1-10 & (even, 15)', 20)).toEqual([2,4,6,8,10]);
  });

  it('22) NOT over a grouped intersection', () => {
    // !(10-20 & !2n) within 1..25
    // Inner: 10-20 & !2n => odd numbers from 11..19 plus 10,12,14,16,18,20 excluded
    // Complement in 1..25 removes those, keeping others
    const result = parseSelection('!(10-20 & !2n)', 25);
    expect(result).toEqual([1,2,3,4,5,6,7,8,9,10,12,14,16,18,20,21,22,23,24,25]);
  });

  it('23) nested parentheses with progressions', () => {
    expect(parseSelection('(2n | 3n+1) & 1-20', 50)).toEqual([
      1,2,4,6,7,8,10,12,13,14,16,18,19,20
    ]);
  });

  it('24) parentheses with NOT directly on group', () => {
    expect(parseSelection('!(1-5, odd)', 10)).toEqual([6,8,10]);
  });

  it('25) whitespace within parentheses is ignored', () => {
    expect(parseSelection('( 1 - 3 , 6 )', 10)).toEqual([1,2,3,6]);
  });

  it('26) malformed missing closing parenthesis falls back to CSV', () => {
    // Expression parse should fail; fallback CSV should pick numbers only
    expect(parseSelection('(1-3, 6', 10)).toEqual([6]);
  });

  it('27) nested NOT and AND with parentheses', () => {
    // !(odd & 5-9) within 1..12 => remove odd numbers 5,7,9
    expect(parseSelection('!(odd & 5-9)', 12)).toEqual([1,2,3,4,6,8,10,11,12]);
  });

  it('28) deep nesting and mixing operators', () => {
    const expr = '(1-4 & 2n) , ( (5-10 & odd) & !(7) ), (3n+1 & 1-20)';
    expect(parseSelection(expr, 20)).toEqual([1,2,4,5,7,9,10,13,16,19]);
  });

  // NOTE(review): tests 31/32 appear before 29/30 in this file; the numbers
  // are labels only and do not affect execution order.
  it('31) word NOT works like ! for terms', () => {
    expect(parseSelection('not 1-3', 6)).toEqual([4,5,6]);
  });

  it('32) word NOT works like ! for groups', () => {
    expect(parseSelection('not (odd & 1-6)', 8)).toEqual([2,4,6,7,8]);
  });

  it('29) parentheses around a single term has no effect', () => {
    expect(parseSelection('(even)', 8)).toEqual([2,4,6,8]);
  });

  it('30) redundant nested parentheses', () => {
    expect(parseSelection('(((1-3))), ((2n))', 6)).toEqual([1,2,3,4,6]);
  });

  // Additional edge cases and comprehensive coverage
  it('33) handles empty input gracefully', () => {
    expect(parseSelection('', 10)).toEqual([]);
    expect(parseSelection(' ', 10)).toEqual([]);
  });

  it('34) handles zero or negative maxPages', () => {
    expect(parseSelection('1-10', 0)).toEqual([]);
    expect(parseSelection('1-10', -5)).toEqual([]);
  });

  it('35) handles large progressions efficiently', () => {
    expect(parseSelection('100n', 1000)).toEqual([100, 200, 300, 400, 500, 600, 700, 800, 900, 1000]);
  });

  it('36) handles progressions with large offsets', () => {
    expect(parseSelection('5n+97', 100)).toEqual([97]);
    expect(parseSelection('3n-2', 10)).toEqual([1, 4, 7, 10]);
  });

  it('37) mixed case keywords work correctly', () => {
    expect(parseSelection('EVEN & Odd', 6)).toEqual([]);
    expect(parseSelection('Even OR odd', 6)).toEqual([1, 2, 3, 4, 5, 6]);
  });

  it('38) complex nested expressions with all operators', () => {
    const expr = '(1-20 & even) | (odd & !5-15) | (3n+1 & 1-10)';
    // (1-20 & even) = [2,4,6,8,10,12,14,16,18,20]
    // (odd & !5-15) = odd numbers not in 5-15 = [1,3,17,19]
    // (3n+1 & 1-10) = [1,4,7,10]
    // Union of all = [1,2,3,4,6,7,8,10,12,14,16,17,18,19,20]
    expect(parseSelection(expr, 20)).toEqual([1, 2, 3, 4, 6, 7, 8, 10, 12, 14, 16, 17, 18, 19, 20]);
  });

  it('39) multiple NOT operators in sequence', () => {
    expect(parseSelection('not not 1-5', 10)).toEqual([1, 2, 3, 4, 5]);
    expect(parseSelection('!!!1-3', 10)).toEqual([4, 5, 6, 7, 8, 9, 10]);
  });

  it('40) edge case: single page selection', () => {
    expect(parseSelection('1', 1)).toEqual([1]);
    expect(parseSelection('5', 3)).toEqual([]);
  });

  it('41) backwards ranges are handled correctly', () => {
    expect(parseSelection('10-5', 15)).toEqual([5, 6, 7, 8, 9, 10]);
  });

  it('42) progressions that start beyond maxPages', () => {
    expect(parseSelection('10n+50', 40)).toEqual([]);
    expect(parseSelection('5n+35', 40)).toEqual([35, 40]);
  });

  it('43) complex operator precedence with mixed syntax', () => {
    // AND has higher precedence than OR
    expect(parseSelection('1-3, 5-7 & even', 10)).toEqual([1, 2, 3, 6]);
    expect(parseSelection('1-3 | 5-7 and even', 10)).toEqual([1, 2, 3, 6]);
  });

  it('44) whitespace tolerance in complex expressions', () => {
    const expr1 = '1-5&even|odd&!3';
    const expr2 = ' 1 - 5 & even | odd & ! 3 ';
    expect(parseSelection(expr1, 10)).toEqual(parseSelection(expr2, 10));
  });

  it('45) fallback behavior with partial valid expressions', () => {
    // Should fallback and extract valid CSV parts
    expect(parseSelection('1, 2-4, invalid, 7', 10)).toEqual([1, 2, 3, 4, 7]);
    expect(parseSelection('1-3, @#$, 8-9', 10)).toEqual([1, 2, 3, 8, 9]);
  });

  it('46) progressions with k=1 (equivalent to n)', () => {
    expect(parseSelection('1n', 5)).toEqual([1, 2, 3, 4, 5]);
    expect(parseSelection('1n+2', 5)).toEqual([2, 3, 4, 5]);
  });

  it('47) very large ranges are clamped correctly', () => {
    expect(parseSelection('1-999999', 10)).toEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
    // Note: -100-5 would fallback to CSV and reject -100, but 0-5 should work
    expect(parseSelection('0-5', 10)).toEqual([1, 2, 3, 4, 5]);
  });

  it('48) multiple comma-separated ranges', () => {
    expect(parseSelection('1-2, 4-5, 7-8, 10', 10)).toEqual([1, 2, 4, 5, 7, 8, 10]);
  });

  it('49) combination of all features in one expression', () => {
    const expr = '(1-10 & even) | (odd & 15-25) & !(3n+1 & 1-30) | 50n';
    const result = parseSelection(expr, 100);
    // This should combine: even numbers 2,4,6,8,10 with odd 15-25 excluding 3n+1 matches, plus 50n
    expect(result.length).toBeGreaterThan(0);
    expect(result).toContain(50);
    expect(result).toContain(100);
  });

  it('50) stress test with deeply nested parentheses', () => {
    const expr = '((((1-5)))) & ((((even)))) | ((((odd & 7-9))))';
    expect(parseSelection(expr, 10)).toEqual([2, 4, 7, 9]);
  });
});
|
||||
|
||||
|
||||
413
frontend/src/core/utils/bulkselection/parseSelection.ts
Normal file
413
frontend/src/core/utils/bulkselection/parseSelection.ts
Normal file
@@ -0,0 +1,413 @@
|
||||
// A parser that converts selection expressions (e.g., "1-10 & 2n & !50-100", "odd", "2n-1")
|
||||
// into a list of page numbers within [1, maxPages].
|
||||
|
||||
/*
|
||||
Supported grammar (case-insensitive for words):
|
||||
expression := disjunction
|
||||
disjunction := conjunction ( ("," | "|" | "or") conjunction )*
|
||||
conjunction := unary ( ("&" | "and") unary )*
|
||||
unary := ("!" unary) | ("not" unary) | primary
|
||||
primary := "(" expression ")" | range | progression | keyword | number
|
||||
range := number "-" number // inclusive
|
||||
progression := k ["*"] "n" (("+" | "-") c)? // k >= 1, c any integer, n starts at 0
|
||||
keyword := "even" | "odd"
|
||||
number := digits (>= 1)
|
||||
|
||||
Precedence: "!" (NOT) > "&"/"and" (AND) > "," "|" "or" (OR)
|
||||
Associativity: left-to-right within the same precedence level
|
||||
|
||||
Notes:
|
||||
- Whitespace is ignored.
|
||||
- The universe is [1..maxPages]. The complement operator ("!" / "not") applies within this universe.
|
||||
- Out-of-bounds numbers are clamped in ranges and ignored as singletons.
|
||||
- On parse failure, the parser falls back to CSV (numbers and ranges separated by commas).
|
||||
|
||||
Examples:
|
||||
1-10 & even -> even pages between 1 and 10
|
||||
!(5-7) -> all pages except 5..7
|
||||
3n+1 -> 1,4,7,... (n starts at 0)
|
||||
(2n | 3n+1) & 1-20 -> multiples of 2 or numbers of the form 3n+1 within 1..20
|
||||
*/
|
||||
|
||||
export function parseSelection(input: string, maxPages: number): number[] {
|
||||
const clampedMax = Math.max(0, Math.floor(maxPages || 0));
|
||||
if (clampedMax === 0) return [];
|
||||
|
||||
const trimmed = (input || '').trim();
|
||||
if (trimmed.length === 0) return [];
|
||||
|
||||
try {
|
||||
const parser = new ExpressionParser(trimmed, clampedMax);
|
||||
const resultSet = parser.parse();
|
||||
return toSortedArray(resultSet);
|
||||
} catch {
|
||||
// Fallback: simple CSV parser (e.g., "1,3,5-10")
|
||||
return toSortedArray(parseCsvFallback(trimmed, clampedMax));
|
||||
}
|
||||
}
|
||||
|
||||
export function parseSelectionWithDiagnostics(
|
||||
input: string,
|
||||
maxPages: number,
|
||||
options?: { strict?: boolean }
|
||||
): { pages: number[]; warning?: string } {
|
||||
const clampedMax = Math.max(0, Math.floor(maxPages || 0));
|
||||
if (clampedMax === 0) return { pages: [] };
|
||||
|
||||
const trimmed = (input || '').trim();
|
||||
if (trimmed.length === 0) return { pages: [] };
|
||||
|
||||
try {
|
||||
const parser = new ExpressionParser(trimmed, clampedMax);
|
||||
const resultSet = parser.parse();
|
||||
return { pages: toSortedArray(resultSet) };
|
||||
} catch (err) {
|
||||
if (options?.strict) {
|
||||
throw err;
|
||||
}
|
||||
const pages = toSortedArray(parseCsvFallback(trimmed, clampedMax));
|
||||
const tokens = trimmed.split(',').map(t => t.trim()).filter(Boolean);
|
||||
const bad = tokens.find(tok => !/^(\d+\s*-\s*\d+|\d+)$/.test(tok));
|
||||
const warning = `Malformed expression${bad ? ` at: '${bad}'` : ''}. Falling back to CSV interpretation.`;
|
||||
return { pages, warning };
|
||||
}
|
||||
}
|
||||
|
||||
function toSortedArray(set: Set<number>): number[] {
|
||||
return Array.from(set).sort((a, b) => a - b);
|
||||
}
|
||||
|
||||
function parseCsvFallback(input: string, max: number): Set<number> {
|
||||
const result = new Set<number>();
|
||||
const parts = input.split(',').map(p => p.trim()).filter(Boolean);
|
||||
for (const part of parts) {
|
||||
const rangeMatch = part.match(/^(\d+)\s*-\s*(\d+)$/);
|
||||
if (rangeMatch) {
|
||||
const start = clampToRange(parseInt(rangeMatch[1], 10), 1, max);
|
||||
const end = clampToRange(parseInt(rangeMatch[2], 10), 1, max);
|
||||
if (Number.isFinite(start) && Number.isFinite(end)) {
|
||||
const [lo, hi] = start <= end ? [start, end] : [end, start];
|
||||
for (let i = lo; i <= hi; i++) result.add(i);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
// Accept only pure positive integers (no signs, no letters)
|
||||
if (/^\d+$/.test(part)) {
|
||||
const n = parseInt(part, 10);
|
||||
if (Number.isFinite(n) && n >= 1 && n <= max) result.add(n);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function clampToRange(v: number, min: number, max: number): number {
|
||||
if (!Number.isFinite(v)) return NaN as unknown as number;
|
||||
return Math.min(Math.max(v, min), max);
|
||||
}
|
||||
|
||||
/**
 * Recursive-descent parser for the page-selection grammar described at the
 * top of this file. The universe is [1..max]; every build* helper clamps or
 * filters its output to that range, so parse() only ever yields valid pages.
 *
 * One method per precedence level (disjunction < conjunction < unary NOT <
 * primary), each consuming characters from `src` through a single cursor
 * `idx`. Speculative reads (keywords, progressions, operator lookahead)
 * save and restore `idx` to backtrack on mismatch.
 */
class ExpressionParser {
  // Raw expression text being parsed.
  private readonly src: string;
  // Inclusive upper bound of the page universe.
  private readonly max: number;
  // Cursor: index of the next unread character in `src`.
  private idx: number = 0;

  constructor(source: string, maxPages: number) {
    this.src = source;
    this.max = maxPages;
  }

  /**
   * Parses the entire expression and returns the resulting page set.
   * Throws on trailing characters or any malformed construct; callers
   * catch and fall back to CSV parsing.
   */
  parse(): Set<number> {
    this.skipWs();
    const set = this.parseDisjunction();
    this.skipWs();
    // If there are leftover non-space characters, treat as error
    if (this.idx < this.src.length) {
      throw new Error('Unexpected trailing input');
    }
    return set;
  }

  // Lowest precedence: OR level — ',' '|' symbols or the word 'or'.
  private parseDisjunction(): Set<number> {
    let left = this.parseConjunction();
    while (true) {
      this.skipWs();
      const op = this.peekWordOrSymbol();
      if (!op) break;
      if (op.type === 'symbol' && (op.value === ',' || op.value === '|')) {
        this.consume(op.length);
        const right = this.parseConjunction();
        left = union(left, right);
        continue;
      }
      if (op.type === 'word' && op.value === 'or') {
        this.consume(op.length);
        const right = this.parseConjunction();
        left = union(left, right);
        continue;
      }
      break;
    }
    return left;
  }

  // AND level — '&' symbol or the word 'and'; binds tighter than OR.
  private parseConjunction(): Set<number> {
    let left = this.parseUnary();
    while (true) {
      this.skipWs();
      const op = this.peekWordOrSymbol();
      if (!op) break;
      if (op.type === 'symbol' && op.value === '&') {
        this.consume(op.length);
        const right = this.parseUnary();
        left = intersect(left, right);
        continue;
      }
      if (op.type === 'word' && op.value === 'and') {
        this.consume(op.length);
        const right = this.parseUnary();
        left = intersect(left, right);
        continue;
      }
      break;
    }
    return left;
  }

  // NOT level — '!' or the word 'not'; complements within [1..max].
  private parseUnary(): Set<number> {
    this.skipWs();
    if (this.peek('!')) {
      this.consume(1);
      const inner = this.parseUnary();
      return complement(inner, this.max);
    }
    // Word-form NOT
    if (this.tryConsumeNot()) {
      const inner = this.parseUnary();
      return complement(inner, this.max);
    }
    return this.parsePrimary();
  }

  // Highest precedence: parenthesized expression, 'even'/'odd' keyword,
  // arithmetic progression "k n [+/- c]", range "a-b", or single number.
  private parsePrimary(): Set<number> {
    this.skipWs();

    // Parenthesized expression: '(' expression ')'
    if (this.peek('(')) {
      this.consume(1);
      const inner = this.parseDisjunction();
      this.skipWs();
      if (!this.peek(')')) throw new Error('Expected )');
      this.consume(1);
      return inner;
    }

    // Keywords: even / odd
    const keyword = this.tryReadKeyword();
    if (keyword) {
      if (keyword === 'even') return this.buildEven();
      if (keyword === 'odd') return this.buildOdd();
    }

    // Progression: k n ( +/- c )?
    const progression = this.tryReadProgression();
    if (progression) {
      return this.buildProgression(progression.k, progression.c);
    }

    // Number or Range
    const num = this.tryReadNumber();
    if (num !== null) {
      this.skipWs();
      if (this.peek('-')) {
        // Range
        this.consume(1);
        this.skipWs();
        const end = this.readRequiredNumber();
        return this.buildRange(num, end);
      }
      return this.buildSingleton(num);
    }

    // If nothing matched, error
    throw new Error('Expected primary');
  }

  // Set containing n, or empty when n is outside [1..max].
  private buildSingleton(n: number): Set<number> {
    const set = new Set<number>();
    if (n >= 1 && n <= this.max) set.add(n);
    return set;
  }

  // Inclusive range a..b, reordered if reversed and clamped to [1..max].
  private buildRange(a: number, b: number): Set<number> {
    const set = new Set<number>();
    let start = a, end = b;
    if (!Number.isFinite(start) || !Number.isFinite(end)) return set;
    if (start > end) [start, end] = [end, start];
    start = Math.max(1, start);
    end = Math.min(this.max, end);
    for (let i = start; i <= end; i++) set.add(i);
    return set;
  }

  // All values k*n + c (n = 0, 1, 2, ...) that land within [1..max].
  private buildProgression(k: number, c: number): Set<number> {
    const set = new Set<number>();
    // Guard also rejects NaN k (NaN >= 1 is false).
    if (!(k >= 1)) return set;
    // n starts at 0: k*n + c, for n=0,1,2,... while within [1..max]
    for (let n = 0; ; n++) {
      const value = k * n + c;
      if (value > this.max) break;
      if (value >= 1) set.add(value);
    }
    return set;
  }

  // 'even' is the progression 2n: 2, 4, 6, ...
  private buildEven(): Set<number> {
    return this.buildProgression(2, 0);
  }

  // 'odd' is the progression 2n - 1: 1, 3, 5, ...
  private buildOdd(): Set<number> {
    return this.buildProgression(2, -1);
  }

  // Reads 'even'/'odd' (case-insensitive); rewinds and returns null otherwise.
  private tryReadKeyword(): 'even' | 'odd' | null {
    const start = this.idx;
    const word = this.readWord();
    if (!word) return null;
    const lower = word.toLowerCase();
    if (lower === 'even' || lower === 'odd') {
      return lower as 'even' | 'odd';
    }
    // Not a keyword; rewind
    this.idx = start;
    return null;
  }

  // Attempts to read "k [*] n [(+|-) c]"; rewinds and returns null on mismatch.
  private tryReadProgression(): { k: number; c: number } | null {
    const start = this.idx;
    this.skipWs();
    const k = this.tryReadNumber();
    if (k === null) {
      this.idx = start;
      return null;
    }
    this.skipWs();
    // Optional '*'
    if (this.peek('*')) this.consume(1);
    this.skipWs();
    if (!this.peek('n') && !this.peek('N')) {
      this.idx = start;
      return null;
    }
    this.consume(1); // consume 'n'
    this.skipWs();
    // Optional (+|-) c
    let c = 0;
    if (this.peek('+') || this.peek('-')) {
      const sign = this.src[this.idx];
      this.consume(1);
      this.skipWs();
      const cVal = this.tryReadNumber();
      if (cVal === null) {
        this.idx = start;
        return null;
      }
      c = sign === '-' ? -cVal : cVal;
    }
    return { k, c };
  }

  // Reads an unsigned decimal integer at the cursor, or null if none.
  private tryReadNumber(): number | null {
    this.skipWs();
    const m = this.src.slice(this.idx).match(/^(\d+)/);
    if (!m) return null;
    this.consume(m[1].length);
    const num = parseInt(m[1], 10);
    return Number.isFinite(num) ? num : null;
  }

  // Like tryReadNumber, but throws when no number is present (range ends).
  private readRequiredNumber(): number {
    const n = this.tryReadNumber();
    if (n === null) throw new Error('Expected number');
    return n;
  }

  // Reads a run of ASCII letters, or null if the cursor is not on a letter.
  private readWord(): string | null {
    this.skipWs();
    const m = this.src.slice(this.idx).match(/^([A-Za-z]+)/);
    if (!m) return null;
    this.consume(m[1].length);
    return m[1];
  }

  // Consumes the word 'not' (case-insensitive) if present; otherwise rewinds.
  private tryConsumeNot(): boolean {
    const start = this.idx;
    const word = this.readWord();
    if (!word) {
      this.idx = start;
      return false;
    }
    if (word.toLowerCase() === 'not') {
      return true;
    }
    this.idx = start;
    return false;
  }

  // Looks ahead for a binary-operator token ('and'/'or' word, or '&' '|' ',')
  // WITHOUT consuming it; the caller consumes `length` chars if it accepts it.
  private peekWordOrSymbol(): { type: 'word' | 'symbol'; value: string; raw: string; length: number } | null {
    this.skipWs();
    if (this.idx >= this.src.length) return null;
    const ch = this.src[this.idx];
    if (/[A-Za-z]/.test(ch)) {
      const start = this.idx;
      const word = this.readWord();
      if (!word) return null;
      const lower = word.toLowerCase();
      // Always rewind; the caller will consume if it uses this token
      const len = word.length;
      this.idx = start;
      if (lower === 'and' || lower === 'or') {
        return { type: 'word', value: lower, raw: word, length: len };
      }
      return null;
    }
    if (ch === '&' || ch === '|' || ch === ',') {
      return { type: 'symbol', value: ch, raw: ch, length: 1 };
    }
    return null;
  }

  // Advances the cursor past any whitespace.
  private skipWs() {
    while (this.idx < this.src.length && /\s/.test(this.src[this.idx])) this.idx++;
  }

  // True when the source continues with `s` at the cursor (no consumption).
  private peek(s: string): boolean {
    return this.src.startsWith(s, this.idx);
  }

  // Advances the cursor by n characters.
  private consume(n: number) {
    this.idx += n;
  }
}
|
||||
|
||||
function union(a: Set<number>, b: Set<number>): Set<number> {
|
||||
if (a.size === 0) return new Set(b);
|
||||
if (b.size === 0) return new Set(a);
|
||||
const out = new Set<number>(a);
|
||||
for (const v of b) out.add(v);
|
||||
return out;
|
||||
}
|
||||
|
||||
function intersect(a: Set<number>, b: Set<number>): Set<number> {
|
||||
if (a.size === 0 || b.size === 0) return new Set<number>();
|
||||
const out = new Set<number>();
|
||||
const [small, large] = a.size <= b.size ? [a, b] : [b, a];
|
||||
for (const v of small) if (large.has(v)) out.add(v);
|
||||
return out;
|
||||
}
|
||||
|
||||
function complement(a: Set<number>, max: number): Set<number> {
|
||||
const out = new Set<number>();
|
||||
for (let i = 1; i <= max; i++) if (!a.has(i)) out.add(i);
|
||||
return out;
|
||||
}
|
||||
|
||||
|
||||
31
frontend/src/core/utils/clickHandlers.ts
Normal file
31
frontend/src/core/utils/clickHandlers.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
/**
|
||||
* Utility functions for handling click events in navigation components
|
||||
*/
|
||||
|
||||
/**
|
||||
* Determines if a click event is a "special" click that should use browser's default navigation
|
||||
* instead of SPA navigation. Special clicks include:
|
||||
* - Ctrl+click (or Cmd+click on Mac)
|
||||
* - Shift+click
|
||||
* - Middle mouse button click
|
||||
*/
|
||||
export function isSpecialClick(e: React.MouseEvent): boolean {
|
||||
return e.metaKey || e.ctrlKey || e.shiftKey || e.button === 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles a click event for SPA navigation, but allows special clicks to use browser defaults
|
||||
*
|
||||
* @param e - The click event
|
||||
* @param handleClick - Function to execute for regular clicks (SPA navigation)
|
||||
* @returns true if the event was handled as a special click, false if it was handled as regular click
|
||||
*/
|
||||
export function handleUnlessSpecialClick(e: React.MouseEvent, handleClick: () => void): boolean {
|
||||
if (isSpecialClick(e)) {
|
||||
return true; // Let browser handle via href
|
||||
}
|
||||
|
||||
e.preventDefault();
|
||||
handleClick();
|
||||
return false;
|
||||
}
|
||||
334
frontend/src/core/utils/convertUtils.test.ts
Normal file
334
frontend/src/core/utils/convertUtils.test.ts
Normal file
@@ -0,0 +1,334 @@
|
||||
/**
|
||||
* Unit tests for convertUtils
|
||||
*/
|
||||
|
||||
import { describe, test, expect } from 'vitest';
|
||||
import {
|
||||
getEndpointName,
|
||||
getEndpointUrl,
|
||||
isConversionSupported,
|
||||
isImageFormat
|
||||
} from '@app/utils/convertUtils';
|
||||
|
||||
describe('convertUtils', () => {
|
||||
|
||||
describe('getEndpointName', () => {
|
||||
|
||||
test('should return correct endpoint names for all supported conversions', () => {
|
||||
// PDF to Image formats
|
||||
expect(getEndpointName('pdf', 'png')).toBe('pdf-to-img');
|
||||
expect(getEndpointName('pdf', 'jpg')).toBe('pdf-to-img');
|
||||
expect(getEndpointName('pdf', 'gif')).toBe('pdf-to-img');
|
||||
expect(getEndpointName('pdf', 'tiff')).toBe('pdf-to-img');
|
||||
expect(getEndpointName('pdf', 'bmp')).toBe('pdf-to-img');
|
||||
expect(getEndpointName('pdf', 'webp')).toBe('pdf-to-img');
|
||||
|
||||
// PDF to Office formats
|
||||
expect(getEndpointName('pdf', 'docx')).toBe('pdf-to-word');
|
||||
expect(getEndpointName('pdf', 'odt')).toBe('pdf-to-word');
|
||||
expect(getEndpointName('pdf', 'pptx')).toBe('pdf-to-presentation');
|
||||
expect(getEndpointName('pdf', 'odp')).toBe('pdf-to-presentation');
|
||||
|
||||
// PDF to Data formats
|
||||
expect(getEndpointName('pdf', 'csv')).toBe('pdf-to-csv');
|
||||
expect(getEndpointName('pdf', 'txt')).toBe('pdf-to-text');
|
||||
expect(getEndpointName('pdf', 'rtf')).toBe('pdf-to-text');
|
||||
expect(getEndpointName('pdf', 'md')).toBe('pdf-to-markdown');
|
||||
|
||||
// PDF to Web formats
|
||||
expect(getEndpointName('pdf', 'html')).toBe('pdf-to-html');
|
||||
expect(getEndpointName('pdf', 'xml')).toBe('pdf-to-xml');
|
||||
|
||||
// PDF to PDF/A
|
||||
expect(getEndpointName('pdf', 'pdfa')).toBe('pdf-to-pdfa');
|
||||
|
||||
// Office Documents to PDF
|
||||
expect(getEndpointName('docx', 'pdf')).toBe('file-to-pdf');
|
||||
expect(getEndpointName('doc', 'pdf')).toBe('file-to-pdf');
|
||||
expect(getEndpointName('odt', 'pdf')).toBe('file-to-pdf');
|
||||
|
||||
// Spreadsheets to PDF
|
||||
expect(getEndpointName('xlsx', 'pdf')).toBe('file-to-pdf');
|
||||
expect(getEndpointName('xls', 'pdf')).toBe('file-to-pdf');
|
||||
expect(getEndpointName('ods', 'pdf')).toBe('file-to-pdf');
|
||||
|
||||
// Presentations to PDF
|
||||
expect(getEndpointName('pptx', 'pdf')).toBe('file-to-pdf');
|
||||
expect(getEndpointName('ppt', 'pdf')).toBe('file-to-pdf');
|
||||
expect(getEndpointName('odp', 'pdf')).toBe('file-to-pdf');
|
||||
|
||||
// Images to PDF
|
||||
expect(getEndpointName('jpg', 'pdf')).toBe('img-to-pdf');
|
||||
expect(getEndpointName('jpeg', 'pdf')).toBe('img-to-pdf');
|
||||
expect(getEndpointName('png', 'pdf')).toBe('img-to-pdf');
|
||||
expect(getEndpointName('gif', 'pdf')).toBe('img-to-pdf');
|
||||
expect(getEndpointName('bmp', 'pdf')).toBe('img-to-pdf');
|
||||
expect(getEndpointName('tiff', 'pdf')).toBe('img-to-pdf');
|
||||
expect(getEndpointName('webp', 'pdf')).toBe('img-to-pdf');
|
||||
|
||||
// Web formats to PDF
|
||||
expect(getEndpointName('html', 'pdf')).toBe('html-to-pdf');
|
||||
|
||||
// Markdown to PDF
|
||||
expect(getEndpointName('md', 'pdf')).toBe('markdown-to-pdf');
|
||||
|
||||
// Text formats to PDF
|
||||
expect(getEndpointName('txt', 'pdf')).toBe('file-to-pdf');
|
||||
expect(getEndpointName('rtf', 'pdf')).toBe('file-to-pdf');
|
||||
|
||||
// Email to PDF
|
||||
expect(getEndpointName('eml', 'pdf')).toBe('eml-to-pdf');
|
||||
});
|
||||
|
||||
test('should return empty string for unsupported conversions', () => {
|
||||
expect(getEndpointName('pdf', 'exe')).toBe('');
|
||||
expect(getEndpointName('wav', 'pdf')).toBe('file-to-pdf'); // Try using file to pdf as fallback
|
||||
expect(getEndpointName('png', 'docx')).toBe(''); // Images can't convert to Word docs
|
||||
});
|
||||
|
||||
test('should handle empty or invalid inputs', () => {
|
||||
expect(getEndpointName('', '')).toBe('');
|
||||
expect(getEndpointName('pdf', '')).toBe('');
|
||||
expect(getEndpointName('', 'pdf')).toBe('');
|
||||
expect(getEndpointName('nonexistent', 'alsononexistent')).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getEndpointUrl', () => {
|
||||
|
||||
test('should return correct endpoint URLs for all supported conversions', () => {
|
||||
// PDF to Image formats
|
||||
expect(getEndpointUrl('pdf', 'png')).toBe('/api/v1/convert/pdf/img');
|
||||
expect(getEndpointUrl('pdf', 'jpg')).toBe('/api/v1/convert/pdf/img');
|
||||
expect(getEndpointUrl('pdf', 'gif')).toBe('/api/v1/convert/pdf/img');
|
||||
expect(getEndpointUrl('pdf', 'tiff')).toBe('/api/v1/convert/pdf/img');
|
||||
expect(getEndpointUrl('pdf', 'bmp')).toBe('/api/v1/convert/pdf/img');
|
||||
expect(getEndpointUrl('pdf', 'webp')).toBe('/api/v1/convert/pdf/img');
|
||||
|
||||
// PDF to Office formats
|
||||
expect(getEndpointUrl('pdf', 'docx')).toBe('/api/v1/convert/pdf/word');
|
||||
expect(getEndpointUrl('pdf', 'odt')).toBe('/api/v1/convert/pdf/word');
|
||||
expect(getEndpointUrl('pdf', 'pptx')).toBe('/api/v1/convert/pdf/presentation');
|
||||
expect(getEndpointUrl('pdf', 'odp')).toBe('/api/v1/convert/pdf/presentation');
|
||||
|
||||
// PDF to Data formats
|
||||
expect(getEndpointUrl('pdf', 'csv')).toBe('/api/v1/convert/pdf/csv');
|
||||
expect(getEndpointUrl('pdf', 'txt')).toBe('/api/v1/convert/pdf/text');
|
||||
expect(getEndpointUrl('pdf', 'rtf')).toBe('/api/v1/convert/pdf/text');
|
||||
expect(getEndpointUrl('pdf', 'md')).toBe('/api/v1/convert/pdf/markdown');
|
||||
|
||||
// PDF to Web formats
|
||||
expect(getEndpointUrl('pdf', 'html')).toBe('/api/v1/convert/pdf/html');
|
||||
expect(getEndpointUrl('pdf', 'xml')).toBe('/api/v1/convert/pdf/xml');
|
||||
|
||||
// PDF to PDF/A
|
||||
expect(getEndpointUrl('pdf', 'pdfa')).toBe('/api/v1/convert/pdf/pdfa');
|
||||
|
||||
// Office Documents to PDF
|
||||
expect(getEndpointUrl('docx', 'pdf')).toBe('/api/v1/convert/file/pdf');
|
||||
expect(getEndpointUrl('doc', 'pdf')).toBe('/api/v1/convert/file/pdf');
|
||||
expect(getEndpointUrl('odt', 'pdf')).toBe('/api/v1/convert/file/pdf');
|
||||
|
||||
// Spreadsheets to PDF
|
||||
expect(getEndpointUrl('xlsx', 'pdf')).toBe('/api/v1/convert/file/pdf');
|
||||
expect(getEndpointUrl('xls', 'pdf')).toBe('/api/v1/convert/file/pdf');
|
||||
expect(getEndpointUrl('ods', 'pdf')).toBe('/api/v1/convert/file/pdf');
|
||||
|
||||
// Presentations to PDF
|
||||
expect(getEndpointUrl('pptx', 'pdf')).toBe('/api/v1/convert/file/pdf');
|
||||
expect(getEndpointUrl('ppt', 'pdf')).toBe('/api/v1/convert/file/pdf');
|
||||
expect(getEndpointUrl('odp', 'pdf')).toBe('/api/v1/convert/file/pdf');
|
||||
|
||||
// Images to PDF
|
||||
expect(getEndpointUrl('jpg', 'pdf')).toBe('/api/v1/convert/img/pdf');
|
||||
expect(getEndpointUrl('jpeg', 'pdf')).toBe('/api/v1/convert/img/pdf');
|
||||
expect(getEndpointUrl('png', 'pdf')).toBe('/api/v1/convert/img/pdf');
|
||||
expect(getEndpointUrl('gif', 'pdf')).toBe('/api/v1/convert/img/pdf');
|
||||
expect(getEndpointUrl('bmp', 'pdf')).toBe('/api/v1/convert/img/pdf');
|
||||
expect(getEndpointUrl('tiff', 'pdf')).toBe('/api/v1/convert/img/pdf');
|
||||
expect(getEndpointUrl('webp', 'pdf')).toBe('/api/v1/convert/img/pdf');
|
||||
|
||||
// Web formats to PDF
|
||||
expect(getEndpointUrl('html', 'pdf')).toBe('/api/v1/convert/html/pdf');
|
||||
|
||||
// Markdown to PDF
|
||||
expect(getEndpointUrl('md', 'pdf')).toBe('/api/v1/convert/markdown/pdf');
|
||||
|
||||
// Text formats to PDF
|
||||
expect(getEndpointUrl('txt', 'pdf')).toBe('/api/v1/convert/file/pdf');
|
||||
expect(getEndpointUrl('rtf', 'pdf')).toBe('/api/v1/convert/file/pdf');
|
||||
|
||||
// Email to PDF
|
||||
expect(getEndpointUrl('eml', 'pdf')).toBe('/api/v1/convert/eml/pdf');
|
||||
});
|
||||
|
||||
test('should return empty string for unsupported conversions', () => {
|
||||
expect(getEndpointUrl('pdf', 'exe')).toBe('');
|
||||
expect(getEndpointUrl('wav', 'pdf')).toBe('/api/v1/convert/file/pdf'); // Try using file to pdf as fallback
|
||||
expect(getEndpointUrl('invalid', 'invalid')).toBe('');
|
||||
});
|
||||
|
||||
test('should handle empty inputs', () => {
|
||||
expect(getEndpointUrl('', '')).toBe('');
|
||||
expect(getEndpointUrl('pdf', '')).toBe('');
|
||||
expect(getEndpointUrl('', 'pdf')).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('isConversionSupported', () => {
|
||||
|
||||
test('should return true for all supported conversions', () => {
|
||||
// PDF to Image formats
|
||||
expect(isConversionSupported('pdf', 'png')).toBe(true);
|
||||
expect(isConversionSupported('pdf', 'jpg')).toBe(true);
|
||||
expect(isConversionSupported('pdf', 'gif')).toBe(true);
|
||||
expect(isConversionSupported('pdf', 'tiff')).toBe(true);
|
||||
expect(isConversionSupported('pdf', 'bmp')).toBe(true);
|
||||
expect(isConversionSupported('pdf', 'webp')).toBe(true);
|
||||
|
||||
// PDF to Office formats
|
||||
expect(isConversionSupported('pdf', 'docx')).toBe(true);
|
||||
expect(isConversionSupported('pdf', 'odt')).toBe(true);
|
||||
expect(isConversionSupported('pdf', 'pptx')).toBe(true);
|
||||
expect(isConversionSupported('pdf', 'odp')).toBe(true);
|
||||
|
||||
// PDF to Data formats
|
||||
expect(isConversionSupported('pdf', 'csv')).toBe(true);
|
||||
expect(isConversionSupported('pdf', 'txt')).toBe(true);
|
||||
expect(isConversionSupported('pdf', 'rtf')).toBe(true);
|
||||
expect(isConversionSupported('pdf', 'md')).toBe(true);
|
||||
|
||||
// PDF to Web formats
|
||||
expect(isConversionSupported('pdf', 'html')).toBe(true);
|
||||
expect(isConversionSupported('pdf', 'xml')).toBe(true);
|
||||
|
||||
// PDF to PDF/A
|
||||
expect(isConversionSupported('pdf', 'pdfa')).toBe(true);
|
||||
|
||||
// Office Documents to PDF
|
||||
expect(isConversionSupported('docx', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('doc', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('odt', 'pdf')).toBe(true);
|
||||
|
||||
// Spreadsheets to PDF
|
||||
expect(isConversionSupported('xlsx', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('xls', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('ods', 'pdf')).toBe(true);
|
||||
|
||||
// Presentations to PDF
|
||||
expect(isConversionSupported('pptx', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('ppt', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('odp', 'pdf')).toBe(true);
|
||||
|
||||
// Images to PDF
|
||||
expect(isConversionSupported('jpg', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('jpeg', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('png', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('gif', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('bmp', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('tiff', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('webp', 'pdf')).toBe(true);
|
||||
|
||||
// Web formats to PDF
|
||||
expect(isConversionSupported('html', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('htm', 'pdf')).toBe(true);
|
||||
|
||||
// Markdown to PDF
|
||||
expect(isConversionSupported('md', 'pdf')).toBe(true);
|
||||
|
||||
// Text formats to PDF
|
||||
expect(isConversionSupported('txt', 'pdf')).toBe(true);
|
||||
expect(isConversionSupported('rtf', 'pdf')).toBe(true);
|
||||
|
||||
// Email to PDF
|
||||
expect(isConversionSupported('eml', 'pdf')).toBe(true);
|
||||
});
|
||||
|
||||
test('should return false for unsupported conversions', () => {
|
||||
expect(isConversionSupported('pdf', 'exe')).toBe(false);
|
||||
expect(isConversionSupported('wav', 'pdf')).toBe(true); // Fallback to file to pdf
|
||||
expect(isConversionSupported('png', 'docx')).toBe(false);
|
||||
expect(isConversionSupported('nonexistent', 'alsononexistent')).toBe(false);
|
||||
});
|
||||
|
||||
test('should handle empty inputs', () => {
|
||||
expect(isConversionSupported('', '')).toBe(false);
|
||||
expect(isConversionSupported('pdf', '')).toBe(false);
|
||||
expect(isConversionSupported('', 'pdf')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isImageFormat', () => {
|
||||
|
||||
test('should return true for image formats', () => {
|
||||
expect(isImageFormat('png')).toBe(true);
|
||||
expect(isImageFormat('jpg')).toBe(true);
|
||||
expect(isImageFormat('jpeg')).toBe(true);
|
||||
expect(isImageFormat('gif')).toBe(true);
|
||||
expect(isImageFormat('tiff')).toBe(true);
|
||||
expect(isImageFormat('bmp')).toBe(true);
|
||||
expect(isImageFormat('webp')).toBe(true);
|
||||
});
|
||||
|
||||
test('should return false for non-image formats', () => {
|
||||
expect(isImageFormat('pdf')).toBe(false);
|
||||
expect(isImageFormat('docx')).toBe(false);
|
||||
expect(isImageFormat('txt')).toBe(false);
|
||||
expect(isImageFormat('csv')).toBe(false);
|
||||
expect(isImageFormat('html')).toBe(false);
|
||||
expect(isImageFormat('xml')).toBe(false);
|
||||
});
|
||||
|
||||
test('should handle case insensitivity', () => {
|
||||
expect(isImageFormat('PNG')).toBe(true);
|
||||
expect(isImageFormat('JPG')).toBe(true);
|
||||
expect(isImageFormat('JPEG')).toBe(true);
|
||||
expect(isImageFormat('Png')).toBe(true);
|
||||
expect(isImageFormat('JpG')).toBe(true);
|
||||
});
|
||||
|
||||
test('should handle empty and invalid inputs', () => {
|
||||
expect(isImageFormat('')).toBe(false);
|
||||
expect(isImageFormat('invalid')).toBe(false);
|
||||
expect(isImageFormat('123')).toBe(false);
|
||||
expect(isImageFormat('.')).toBe(false);
|
||||
});
|
||||
|
||||
test('should handle mixed case and edge cases', () => {
|
||||
expect(isImageFormat('webP')).toBe(true);
|
||||
expect(isImageFormat('WEBP')).toBe(true);
|
||||
expect(isImageFormat('tIFf')).toBe(true);
|
||||
expect(isImageFormat('bMp')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases and Error Handling', () => {
|
||||
|
||||
test('should handle null and undefined inputs gracefully', () => {
|
||||
// Note: TypeScript prevents these, but test runtime behavior for robustness
|
||||
// The current implementation handles these gracefully by returning falsy values
|
||||
expect(getEndpointName(null as any, null as any)).toBe('');
|
||||
expect(getEndpointUrl(undefined as any, undefined as any)).toBe('');
|
||||
expect(isConversionSupported(null as any, null as any)).toBe(false);
|
||||
|
||||
// isImageFormat will throw because it calls toLowerCase() on null/undefined
|
||||
expect(() => isImageFormat(null as any)).toThrow();
|
||||
expect(() => isImageFormat(undefined as any)).toThrow();
|
||||
});
|
||||
|
||||
test('should handle special characters in file extensions', () => {
|
||||
expect(isImageFormat('png@')).toBe(false);
|
||||
expect(isImageFormat('jpg#')).toBe(false);
|
||||
expect(isImageFormat('png.')).toBe(false);
|
||||
expect(getEndpointName('pdf@', 'png')).toBe('');
|
||||
expect(getEndpointName('pdf', 'png#')).toBe('');
|
||||
});
|
||||
|
||||
test('should handle very long extension names', () => {
|
||||
const longExtension = 'a'.repeat(100);
|
||||
expect(isImageFormat(longExtension)).toBe(false);
|
||||
expect(getEndpointName('pdf', longExtension)).toBe('');
|
||||
expect(getEndpointName(longExtension, 'pdf')).toBe('file-to-pdf'); // Fallback to file to pdf
|
||||
});
|
||||
});
|
||||
});
|
||||
90
frontend/src/core/utils/convertUtils.ts
Normal file
90
frontend/src/core/utils/convertUtils.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import {
|
||||
CONVERSION_ENDPOINTS,
|
||||
ENDPOINT_NAMES,
|
||||
EXTENSION_TO_ENDPOINT,
|
||||
CONVERSION_MATRIX,
|
||||
TO_FORMAT_OPTIONS
|
||||
} from '@app/constants/convertConstants';
|
||||
|
||||
/**
|
||||
* Resolves the endpoint name for a given conversion
|
||||
*/
|
||||
export const getEndpointName = (fromExtension: string, toExtension: string): string => {
|
||||
if (!fromExtension || !toExtension) return '';
|
||||
|
||||
let endpointKey = EXTENSION_TO_ENDPOINT[fromExtension]?.[toExtension];
|
||||
|
||||
// If no explicit mapping exists and we're converting to PDF,
|
||||
// fall back to 'any' which uses file-to-pdf endpoint
|
||||
if (!endpointKey && toExtension === 'pdf' && fromExtension !== 'any') {
|
||||
endpointKey = EXTENSION_TO_ENDPOINT['any']?.[toExtension];
|
||||
}
|
||||
|
||||
return endpointKey || '';
|
||||
};
|
||||
|
||||
/**
|
||||
* Resolves the full endpoint URL for a given conversion
|
||||
*/
|
||||
export const getEndpointUrl = (fromExtension: string, toExtension: string): string => {
|
||||
const endpointName = getEndpointName(fromExtension, toExtension);
|
||||
if (!endpointName) return '';
|
||||
|
||||
// Find the endpoint URL from CONVERSION_ENDPOINTS using the endpoint name
|
||||
for (const [key, endpoint] of Object.entries(CONVERSION_ENDPOINTS)) {
|
||||
if (ENDPOINT_NAMES[key as keyof typeof ENDPOINT_NAMES] === endpointName) {
|
||||
return endpoint;
|
||||
}
|
||||
}
|
||||
return '';
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks if a conversion is supported
|
||||
*/
|
||||
export const isConversionSupported = (fromExtension: string, toExtension: string): boolean => {
|
||||
return getEndpointName(fromExtension, toExtension) !== '';
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks if the given extension is an image format
|
||||
*/
|
||||
export const isImageFormat = (extension: string): boolean => {
|
||||
return ['png', 'jpg', 'jpeg', 'gif', 'tiff', 'bmp', 'webp', 'svg'].includes(extension.toLowerCase());
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks if the given extension is a web format
|
||||
*/
|
||||
export const isWebFormat = (extension: string): boolean => {
|
||||
return ['html', 'zip'].includes(extension.toLowerCase());
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets available target extensions for a given source extension
|
||||
* Extracted from useConvertParameters to be reusable in automation settings
|
||||
*/
|
||||
export const getAvailableToExtensions = (fromExtension: string): Array<{value: string, label: string, group: string}> => {
|
||||
if (!fromExtension) return [];
|
||||
|
||||
// Handle dynamic format identifiers (file-<extension>)
|
||||
if (fromExtension.startsWith('file-')) {
|
||||
// Dynamic format - use 'any' conversion options (file-to-pdf)
|
||||
const supportedExtensions = CONVERSION_MATRIX['any'] || [];
|
||||
return TO_FORMAT_OPTIONS.filter(option =>
|
||||
supportedExtensions.includes(option.value)
|
||||
);
|
||||
}
|
||||
|
||||
let supportedExtensions = CONVERSION_MATRIX[fromExtension] || [];
|
||||
|
||||
// If no explicit conversion exists, but file-to-pdf might be available,
|
||||
// fall back to 'any' conversion (which converts unknown files to PDF via file-to-pdf)
|
||||
if (supportedExtensions.length === 0 && fromExtension !== 'any') {
|
||||
supportedExtensions = CONVERSION_MATRIX['any'] || [];
|
||||
}
|
||||
|
||||
return TO_FORMAT_OPTIONS.filter(option =>
|
||||
supportedExtensions.includes(option.value)
|
||||
);
|
||||
};
|
||||
218
frontend/src/core/utils/cropCoordinates.ts
Normal file
218
frontend/src/core/utils/cropCoordinates.ts
Normal file
@@ -0,0 +1,218 @@
|
||||
/**
|
||||
* Utility functions for crop coordinate conversion and PDF bounds handling
|
||||
*/
|
||||
|
||||
/**
 * Geometry needed to map between thumbnail (pixel) space and actual PDF
 * (point) space. Produced by calculatePDFBounds.
 */
export interface PDFBounds {
  /** PDF width in points (actual PDF dimensions) */
  actualWidth: number;
  /** PDF height in points (actual PDF dimensions) */
  actualHeight: number;
  /** Thumbnail display width in pixels */
  thumbnailWidth: number;
  /** Thumbnail display height in pixels */
  thumbnailHeight: number;
  /** Horizontal offset for centering thumbnail in container */
  offsetX: number;
  /** Vertical offset for centering thumbnail in container */
  offsetY: number;
  /** Scale factor: thumbnailSize / actualSize */
  scale: number;
}
|
||||
|
||||
export interface Rectangle {
|
||||
/** X coordinate */
|
||||
x: number;
|
||||
/** Y coordinate */
|
||||
y: number;
|
||||
/** Width */
|
||||
width: number;
|
||||
/** Height */
|
||||
height: number;
|
||||
}
|
||||
|
||||
/** Runtime type guard */
|
||||
export function isRectangle(value: unknown): value is Rectangle {
|
||||
if (value === null || typeof value !== "object") return false;
|
||||
|
||||
const r = value as Record<string, unknown>;
|
||||
const isNum = (n: unknown): n is number =>
|
||||
typeof n === "number" && Number.isFinite(n);
|
||||
|
||||
return (
|
||||
isNum(r.x) &&
|
||||
isNum(r.y) &&
|
||||
isNum(r.width) &&
|
||||
isNum(r.height) &&
|
||||
r.width >= 0 &&
|
||||
r.height >= 0
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate PDF bounds for coordinate conversion based on thumbnail dimensions
|
||||
*/
|
||||
export const calculatePDFBounds = (
|
||||
actualPDFWidth: number,
|
||||
actualPDFHeight: number,
|
||||
containerWidth: number,
|
||||
containerHeight: number
|
||||
): PDFBounds => {
|
||||
// Calculate scale to fit PDF within container while maintaining aspect ratio
|
||||
const scaleX = containerWidth / actualPDFWidth;
|
||||
const scaleY = containerHeight / actualPDFHeight;
|
||||
const scale = Math.min(scaleX, scaleY);
|
||||
|
||||
// Calculate actual thumbnail display size
|
||||
const thumbnailWidth = actualPDFWidth * scale;
|
||||
const thumbnailHeight = actualPDFHeight * scale;
|
||||
|
||||
// Calculate centering offsets - these represent where the thumbnail is positioned within the container
|
||||
const offsetX = (containerWidth - thumbnailWidth) / 2;
|
||||
const offsetY = (containerHeight - thumbnailHeight) / 2;
|
||||
|
||||
return {
|
||||
actualWidth: actualPDFWidth,
|
||||
actualHeight: actualPDFHeight,
|
||||
thumbnailWidth,
|
||||
thumbnailHeight,
|
||||
offsetX,
|
||||
offsetY,
|
||||
scale
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert DOM coordinates (relative to container) to PDF coordinates
|
||||
* Handles coordinate system conversion (DOM uses top-left, PDF uses bottom-left origin)
|
||||
*/
|
||||
export const domToPDFCoordinates = (
|
||||
domRect: Rectangle,
|
||||
pdfBounds: PDFBounds
|
||||
): Rectangle => {
|
||||
// Convert DOM coordinates to thumbnail-relative coordinates
|
||||
const thumbX = domRect.x - pdfBounds.offsetX;
|
||||
const thumbY = domRect.y - pdfBounds.offsetY;
|
||||
|
||||
// Convert to PDF coordinates (scale and flip Y-axis)
|
||||
const pdfX = thumbX / pdfBounds.scale;
|
||||
const pdfY = pdfBounds.actualHeight - ((thumbY + domRect.height) / pdfBounds.scale);
|
||||
const pdfWidth = domRect.width / pdfBounds.scale;
|
||||
const pdfHeight = domRect.height / pdfBounds.scale;
|
||||
|
||||
return {
|
||||
x: pdfX,
|
||||
y: pdfY,
|
||||
width: pdfWidth,
|
||||
height: pdfHeight
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert PDF coordinates to DOM coordinates (relative to container)
|
||||
*/
|
||||
export const pdfToDOMCoordinates = (
|
||||
cropArea: Rectangle,
|
||||
pdfBounds: PDFBounds
|
||||
): Rectangle => {
|
||||
// Convert PDF coordinates to thumbnail coordinates (scale and flip Y-axis)
|
||||
const thumbX = cropArea.x * pdfBounds.scale;
|
||||
const thumbY = (pdfBounds.actualHeight - cropArea.y - cropArea.height) * pdfBounds.scale;
|
||||
const thumbWidth = cropArea.width * pdfBounds.scale;
|
||||
const thumbHeight = cropArea.height * pdfBounds.scale;
|
||||
|
||||
// Add container offsets to get DOM coordinates
|
||||
return {
|
||||
x: thumbX + pdfBounds.offsetX,
|
||||
y: thumbY + pdfBounds.offsetY,
|
||||
width: thumbWidth,
|
||||
height: thumbHeight
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Constrain a crop area to stay within PDF bounds
|
||||
*/
|
||||
export const constrainCropAreaToPDF = (
|
||||
cropArea: Rectangle,
|
||||
pdfBounds: PDFBounds
|
||||
): Rectangle => {
|
||||
// Ensure crop area doesn't extend beyond PDF boundaries
|
||||
const maxX = Math.max(0, pdfBounds.actualWidth - cropArea.width);
|
||||
const maxY = Math.max(0, pdfBounds.actualHeight - cropArea.height);
|
||||
|
||||
return {
|
||||
x: Math.max(0, Math.min(cropArea.x, maxX)),
|
||||
y: Math.max(0, Math.min(cropArea.y, maxY)),
|
||||
width: Math.min(cropArea.width, pdfBounds.actualWidth - Math.max(0, cropArea.x)),
|
||||
height: Math.min(cropArea.height, pdfBounds.actualHeight - Math.max(0, cropArea.y))
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Constrain DOM coordinates to stay within thumbnail bounds
|
||||
*/
|
||||
export const constrainDOMRectToThumbnail = (
|
||||
domRect: Rectangle,
|
||||
pdfBounds: PDFBounds
|
||||
): Rectangle => {
|
||||
const thumbnailLeft = pdfBounds.offsetX;
|
||||
const thumbnailTop = pdfBounds.offsetY;
|
||||
const thumbnailRight = pdfBounds.offsetX + pdfBounds.thumbnailWidth;
|
||||
const thumbnailBottom = pdfBounds.offsetY + pdfBounds.thumbnailHeight;
|
||||
|
||||
// Constrain position
|
||||
const maxX = Math.max(thumbnailLeft, thumbnailRight - domRect.width);
|
||||
const maxY = Math.max(thumbnailTop, thumbnailBottom - domRect.height);
|
||||
|
||||
const constrainedX = Math.max(thumbnailLeft, Math.min(domRect.x, maxX));
|
||||
const constrainedY = Math.max(thumbnailTop, Math.min(domRect.y, maxY));
|
||||
|
||||
// Constrain size to fit within thumbnail bounds from current position
|
||||
const maxWidth = thumbnailRight - constrainedX;
|
||||
const maxHeight = thumbnailBottom - constrainedY;
|
||||
|
||||
return {
|
||||
x: constrainedX,
|
||||
y: constrainedY,
|
||||
width: Math.min(domRect.width, maxWidth),
|
||||
height: Math.min(domRect.height, maxHeight)
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Check if a point is within the thumbnail area (not just the container)
|
||||
*/
|
||||
export const isPointInThumbnail = (
|
||||
x: number,
|
||||
y: number,
|
||||
pdfBounds: PDFBounds
|
||||
): boolean => {
|
||||
return x >= pdfBounds.offsetX &&
|
||||
x <= pdfBounds.offsetX + pdfBounds.thumbnailWidth &&
|
||||
y >= pdfBounds.offsetY &&
|
||||
y <= pdfBounds.offsetY + pdfBounds.thumbnailHeight;
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a default crop area that covers the entire PDF
|
||||
*/
|
||||
export const createFullPDFCropArea = (pdfBounds: PDFBounds): Rectangle => {
|
||||
return {
|
||||
x: 0,
|
||||
y: 0,
|
||||
width: pdfBounds.actualWidth,
|
||||
height: pdfBounds.actualHeight
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Round crop coordinates to reasonable precision (0.1 point)
|
||||
*/
|
||||
export const roundCropArea = (cropArea: Rectangle): Rectangle => {
|
||||
return {
|
||||
x: Math.round(cropArea.x * 10) / 10,
|
||||
y: Math.round(cropArea.y * 10) / 10,
|
||||
width: Math.round(cropArea.width * 10) / 10,
|
||||
height: Math.round(cropArea.height * 10) / 10
|
||||
};
|
||||
};
|
||||
149
frontend/src/core/utils/downloadUtils.ts
Normal file
149
frontend/src/core/utils/downloadUtils.ts
Normal file
@@ -0,0 +1,149 @@
|
||||
import { StirlingFileStub } from '@app/types/fileContext';
|
||||
import { fileStorage } from '@app/services/fileStorage';
|
||||
import { zipFileService } from '@app/services/zipFileService';
|
||||
|
||||
/**
|
||||
* Downloads a blob as a file using browser download API
|
||||
* @param blob - The blob to download
|
||||
* @param filename - The filename for the download
|
||||
*/
|
||||
export function downloadBlob(blob: Blob, filename: string): void {
|
||||
const url = URL.createObjectURL(blob);
|
||||
|
||||
const link = document.createElement('a');
|
||||
link.href = url;
|
||||
link.download = filename;
|
||||
document.body.appendChild(link);
|
||||
link.click();
|
||||
document.body.removeChild(link);
|
||||
|
||||
// Clean up the blob URL
|
||||
URL.revokeObjectURL(url);
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads a single file from IndexedDB storage
|
||||
* @param file - The file object with storage information
|
||||
* @throws Error if file cannot be retrieved from storage
|
||||
*/
|
||||
export async function downloadFileFromStorage(file: StirlingFileStub): Promise<void> {
|
||||
const lookupKey = file.id;
|
||||
const stirlingFile = await fileStorage.getStirlingFile(lookupKey);
|
||||
|
||||
if (!stirlingFile) {
|
||||
throw new Error(`File "${file.name}" not found in storage`);
|
||||
}
|
||||
|
||||
// StirlingFile is already a File object, just download it
|
||||
downloadBlob(stirlingFile, stirlingFile.name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads multiple files as individual downloads
|
||||
* @param files - Array of files to download
|
||||
*/
|
||||
export async function downloadMultipleFiles(files: StirlingFileStub[]): Promise<void> {
|
||||
for (const file of files) {
|
||||
await downloadFileFromStorage(file);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads multiple files as a single ZIP archive
|
||||
* @param files - Array of files to include in ZIP
|
||||
* @param zipFilename - Optional custom ZIP filename (defaults to timestamped name)
|
||||
*/
|
||||
export async function downloadFilesAsZip(files: StirlingFileStub[], zipFilename?: string): Promise<void> {
|
||||
if (files.length === 0) {
|
||||
throw new Error('No files provided for ZIP download');
|
||||
}
|
||||
|
||||
// Convert stored files to File objects
|
||||
const filesToZip: File[] = [];
|
||||
for (const fileWithUrl of files) {
|
||||
const lookupKey = fileWithUrl.id;
|
||||
const stirlingFile = await fileStorage.getStirlingFile(lookupKey);
|
||||
|
||||
if (stirlingFile) {
|
||||
// StirlingFile is already a File object!
|
||||
filesToZip.push(stirlingFile);
|
||||
}
|
||||
}
|
||||
|
||||
if (filesToZip.length === 0) {
|
||||
throw new Error('No valid files found in storage for ZIP download');
|
||||
}
|
||||
|
||||
// Generate default filename if not provided
|
||||
const finalZipFilename = zipFilename ||
|
||||
`files-${new Date().toISOString().slice(0, 19).replace(/[:-]/g, '')}.zip`;
|
||||
|
||||
// Create and download ZIP
|
||||
const { zipFile } = await zipFileService.createZipFromFiles(filesToZip, finalZipFilename);
|
||||
downloadBlob(zipFile, finalZipFilename);
|
||||
}
|
||||
|
||||
/**
|
||||
* Smart download function that handles single or multiple files appropriately
|
||||
* - Single file: Downloads directly
|
||||
* - Multiple files: Downloads as ZIP
|
||||
* @param files - Array of files to download
|
||||
* @param options - Download options
|
||||
*/
|
||||
export async function downloadFiles(
|
||||
files: StirlingFileStub[],
|
||||
options: {
|
||||
forceZip?: boolean;
|
||||
zipFilename?: string;
|
||||
multipleAsIndividual?: boolean;
|
||||
} = {}
|
||||
): Promise<void> {
|
||||
if (files.length === 0) {
|
||||
throw new Error('No files provided for download');
|
||||
}
|
||||
|
||||
if (files.length === 1 && !options.forceZip) {
|
||||
// Single file download
|
||||
await downloadFileFromStorage(files[0]);
|
||||
} else if (options.multipleAsIndividual) {
|
||||
// Multiple individual downloads
|
||||
await downloadMultipleFiles(files);
|
||||
} else {
|
||||
// ZIP download (default for multiple files)
|
||||
await downloadFilesAsZip(files, options.zipFilename);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads a File object directly (for files already in memory)
|
||||
* @param file - The File object to download
|
||||
* @param filename - Optional custom filename
|
||||
*/
|
||||
export function downloadFileObject(file: File, filename?: string): void {
|
||||
downloadBlob(file, filename || file.name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads text content as a file
|
||||
* @param content - Text content to download
|
||||
* @param filename - Filename for the download
|
||||
* @param mimeType - MIME type (defaults to text/plain)
|
||||
*/
|
||||
export function downloadTextAsFile(
|
||||
content: string,
|
||||
filename: string,
|
||||
mimeType: string = 'text/plain'
|
||||
): void {
|
||||
const blob = new Blob([content], { type: mimeType });
|
||||
downloadBlob(blob, filename);
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads JSON data as a file
|
||||
* @param data - Data to serialize and download
|
||||
* @param filename - Filename for the download
|
||||
*/
|
||||
export function downloadJsonAsFile(data: any, filename: string): void {
|
||||
const content = JSON.stringify(data, null, 2);
|
||||
downloadTextAsFile(content, filename, 'application/json');
|
||||
}
|
||||
127
frontend/src/core/utils/fileHash.ts
Normal file
127
frontend/src/core/utils/fileHash.ts
Normal file
@@ -0,0 +1,127 @@
|
||||
/**
|
||||
* File hashing utilities for cache key generation
|
||||
*/
|
||||
|
||||
/**
 * File hashing utilities for cache key generation.
 *
 * All members are static; the class is used as a namespace. Hashes are hex
 * strings intended for cache keys / change detection, not for security.
 */
export class FileHasher {
  // Size of each sampled region when hashing file content.
  private static readonly CHUNK_SIZE = 64 * 1024; // 64KB chunks for hashing

  /**
   * Generate a content-based hash for a file
   * Uses first + last + middle chunks to create a reasonably unique hash
   * without reading the entire file (which would be expensive for large files)
   */
  static async generateContentHash(file: File): Promise<string> {
    const chunks = await this.getFileChunks(file);
    const combined = await this.combineChunks(chunks);
    return await this.hashArrayBuffer(combined);
  }

  /**
   * Generate a fast hash based on file metadata
   * Faster but less collision-resistant than content hash
   */
  static generateMetadataHash(file: File): string {
    // Name, size, mtime and MIME type folded into one string.
    const data = `${file.name}-${file.size}-${file.lastModified}-${file.type}`;
    return this.simpleHash(data);
  }

  /**
   * Generate a hybrid hash that balances speed and uniqueness
   * Uses metadata + small content sample
   */
  static async generateHybridHash(file: File): Promise<string> {
    const metadataHash = this.generateMetadataHash(file);

    // For small files, use full content hash
    if (file.size <= 1024 * 1024) { // 1MB
      const contentHash = await this.generateContentHash(file);
      return `${metadataHash}-${contentHash}`;
    }

    // For large files, use first chunk only
    const firstChunk = file.slice(0, this.CHUNK_SIZE);
    const firstChunkBuffer = await firstChunk.arrayBuffer();
    const firstChunkHash = await this.hashArrayBuffer(firstChunkBuffer);

    return `${metadataHash}-${firstChunkHash}`;
  }

  // Sample up to three regions (start, middle, end) of the file without
  // reading it entirely.
  private static async getFileChunks(file: File): Promise<ArrayBuffer[]> {
    const chunks: ArrayBuffer[] = [];

    // First chunk
    if (file.size > 0) {
      const firstChunk = file.slice(0, Math.min(this.CHUNK_SIZE, file.size));
      chunks.push(await firstChunk.arrayBuffer());
    }

    // Middle chunk (if file is large enough)
    if (file.size > this.CHUNK_SIZE * 2) {
      const middleStart = Math.floor(file.size / 2) - Math.floor(this.CHUNK_SIZE / 2);
      const middleEnd = middleStart + this.CHUNK_SIZE;
      const middleChunk = file.slice(middleStart, middleEnd);
      chunks.push(await middleChunk.arrayBuffer());
    }

    // Last chunk (if file is large enough and different from first)
    // lastStart is floored at CHUNK_SIZE so this region never overlaps the
    // first chunk.
    if (file.size > this.CHUNK_SIZE) {
      const lastStart = Math.max(file.size - this.CHUNK_SIZE, this.CHUNK_SIZE);
      const lastChunk = file.slice(lastStart);
      chunks.push(await lastChunk.arrayBuffer());
    }

    return chunks;
  }

  // Concatenate the sampled chunks into one contiguous buffer for digesting.
  private static async combineChunks(chunks: ArrayBuffer[]): Promise<ArrayBuffer> {
    const totalLength = chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0);
    const combined = new Uint8Array(totalLength);

    let offset = 0;
    for (const chunk of chunks) {
      combined.set(new Uint8Array(chunk), offset);
      offset += chunk.byteLength;
    }

    return combined.buffer;
  }

  // SHA-256 (lowercase hex) via Web Crypto when available; falls back to
  // simpleHash in contexts where crypto.subtle is undefined (e.g. insecure
  // origins).
  private static async hashArrayBuffer(buffer: ArrayBuffer): Promise<string> {
    // Use Web Crypto API for proper hashing
    if (crypto.subtle) {
      const hashBuffer = await crypto.subtle.digest('SHA-256', buffer);
      const hashArray = Array.from(new Uint8Array(hashBuffer));
      return hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
    }

    // Fallback for environments without crypto.subtle
    // NOTE(review): joining every byte into a string is O(n) allocations for
    // large buffers; acceptable as a rarely-hit fallback.
    return this.simpleHash(Array.from(new Uint8Array(buffer)).join(''));
  }

  // Non-cryptographic 31-multiplier rolling hash ((h << 5) - h === 31 * h,
  // same recurrence as Java's String.hashCode), returned as lowercase hex.
  private static simpleHash(str: string): string {
    let hash = 0;
    if (str.length === 0) return hash.toString();

    for (let i = 0; i < str.length; i++) {
      const char = str.charCodeAt(i);
      hash = ((hash << 5) - hash) + char;
      hash = hash & hash; // Convert to 32-bit integer
    }

    return Math.abs(hash).toString(16);
  }

  /**
   * Validate that a file matches its expected hash
   * Useful for detecting file corruption or changes
   * Returns false (and logs) on any read/hash error rather than throwing.
   */
  static async validateFileHash(file: File, expectedHash: string): Promise<boolean> {
    try {
      const actualHash = await this.generateHybridHash(file);
      return actualHash === expectedHash;
    } catch (error) {
      console.error('Hash validation failed:', error);
      return false;
    }
  }
}
|
||||
78
frontend/src/core/utils/fileHistoryUtils.ts
Normal file
78
frontend/src/core/utils/fileHistoryUtils.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
/**
|
||||
* File History Utilities
|
||||
*
|
||||
* Helper functions for IndexedDB-based file history management.
|
||||
* Handles file history operations and lineage tracking.
|
||||
*/
|
||||
import { StirlingFileStub } from '@app/types/fileContext';
|
||||
|
||||
/**
|
||||
* Group files by processing branches - each branch ends in a leaf file
|
||||
* Returns Map<fileId, lineagePath[]> where fileId is the leaf and lineagePath is the path back to original
|
||||
*/
|
||||
/**
 * Group files by processing branches - each branch ends in a leaf file
 * Returns Map<fileId, lineagePath[]> where fileId is the leaf and lineagePath is the path back to original
 *
 * NOTE(review): leaf detection runs two `.some` scans inside a `.filter`,
 * i.e. O(n^2) over the stubs — fine for typical file counts.
 */
export function groupFilesByOriginal(StirlingFileStubs: StirlingFileStub[]): Map<string, StirlingFileStub[]> {
  const groups = new Map<string, StirlingFileStub[]>();

  // Create a map for quick lookups
  const fileMap = new Map<string, StirlingFileStub>();
  for (const record of StirlingFileStubs) {
    fileMap.set(record.id, record);
  }

  // Find leaf files (files that are not parents of any other files AND have version history)
  // Original files (v0) should only be leaves if they have no processed versions at all
  const leafFiles = StirlingFileStubs.filter(stub => {
    const isParentOfOthers = StirlingFileStubs.some(otherStub => otherStub.parentFileId === stub.id);
    const isOriginalOfOthers = StirlingFileStubs.some(otherStub => otherStub.originalFileId === stub.id);

    // A file is a leaf if:
    // 1. It's not a parent of any other files, AND
    // 2. It has processing history (versionNumber > 0) OR it's not referenced as original by others
    return !isParentOfOthers && (stub.versionNumber && stub.versionNumber > 0 || !isOriginalOfOthers);
  });

  // For each leaf file, build its complete lineage path back to original
  for (const leafFile of leafFiles) {
    const lineagePath: StirlingFileStub[] = [];
    let currentFile: StirlingFileStub | undefined = leafFile;

    // Trace back through parentFileId chain to build this specific branch
    while (currentFile) {
      lineagePath.push(currentFile);

      // Move to parent file in this branch
      let nextFile: StirlingFileStub | undefined = undefined;

      if (currentFile.parentFileId) {
        nextFile = fileMap.get(currentFile.parentFileId);
      } else if (currentFile.originalFileId && currentFile.originalFileId !== currentFile.id) {
        // For v1 files, the original file might be referenced by originalFileId
        nextFile = fileMap.get(currentFile.originalFileId);
      }

      // Check for infinite loops before moving to next
      // (a cycle in parent/original references would otherwise hang here)
      if (nextFile && lineagePath.some(file => file.id === nextFile!.id)) {
        break;
      }

      currentFile = nextFile;
    }

    // Sort lineage with latest version first (leaf at top)
    lineagePath.sort((a, b) => (b.versionNumber || 0) - (a.versionNumber || 0));

    // Use leaf file ID as the group key - each branch gets its own group
    groups.set(leafFile.id, lineagePath);
  }

  return groups;
}
|
||||
|
||||
/**
|
||||
* Check if a file has version history
|
||||
*/
|
||||
export function hasVersionHistory(fileStub: StirlingFileStub): boolean {
|
||||
return !!(fileStub.originalFileId && fileStub.versionNumber && fileStub.versionNumber > 0);
|
||||
}
|
||||
|
||||
|
||||
14
frontend/src/core/utils/fileIdSafety.ts
Normal file
14
frontend/src/core/utils/fileIdSafety.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
/**
|
||||
* Runtime validation utilities for FileId safety
|
||||
*/
|
||||
|
||||
import { FileId } from '@app/types/fileContext';
|
||||
|
||||
// Validate that a string is a proper FileId (has UUID format)
|
||||
export function isValidFileId(id: string): id is FileId {
|
||||
// Check UUID v4 format: 8-4-4-4-12 hex digits
|
||||
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
|
||||
return uuidRegex.test(id);
|
||||
}
|
||||
|
||||
|
||||
147
frontend/src/core/utils/fileResponseUtils.test.ts
Normal file
147
frontend/src/core/utils/fileResponseUtils.test.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
/**
|
||||
* Unit tests for file response utility functions
|
||||
*/
|
||||
|
||||
import { describe, test, expect } from 'vitest';
|
||||
import { getFilenameFromHeaders, createFileFromApiResponse } from '@app/utils/fileResponseUtils';
|
||||
|
||||
// Vitest suite for fileResponseUtils. Covers Content-Disposition filename
// extraction (quoted/unquoted/malformed headers) and File construction from
// API responses (header filename vs fallback, content-type defaulting).
describe('fileResponseUtils', () => {

  describe('getFilenameFromHeaders', () => {

    test('should extract filename from content-disposition header', () => {
      const contentDisposition = 'attachment; filename="document.pdf"';
      const filename = getFilenameFromHeaders(contentDisposition);

      expect(filename).toBe('document.pdf');
    });

    test('should extract filename without quotes', () => {
      const contentDisposition = 'attachment; filename=document.pdf';
      const filename = getFilenameFromHeaders(contentDisposition);

      expect(filename).toBe('document.pdf');
    });

    test('should handle single quotes', () => {
      const contentDisposition = "attachment; filename='document.pdf'";
      const filename = getFilenameFromHeaders(contentDisposition);

      expect(filename).toBe('document.pdf');
    });

    test('should return null for malformed header', () => {
      const contentDisposition = 'attachment; invalid=format';
      const filename = getFilenameFromHeaders(contentDisposition);

      expect(filename).toBe(null);
    });

    test('should return null for empty header', () => {
      const filename = getFilenameFromHeaders('');

      expect(filename).toBe(null);
    });

    test('should return null for undefined header', () => {
      // No argument: exercises the default-parameter path.
      const filename = getFilenameFromHeaders();

      expect(filename).toBe(null);
    });

    test('should handle complex filenames with spaces and special chars', () => {
      const contentDisposition = 'attachment; filename="My Document (1).pdf"';
      const filename = getFilenameFromHeaders(contentDisposition);

      expect(filename).toBe('My Document (1).pdf');
    });

    test('should handle filename with extension when downloadHtml is enabled', () => {
      const contentDisposition = 'attachment; filename="email_content.html"';
      const filename = getFilenameFromHeaders(contentDisposition);

      expect(filename).toBe('email_content.html');
    });
  });

  describe('createFileFromApiResponse', () => {

    test('should create file using header filename when available', () => {
      const responseData = new Uint8Array([1, 2, 3, 4]);
      const headers = {
        'content-type': 'application/pdf',
        'content-disposition': 'attachment; filename="server_filename.pdf"'
      };
      const fallbackFilename = 'fallback.pdf';

      const file = createFileFromApiResponse(responseData, headers, fallbackFilename);

      expect(file.name).toBe('server_filename.pdf');
      expect(file.type).toBe('application/pdf');
      expect(file.size).toBe(4);
    });

    test('should use fallback filename when no header filename', () => {
      const responseData = new Uint8Array([1, 2, 3, 4]);
      const headers = {
        'content-type': 'application/pdf'
      };
      const fallbackFilename = 'converted_file.pdf';

      const file = createFileFromApiResponse(responseData, headers, fallbackFilename);

      expect(file.name).toBe('converted_file.pdf');
      expect(file.type).toBe('application/pdf');
    });

    test('should handle HTML response when downloadHtml is enabled', () => {
      const responseData = '<html><body>Test</body></html>';
      const headers = {
        'content-type': 'text/html',
        'content-disposition': 'attachment; filename="email_content.html"'
      };
      const fallbackFilename = 'fallback.pdf';

      const file = createFileFromApiResponse(responseData, headers, fallbackFilename);

      expect(file.name).toBe('email_content.html');
      expect(file.type).toBe('text/html');
    });

    test('should handle ZIP response', () => {
      const responseData = new Uint8Array([80, 75, 3, 4]); // ZIP file signature
      const headers = {
        'content-type': 'application/zip',
        'content-disposition': 'attachment; filename="converted_files.zip"'
      };
      const fallbackFilename = 'fallback.pdf';

      const file = createFileFromApiResponse(responseData, headers, fallbackFilename);

      expect(file.name).toBe('converted_files.zip');
      expect(file.type).toBe('application/zip');
    });

    test('should use default content-type when none provided', () => {
      const responseData = new Uint8Array([1, 2, 3, 4]);
      const headers = {};
      const fallbackFilename = 'test.bin';

      const file = createFileFromApiResponse(responseData, headers, fallbackFilename);

      expect(file.name).toBe('test.bin');
      expect(file.type).toBe('application/octet-stream');
    });

    test('should handle null/undefined headers gracefully', () => {
      const responseData = new Uint8Array([1, 2, 3, 4]);
      const headers = null;
      const fallbackFilename = 'test.bin';

      const file = createFileFromApiResponse(responseData, headers, fallbackFilename);

      expect(file.name).toBe('test.bin');
      expect(file.type).toBe('application/octet-stream');
    });
  });
});
|
||||
45
frontend/src/core/utils/fileResponseUtils.ts
Normal file
45
frontend/src/core/utils/fileResponseUtils.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
/**
|
||||
* Generic utility functions for handling file responses from API endpoints
|
||||
*/
|
||||
|
||||
/**
|
||||
* Extracts filename from Content-Disposition header
|
||||
* @param contentDisposition - Content-Disposition header value
|
||||
* @returns Filename if found, null otherwise
|
||||
*/
|
||||
export const getFilenameFromHeaders = (contentDisposition: string = ''): string | null => {
|
||||
const match = contentDisposition.match(/filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/);
|
||||
if (match && match[1]) {
|
||||
const filename = match[1].replace(/['"]/g, '');
|
||||
|
||||
// Decode URL-encoded characters (e.g., %20 -> space)
|
||||
try {
|
||||
return decodeURIComponent(filename);
|
||||
} catch {
|
||||
// If decoding fails, return the original filename
|
||||
return filename;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a File object from API response using the filename from headers
|
||||
* @param responseData - The response data (blob/arraybuffer/string)
|
||||
* @param headers - Response headers object
|
||||
* @param fallbackFilename - Filename to use if none provided in headers
|
||||
* @returns File object
|
||||
*/
|
||||
export const createFileFromApiResponse = (
|
||||
responseData: any,
|
||||
headers: any,
|
||||
fallbackFilename: string
|
||||
): File => {
|
||||
const contentType = headers?.['content-type'] || 'application/octet-stream';
|
||||
const contentDisposition = headers?.['content-disposition'] || '';
|
||||
|
||||
const filename = getFilenameFromHeaders(contentDisposition) || fallbackFilename;
|
||||
const blob = new Blob([responseData], { type: contentType });
|
||||
|
||||
return new File([blob], filename, { type: contentType });
|
||||
};
|
||||
53
frontend/src/core/utils/fileUtils.ts
Normal file
53
frontend/src/core/utils/fileUtils.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
// Pure utility functions for file operations
|
||||
|
||||
/**
|
||||
* Consolidated file size formatting utility
|
||||
*/
|
||||
export function formatFileSize(bytes: number): string {
|
||||
if (bytes === 0) return '0 B';
|
||||
const k = 1024;
|
||||
const sizes = ['B', 'KB', 'MB', 'GB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get file date as string
|
||||
*/
|
||||
export function getFileDate(file: File | { lastModified: number }): string {
|
||||
if (file.lastModified) {
|
||||
return new Date(file.lastModified).toLocaleString();
|
||||
}
|
||||
return "Unknown";
|
||||
}
|
||||
|
||||
/**
|
||||
* Get file size as string (legacy method for backward compatibility)
|
||||
*/
|
||||
export function getFileSize(file: File | { size: number }): string {
|
||||
if (!file.size) return "Unknown";
|
||||
return formatFileSize(file.size);
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Detects and normalizes file extension from filename
|
||||
* @param filename - The filename to extract extension from
|
||||
* @returns Normalized file extension in lowercase, empty string if no extension
|
||||
*/
|
||||
export function detectFileExtension(filename: string): string {
|
||||
if (!filename || typeof filename !== 'string') return '';
|
||||
|
||||
const parts = filename.split('.');
|
||||
// If there's no extension (no dots or only one part), return empty string
|
||||
if (parts.length <= 1) return '';
|
||||
|
||||
// Get the last part (extension) in lowercase
|
||||
let extension = parts[parts.length - 1].toLowerCase();
|
||||
|
||||
// Normalize common extension variants
|
||||
if (extension === 'jpeg') extension = 'jpg';
|
||||
|
||||
return extension;
|
||||
}
|
||||
121
frontend/src/core/utils/fuzzySearch.ts
Normal file
121
frontend/src/core/utils/fuzzySearch.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
// Lightweight fuzzy search helpers without external deps
|
||||
// Provides diacritics-insensitive normalization and Levenshtein distance scoring
|
||||
|
||||
function normalizeText(text: string): string {
|
||||
return text
|
||||
.toLowerCase()
|
||||
.normalize('NFD')
|
||||
.replace(/\p{Diacritic}+/gu, '')
|
||||
.trim();
|
||||
}
|
||||
|
||||
// Basic Levenshtein distance (iterative with two rows)
|
||||
function levenshtein(a: string, b: string): number {
|
||||
if (a === b) return 0;
|
||||
const aLen = a.length;
|
||||
const bLen = b.length;
|
||||
if (aLen === 0) return bLen;
|
||||
if (bLen === 0) return aLen;
|
||||
|
||||
const prev = new Array(bLen + 1);
|
||||
const curr = new Array(bLen + 1);
|
||||
|
||||
for (let j = 0; j <= bLen; j++) prev[j] = j;
|
||||
|
||||
for (let i = 1; i <= aLen; i++) {
|
||||
curr[0] = i;
|
||||
const aChar = a.charCodeAt(i - 1);
|
||||
for (let j = 1; j <= bLen; j++) {
|
||||
const cost = aChar === b.charCodeAt(j - 1) ? 0 : 1;
|
||||
curr[j] = Math.min(
|
||||
prev[j] + 1, // deletion
|
||||
curr[j - 1] + 1, // insertion
|
||||
prev[j - 1] + cost // substitution
|
||||
);
|
||||
}
|
||||
for (let j = 0; j <= bLen; j++) prev[j] = curr[j];
|
||||
}
|
||||
return curr[bLen];
|
||||
}
|
||||
|
||||
// Compute a heuristic match score (higher is better)
// 1) Exact/substring hits get high base; 2) otherwise use normalized Levenshtein distance
export function scoreMatch(queryRaw: string, targetRaw: string): number {
  const query = normalizeText(queryRaw);
  const target = normalizeText(targetRaw);
  // Empty queries never match anything
  if (!query) return 0;
  if (target.includes(query)) {
    // Reward earlier/shorter substring matches
    const pos = target.indexOf(query);
    return 100 - pos - Math.max(0, target.length - query.length);
  }

  // Token-aware: check each word token too, but require better similarity
  // NOTE(review): tokens are contiguous alphanumeric substrings of `target`,
  // so token.includes(query) can only succeed when target.includes(query)
  // already did (for alphanumeric queries) — this loop looks unreachable in
  // practice; confirm the intended semantics before relying on it.
  const tokens = target.split(/[^a-z0-9]+/g).filter(Boolean);
  for (const token of tokens) {
    if (token.includes(query)) {
      // Only give high score if the match is substantial (not just "and" matching)
      const similarity = query.length / Math.max(query.length, token.length);
      if (similarity >= 0.6) { // Require at least 60% similarity
        return 80 - Math.abs(token.length - query.length);
      }
    }
  }

  // Fallback: edit-distance similarity scaled below all substring scores.
  // The target is truncated to 64 chars to bound the O(|a|*|b|) cost.
  const distance = levenshtein(query, target.length > 64 ? target.slice(0, 64) : target);
  const maxLen = Math.max(query.length, target.length, 1);
  const similarity = 1 - distance / maxLen; // 0..1
  return Math.floor(similarity * 60); // scale below substring scores
}
|
||||
|
||||
export function minScoreForQuery(query: string): number {
|
||||
const len = normalizeText(query).length;
|
||||
if (len <= 3) return 40;
|
||||
if (len <= 6) return 30;
|
||||
return 25;
|
||||
}
|
||||
|
||||
// Decide if a target matches a query based on a threshold
|
||||
export function isFuzzyMatch(query: string, target: string, minScore?: number): boolean {
|
||||
const threshold = typeof minScore === 'number' ? minScore : minScoreForQuery(query);
|
||||
return scoreMatch(query, target) >= threshold;
|
||||
}
|
||||
|
||||
// Convenience: rank a list of items by best score across provided getters
/**
 * Scores every item against `query` using each getter's string, keeps the best
 * score (and the text that produced it) per item, drops items below the
 * threshold, and returns the survivors sorted best-first.
 * @param items - Candidate items
 * @param query - User search text
 * @param getters - Accessors producing searchable strings from an item
 * @param minScore - Optional explicit threshold; defaults to minScoreForQuery
 */
export function rankByFuzzy<T>(items: T[], query: string, getters: Array<(item: T) => string>, minScore?: number): Array<{ item: T; score: number; matchedText?: string }>{
  const results: Array<{ item: T; score: number; matchedText?: string }> = [];
  const threshold = typeof minScore === 'number' ? minScore : minScoreForQuery(query);
  for (const item of items) {
    let best = 0;
    let matchedText = '';
    for (const get of getters) {
      const value = get(item);
      if (!value) continue;
      const s = scoreMatch(query, value);
      if (s > best) {
        best = s;
        matchedText = value;
      }
      // Near-perfect match: skip the remaining getters for this item
      if (best >= 95) {
        break;
      }
    }
    if (best >= threshold) results.push({ item, score: best, matchedText });
  }
  // Best matches first
  results.sort((a, b) => b.score - a.score);
  return results;
}
|
||||
|
||||
/** Public wrapper around the internal normalizer (lowercase, de-accented, trimmed). */
export function normalizeForSearch(text: string): string {
  return normalizeText(text);
}
|
||||
|
||||
// Convert ids like "addPassword", "add-password", "add_password" to words for matching
|
||||
export function idToWords(id: string): string {
|
||||
const spaced = id
|
||||
.replace(/([a-z0-9])([A-Z])/g, '$1 $2')
|
||||
.replace(/[._-]+/g, ' ');
|
||||
return normalizeText(spaced);
|
||||
}
|
||||
|
||||
|
||||
42
frontend/src/core/utils/genericUtils.ts
Normal file
42
frontend/src/core/utils/genericUtils.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
/**
|
||||
* DOM utility functions for common operations
|
||||
*/
|
||||
|
||||
/**
|
||||
* Clamps a value between a minimum and maximum
|
||||
* @param value - The value to clamp
|
||||
* @param min - The minimum allowed value
|
||||
* @param max - The maximum allowed value
|
||||
* @returns The clamped value
|
||||
*/
|
||||
export function clamp(value: number, min: number, max: number): number {
|
||||
return Math.min(Math.max(value, min), max);
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely adds an event listener with proper cleanup
|
||||
* @param target - The target element or window/document
|
||||
* @param event - The event type
|
||||
* @param handler - The event handler function
|
||||
* @param options - Event listener options
|
||||
* @returns A cleanup function to remove the listener
|
||||
*/
|
||||
export function addEventListenerWithCleanup(
|
||||
target: EventTarget,
|
||||
event: string,
|
||||
handler: EventListener,
|
||||
options?: boolean | AddEventListenerOptions
|
||||
): () => void {
|
||||
target.addEventListener(event, handler, options);
|
||||
return () => target.removeEventListener(event, handler, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a click event occurred outside of a specified element
|
||||
* @param event - The click event
|
||||
* @param element - The element to check against
|
||||
* @returns True if the click was outside the element
|
||||
*/
|
||||
export function isClickOutside(event: MouseEvent, element: HTMLElement | null): boolean {
|
||||
return element ? !element.contains(event.target as Node) : true;
|
||||
}
|
||||
191
frontend/src/core/utils/hotkeys.ts
Normal file
191
frontend/src/core/utils/hotkeys.ts
Normal file
@@ -0,0 +1,191 @@
|
||||
import { KeyboardEvent as ReactKeyboardEvent } from 'react';
|
||||
|
||||
/** A single keyboard shortcut: a physical key code plus required modifiers. */
export interface HotkeyBinding {
  /** KeyboardEvent.code value, e.g. 'KeyA' or 'Digit1' (layout-independent). */
  code: string;
  /** Whether Alt/Option must be held (absent = false). */
  alt?: boolean;
  /** Whether Control must be held (absent = false). */
  ctrl?: boolean;
  /** Whether Meta (Cmd/Win) must be held (absent = false). */
  meta?: boolean;
  /** Whether Shift must be held (absent = false). */
  shift?: boolean;
}
|
||||
|
||||
// KeyboardEvent.code values that are themselves modifier keys; these can
// never serve as the main key of a binding (see isModifierCode below).
const MODIFIER_CODES = new Set([
  'ShiftLeft',
  'ShiftRight',
  'ControlLeft',
  'ControlRight',
  'AltLeft',
  'AltRight',
  'MetaLeft',
  'MetaRight',
]);
|
||||
|
||||
// Human-readable display labels for KeyboardEvent.code values whose raw code
// name is unfriendly (punctuation and numpad keys).
const CODE_LABEL_MAP: Record<string, string> = {
  Minus: '-',
  Equal: '=',
  Backquote: '`',
  BracketLeft: '[',
  BracketRight: ']',
  Backslash: '\\',
  IntlBackslash: '\\',
  Semicolon: ';',
  Quote: '\'',
  Comma: ',',
  Period: '.',
  Slash: '/',
  Space: 'Space',
  Tab: 'Tab',
  Escape: 'Esc',
  Enter: 'Enter',
  NumpadEnter: 'Num Enter',
  NumpadAdd: 'Num +',
  NumpadSubtract: 'Num -',
  NumpadMultiply: 'Num *',
  NumpadDivide: 'Num /',
  NumpadDecimal: 'Num .',
  NumpadComma: 'Num ,',
  NumpadEqual: 'Num =',
};
|
||||
|
||||
export const isMacLike = (): boolean => {
|
||||
if (typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
const platform = navigator.platform?.toLowerCase() ?? '';
|
||||
const userAgent = navigator.userAgent?.toLowerCase() ?? '';
|
||||
return /mac|iphone|ipad|ipod/.test(platform) || /mac|iphone|ipad|ipod/.test(userAgent);
|
||||
};
|
||||
|
||||
/** True when `code` is a bare modifier key rather than a main key. */
export const isModifierCode = (code: string): boolean => MODIFIER_CODES.has(code);
|
||||
|
||||
// Matches function-key codes such as 'F1'..'F12' (one or two digits after 'F').
const isFunctionKey = (code: string): boolean => /^F\d{1,2}$/.test(code);
|
||||
|
||||
export const bindingEquals = (a?: HotkeyBinding | null, b?: HotkeyBinding | null): boolean => {
|
||||
if (!a && !b) return true;
|
||||
if (!a || !b) return false;
|
||||
return (
|
||||
a.code === b.code &&
|
||||
Boolean(a.alt) === Boolean(b.alt) &&
|
||||
Boolean(a.ctrl) === Boolean(b.ctrl) &&
|
||||
Boolean(a.meta) === Boolean(b.meta) &&
|
||||
Boolean(a.shift) === Boolean(b.shift)
|
||||
);
|
||||
};
|
||||
|
||||
export const bindingMatchesEvent = (binding: HotkeyBinding, event: KeyboardEvent): boolean => {
|
||||
return (
|
||||
event.code === binding.code &&
|
||||
event.altKey === Boolean(binding.alt) &&
|
||||
event.ctrlKey === Boolean(binding.ctrl) &&
|
||||
event.metaKey === Boolean(binding.meta) &&
|
||||
event.shiftKey === Boolean(binding.shift)
|
||||
);
|
||||
};
|
||||
|
||||
export const eventToBinding = (event: KeyboardEvent | ReactKeyboardEvent): HotkeyBinding | null => {
|
||||
const code = event.code;
|
||||
if (!code || isModifierCode(code)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const binding: HotkeyBinding = {
|
||||
code,
|
||||
alt: event.altKey,
|
||||
ctrl: event.ctrlKey,
|
||||
meta: event.metaKey,
|
||||
shift: event.shiftKey,
|
||||
};
|
||||
|
||||
// Require at least one modifier to avoid clashing with text input
|
||||
if (!binding.alt && !binding.ctrl && !binding.meta) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return binding;
|
||||
};
|
||||
|
||||
const getKeyLabel = (code: string): string => {
|
||||
if (CODE_LABEL_MAP[code]) {
|
||||
return CODE_LABEL_MAP[code];
|
||||
}
|
||||
|
||||
if (code.startsWith('Key')) {
|
||||
return code.slice(3);
|
||||
}
|
||||
|
||||
if (code.startsWith('Digit')) {
|
||||
return code.slice(5);
|
||||
}
|
||||
|
||||
if (code.startsWith('Numpad')) {
|
||||
const remainder = code.slice(6);
|
||||
if (/^[0-9]$/.test(remainder)) {
|
||||
return `Num ${remainder}`;
|
||||
}
|
||||
return `Num ${remainder}`;
|
||||
}
|
||||
|
||||
// Match function keys (F1-F12)
|
||||
if (isFunctionKey(code)) {
|
||||
return code;
|
||||
}
|
||||
|
||||
switch (code) {
|
||||
case 'ArrowUp':
|
||||
return '↑';
|
||||
case 'ArrowDown':
|
||||
return '↓';
|
||||
case 'ArrowLeft':
|
||||
return '←';
|
||||
case 'ArrowRight':
|
||||
return '→';
|
||||
default:
|
||||
return code;
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Renders a binding as ordered display tokens: Meta, Ctrl, Alt, Shift, then
 * the main key label — e.g. ['Ctrl', 'Shift', 'S'], or with Mac glyphs
 * ['⌘', '⇧', 'S'] when macLike is true. Returns [] for a missing binding.
 */
export const getDisplayParts = (binding: HotkeyBinding | null | undefined, macLike: boolean): string[] => {
  if (!binding) return [];
  const parts: string[] = [];
  if (binding.meta) {
    parts.push(macLike ? '⌘' : 'Win');
  }
  if (binding.ctrl) {
    parts.push(macLike ? '⌃' : 'Ctrl');
  }
  if (binding.alt) {
    parts.push(macLike ? '⌥' : 'Alt');
  }
  if (binding.shift) {
    parts.push(macLike ? '⇧' : 'Shift');
  }
  parts.push(getKeyLabel(binding.code));
  return parts;
};
|
||||
|
||||
/** Serializes a hotkey-binding map to JSON for persistence; inverse of deserializeBindings. */
export const serializeBindings = (bindings: Record<string, HotkeyBinding>): string => {
  return JSON.stringify(bindings);
};
|
||||
|
||||
export const deserializeBindings = (value: string | null | undefined): Record<string, HotkeyBinding> => {
|
||||
if (!value) {
|
||||
return {};
|
||||
}
|
||||
try {
|
||||
const parsed = JSON.parse(value) as Record<string, HotkeyBinding>;
|
||||
if (typeof parsed !== 'object' || parsed === null) {
|
||||
return {};
|
||||
}
|
||||
return parsed;
|
||||
} catch (error) {
|
||||
console.warn('Failed to parse stored hotkey bindings', error);
|
||||
return {};
|
||||
}
|
||||
};
|
||||
|
||||
export const normalizeBinding = (binding: HotkeyBinding): HotkeyBinding => ({
|
||||
code: binding.code,
|
||||
alt: Boolean(binding.alt),
|
||||
ctrl: Boolean(binding.ctrl),
|
||||
meta: Boolean(binding.meta),
|
||||
shift: Boolean(binding.shift),
|
||||
});
|
||||
1020
frontend/src/core/utils/languageMapping.ts
Normal file
1020
frontend/src/core/utils/languageMapping.ts
Normal file
File diff suppressed because it is too large
Load Diff
23
frontend/src/core/utils/pageSelection.ts
Normal file
23
frontend/src/core/utils/pageSelection.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
export const validatePageNumbers = (pageNumbers: string): boolean => {
|
||||
if (!pageNumbers.trim()) return false;
|
||||
|
||||
// Normalize input for validation: remove spaces around commas and other spaces
|
||||
const normalized = pageNumbers.replace(/\s*,\s*/g, ',').replace(/\s+/g, '');
|
||||
const parts = normalized.split(',');
|
||||
|
||||
// Regular expressions for different page number formats
|
||||
const allToken = /^all$/i; // Select all pages
|
||||
const singlePageRegex = /^[1-9]\d*$/; // Single page: positive integers only (no 0)
|
||||
const rangeRegex = /^[1-9]\d*-(?:[1-9]\d*)?$/; // Range: 1-5 or open range 10-
|
||||
const mathRegex = /^(?=.*n)[0-9n+\-*/() ]+$/; // Mathematical expressions with n and allowed chars
|
||||
|
||||
return parts.every(part => {
|
||||
if (!part) return false;
|
||||
return (
|
||||
allToken.test(part) ||
|
||||
singlePageRegex.test(part) ||
|
||||
rangeRegex.test(part) ||
|
||||
mathRegex.test(part)
|
||||
);
|
||||
});
|
||||
};
|
||||
71
frontend/src/core/utils/resourceManager.ts
Normal file
71
frontend/src/core/utils/resourceManager.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
/**
|
||||
* Utilities for managing file resources and blob URLs
|
||||
*/
|
||||
|
||||
import { useCallback } from 'react';
|
||||
import { AUTOMATION_CONSTANTS } from '@app/constants/automation';
|
||||
|
||||
export class ResourceManager {
|
||||
private static blobUrls = new Set<string>();
|
||||
|
||||
/**
|
||||
* Create a blob URL and track it for cleanup
|
||||
*/
|
||||
static createBlobUrl(blob: Blob): string {
|
||||
const url = URL.createObjectURL(blob);
|
||||
this.blobUrls.add(url);
|
||||
return url;
|
||||
}
|
||||
|
||||
/**
|
||||
* Revoke a specific blob URL
|
||||
*/
|
||||
static revokeBlobUrl(url: string): void {
|
||||
if (this.blobUrls.has(url)) {
|
||||
URL.revokeObjectURL(url);
|
||||
this.blobUrls.delete(url);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Revoke all tracked blob URLs
|
||||
*/
|
||||
static revokeAllBlobUrls(): void {
|
||||
this.blobUrls.forEach(url => URL.revokeObjectURL(url));
|
||||
this.blobUrls.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a File with proper naming convention
|
||||
*/
|
||||
static createResultFile(
|
||||
data: BlobPart,
|
||||
originalName: string,
|
||||
prefix: string = AUTOMATION_CONSTANTS.PROCESSED_FILE_PREFIX,
|
||||
type: string = 'application/pdf'
|
||||
): File {
|
||||
return new File([data], `${prefix}${originalName}`, { type });
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a timestamped file for responses
|
||||
*/
|
||||
static createTimestampedFile(
|
||||
data: BlobPart,
|
||||
prefix: string,
|
||||
extension: string = '.pdf',
|
||||
type: string = 'application/pdf'
|
||||
): File {
|
||||
const timestamp = Date.now();
|
||||
return new File([data], `${prefix}${timestamp}${extension}`, { type });
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Hook for automatic cleanup on component unmount
 *
 * Returns a stable callback (empty dependency list) that revokes every blob
 * URL tracked by ResourceManager; pass it as a useEffect cleanup to free
 * object URLs when the component goes away.
 */
export function useResourceCleanup(): () => void {
  return useCallback(() => {
    ResourceManager.revokeAllBlobUrls();
  }, []);
}
|
||||
95
frontend/src/core/utils/scarfTracking.ts
Normal file
95
frontend/src/core/utils/scarfTracking.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
/**
|
||||
* Scarf analytics pixel tracking utility
|
||||
*
|
||||
* This module provides a firePixel function that can be called from anywhere,
|
||||
* including non-React utility functions. Configuration and consent state are
|
||||
* injected via setScarfConfig() which should be called from a React hook
|
||||
* during app initialization.
|
||||
*
|
||||
* IMPORTANT: setScarfConfig() must be called before firePixel() will work.
|
||||
* The initialization hook (useScarfTracking) is mounted in App.tsx.
|
||||
*
|
||||
* For testing: Use resetScarfConfig() to clear module state between tests.
|
||||
*/
|
||||
|
||||
// Module-level state
// Whether setScarfConfig() has been called at least once
let configured: boolean = false;
// Global enable flag from app config; null until configured (or when unknown)
let enableScarf: boolean | null = null;
// Consent checker injected from the cookie-consent layer; null until configured
let isServiceAccepted: ((service: string, category: string) => boolean) | null = null;
// Dedup state so rapid repeated calls for the same path fire only one pixel
let lastFiredPathname: string | null = null;
let lastFiredTime = 0;
||||
|
||||
/**
 * Configure scarf tracking with app config and consent checker
 * Should be called from a React hook during app initialization (see useScarfTracking)
 *
 * Calling this again simply overwrites the previous configuration.
 *
 * @param scarfEnabled - Whether scarf tracking is enabled globally
 * @param consentChecker - Function to check if user has accepted scarf service
 */
export function setScarfConfig(
  scarfEnabled: boolean | null,
  consentChecker: (service: string, category: string) => boolean
): void {
  configured = true;
  enableScarf = scarfEnabled;
  isServiceAccepted = consentChecker;
}
|
||||
|
||||
/**
 * Fire scarf pixel for analytics tracking
 * Only fires if:
 * - Scarf tracking has been initialized via setScarfConfig()
 * - Scarf is globally enabled in config
 * - User has accepted scarf service via cookie consent
 * - Pathname has changed or enough time has passed since last fire
 *
 * @param pathname - The pathname to track (usually window.location.pathname)
 */
export function firePixel(pathname: string): void {
  // Dev-mode warning if called before initialization
  if (!configured) {
    console.warn(
      '[scarfTracking] firePixel() called before setScarfConfig(). ' +
      'Ensure useScarfTracking() hook is mounted in App.tsx.'
    );
    return;
  }

  // Check if Scarf is globally disabled
  // (null means "unknown"; only an explicit false blocks firing)
  if (enableScarf === false) {
    return;
  }

  // Check if consent checker is available and scarf service is accepted
  if (!isServiceAccepted || !isServiceAccepted('scarf', 'analytics')) {
    return;
  }

  const now = Date.now();

  // Only fire if pathname changed or it's been at least 250ms since last fire
  // (debounces duplicate calls from rapid re-renders on the same route)
  if (pathname === lastFiredPathname && now - lastFiredTime < 250) {
    return;
  }

  lastFiredPathname = pathname;
  lastFiredTime = now;

  // Hard-coded pixel id for this project; the tracked path rides along as a
  // query parameter
  const url = 'https://static.scarf.sh/a.png?x-pxid=3c1d68de-8945-4e9f-873f-65320b6fabf7'
    + '&path=' + encodeURIComponent(pathname);

  // Fire-and-forget image request — no response handling needed
  const img = new Image();
  img.referrerPolicy = "no-referrer-when-downgrade";
  img.src = url;
}
|
||||
|
||||
/**
|
||||
* Reset scarf tracking configuration and state
|
||||
* Useful for testing to ensure clean state between test runs
|
||||
*/
|
||||
export function resetScarfConfig(): void {
|
||||
enableScarf = null;
|
||||
isServiceAccepted = null;
|
||||
lastFiredPathname = null;
|
||||
lastFiredTime = 0;
|
||||
}
|
||||
55
frontend/src/core/utils/scriptLoader.ts
Normal file
55
frontend/src/core/utils/scriptLoader.ts
Normal file
@@ -0,0 +1,55 @@
|
||||
/**
|
||||
* Utility for dynamically loading external scripts
|
||||
*/
|
||||
|
||||
/** Options for loadScript. */
interface ScriptLoadOptions {
  /** URL of the script to load. */
  src: string;
  /** Optional element id; also used as the deduplication key. */
  id?: string;
  /** Value for the script tag's `async` attribute (default true). */
  async?: boolean;
  /** Value for the script tag's `defer` attribute (default false). */
  defer?: boolean;
  /** Called after a fresh load completes, just before the promise resolves. */
  onLoad?: () => void;
}
|
||||
|
||||
// Registry of scripts (keyed by id, or src when no id was given) already
// loaded through loadScript, so repeat requests resolve immediately.
const loadedScripts = new Set<string>();
|
||||
|
||||
/**
 * Loads an external script exactly once and resolves when it is available.
 * Repeat calls with the same id (or src, when no id is given) resolve
 * immediately without touching the DOM. Rejects when the browser reports a
 * load error for a freshly inserted script tag.
 * Note: onLoad runs only on a fresh load — not when the script was already
 * loaded or already present in the DOM.
 */
export function loadScript({ src, id, async = true, defer = false, onLoad }: ScriptLoadOptions): Promise<void> {
  return new Promise((resolve, reject) => {
    // Check if already loaded
    const scriptId = id || src;
    if (loadedScripts.has(scriptId)) {
      resolve();
      return;
    }

    // Check if script already exists in DOM (e.g. added by other code) and
    // adopt it as loaded
    const existingScript = id ? document.getElementById(id) : document.querySelector(`script[src="${src}"]`);
    if (existingScript) {
      loadedScripts.add(scriptId);
      resolve();
      return;
    }

    // Create and append script
    const script = document.createElement('script');
    script.src = src;
    if (id) script.id = id;
    script.async = async;
    script.defer = defer;

    script.onload = () => {
      loadedScripts.add(scriptId);
      if (onLoad) onLoad();
      resolve();
    };

    script.onerror = () => {
      reject(new Error(`Failed to load script: ${src}`));
    };

    document.head.appendChild(script);
  });
}
|
||||
|
||||
/** True when loadScript has already completed (or adopted) the given id/src. */
export function isScriptLoaded(idOrSrc: string): boolean {
  return loadedScripts.has(idOrSrc);
}
|
||||
34
frontend/src/core/utils/sidebarUtils.ts
Normal file
34
frontend/src/core/utils/sidebarUtils.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import { SidebarRefs, SidebarState, SidebarInfo } from '@app/types/sidebar';
|
||||
|
||||
/**
 * Gets the All tools sidebar information using React refs and state
 * @param refs - Object containing refs to sidebar elements
 * @param state - Current sidebar state
 * @returns Object containing the sidebar rect and whether the tool panel is
 *          active; rect is null when neither sidebar element is mounted
 */
export function getSidebarInfo(refs: SidebarRefs, state: SidebarState): SidebarInfo {
  const { quickAccessRef, toolPanelRef } = refs;
  const { sidebarsVisible, readerMode } = state;

  // Determine if tool panel should be active based on state
  const isToolPanelActive = sidebarsVisible && !readerMode;

  let rect: DOMRect | null = null;

  if (isToolPanelActive && toolPanelRef.current) {
    // Tool panel is expanded: use its rect
    rect = toolPanelRef.current.getBoundingClientRect();
  } else if (quickAccessRef.current) {
    // Fall back to quick access bar
    // This probably isn't needed but if we ever have tooltips or modals that need to be positioned relative to the quick access bar, we can use this
    rect = quickAccessRef.current.getBoundingClientRect();
  }

  return {
    rect,
    isToolPanelActive,
    sidebarState: state
  };
}
|
||||
|
||||
|
||||
322
frontend/src/core/utils/signatureFlattening.ts
Normal file
322
frontend/src/core/utils/signatureFlattening.ts
Normal file
@@ -0,0 +1,322 @@
|
||||
import { PDFDocument, rgb } from 'pdf-lib';
|
||||
import { generateThumbnailWithMetadata } from '@app/utils/thumbnailUtils';
|
||||
import { createProcessedFile, createChildStub } from '@app/contexts/file/fileActions';
|
||||
import { createStirlingFile, StirlingFile, FileId, StirlingFileStub } from '@app/types/fileContext';
|
||||
import type { SignatureAPI } from '@app/components/viewer/viewerTypes';
|
||||
|
||||
/** Minimal slice of the file-context selectors needed by flattenSignatures. */
interface MinimalFileContextSelectors {
  getAllFileIds: () => FileId[];
  getStirlingFileStub: (id: FileId) => StirlingFileStub | undefined;
  getFile: (id: FileId) => StirlingFile | undefined;
}

/** Inputs for flattenSignatures, wired up from the viewer and file context. */
interface SignatureFlatteningOptions {
  /** Viewer API used to read and delete signature annotations. */
  signatureApiRef: React.RefObject<SignatureAPI | null>;
  /** Looks up stored image data for an annotation id added this session. */
  getImageData: (id: string) => string | undefined;
  /** Export hook from the viewer; saveAsCopy yields the current PDF bytes. */
  exportActions?: {
    saveAsCopy: () => Promise<ArrayBuffer | null>;
  };
  selectors: MinimalFileContextSelectors;
  /** File to flatten; when omitted, a file is resolved via the selectors. */
  originalFile?: StirlingFile;
  /** Supplies the viewer's current page / total-page state. */
  getScrollState: () => { currentPage: number; totalPages: number };
  /** Index into getAllFileIds() used when originalFile is not given. */
  activeFileIndex?: number;
}

/** Result of a successful flatten: replaced input ids plus the new file. */
export interface SignatureFlatteningResult {
  inputFileIds: FileId[];
  outputStirlingFile: StirlingFile;
  outputStub: StirlingFileStub;
}
|
||||
|
||||
/**
 * Flattens the signature annotations added during this viewer session into
 * the PDF's page content, producing a new versioned StirlingFile.
 *
 * Steps:
 *  1. Collect session-added annotations per page from the viewer API
 *     (identified by stored image data or an inline data:image payload).
 *  2. Delete those annotations from the viewer so they are not exported twice;
 *     pre-existing annotations are left untouched.
 *  3. Export the base PDF via exportActions.saveAsCopy().
 *  4. Re-draw the collected annotations onto the pages with pdf-lib
 *     (images where possible; text or placeholder rectangles otherwise).
 *
 * @returns The input file id plus the new output file/stub, or null when
 *          export is unavailable, no file can be resolved, or an unrecoverable
 *          error occurs (errors are logged, never thrown to the caller).
 */
export async function flattenSignatures(options: SignatureFlatteningOptions): Promise<SignatureFlatteningResult | null> {
  const { signatureApiRef, getImageData, exportActions, selectors, originalFile, getScrollState, activeFileIndex } = options;

  try {
    // Step 1: Extract all annotations from EmbedPDF before export
    const allAnnotations: Array<{pageIndex: number, annotations: any[]}> = [];

    if (signatureApiRef?.current) {

      // Get actual page count from viewer
      const scrollState = getScrollState();
      const totalPages = scrollState.totalPages;

      // Check only actual pages that exist in the document
      for (let pageIndex = 0; pageIndex < totalPages; pageIndex++) {
        try {
          const pageAnnotations = await signatureApiRef.current.getPageAnnotations(pageIndex);
          if (pageAnnotations && pageAnnotations.length > 0) {
            // Filter to only include annotations added in this session
            const sessionAnnotations = pageAnnotations.filter(annotation => {
              // Check if this annotation has stored image data (indicates it was added this session)
              const hasStoredImageData = annotation.id && getImageData(annotation.id);

              // Also check if it has image data directly in the annotation (new signatures)
              const hasDirectImageData = annotation.imageData || annotation.appearance ||
                annotation.stampData || annotation.imageSrc ||
                annotation.contents || annotation.data;

              const isSessionAnnotation = hasStoredImageData || (hasDirectImageData && typeof hasDirectImageData === 'string' && hasDirectImageData.startsWith('data:image'));

              return isSessionAnnotation;
            });

            if (sessionAnnotations.length > 0) {
              allAnnotations.push({pageIndex, annotations: sessionAnnotations});
            }
          }
        } catch (pageError) {
          // A failing page is skipped, not fatal
          console.warn(`Error extracting annotations from page ${pageIndex + 1}:`, pageError);
        }
      }
    }

    // Step 2: Delete ONLY session annotations from EmbedPDF before export (they'll be rendered manually)
    // Leave old annotations alone - they will remain as annotations in the PDF
    if (allAnnotations.length > 0 && signatureApiRef?.current) {
      for (const pageData of allAnnotations) {
        for (const annotation of pageData.annotations) {
          try {
            await signatureApiRef.current.deleteAnnotation(annotation.id, pageData.pageIndex);
          } catch (deleteError) {
            console.warn(`Failed to delete annotation ${annotation.id}:`, deleteError);
          }
        }
      }
    }

    // Step 3: Use EmbedPDF's saveAsCopy to get the base PDF (now without annotations)
    if (!exportActions) {
      console.error('No export actions available');
      return null;
    }
    const pdfArrayBuffer = await exportActions.saveAsCopy();

    if (pdfArrayBuffer) {

      // Try loading with more permissive PDF-lib options

      // Convert ArrayBuffer to File
      const blob = new Blob([pdfArrayBuffer], { type: 'application/pdf' });

      // Get the current file - try from originalFile first, then from all files
      let currentFile = originalFile;
      if (!currentFile) {
        const allFileIds = selectors.getAllFileIds();
        if (allFileIds.length > 0) {
          // Use activeFileIndex if provided, otherwise default to 0
          const fileIndex = activeFileIndex !== undefined && activeFileIndex < allFileIds.length ? activeFileIndex : 0;
          const fileStub = selectors.getStirlingFileStub(allFileIds[fileIndex]);
          const fileObject = selectors.getFile(allFileIds[fileIndex]);
          if (fileStub && fileObject) {
            currentFile = createStirlingFile(fileObject, allFileIds[fileIndex] as FileId);
          }
        }
      }

      if (!currentFile) {
        console.error('No file available to replace');
        return null;
      }

      let signedFile = new File([blob], currentFile.name, { type: 'application/pdf' });

      // Step 4: Manually render extracted annotations onto the PDF using PDF-lib
      if (allAnnotations.length > 0) {
        try {
          const pdfArrayBufferForFlattening = await signedFile.arrayBuffer();

          // Try different loading options to handle problematic PDFs
          let pdfDoc: PDFDocument;
          try {
            pdfDoc = await PDFDocument.load(pdfArrayBufferForFlattening, {
              ignoreEncryption: true,
              capNumbers: false,
              throwOnInvalidObject: false
            });
          } catch {
            console.warn('Failed to load with standard options, trying createProxy...');
            try {
              // Create a fresh PDF and copy pages instead of modifying
              pdfDoc = await PDFDocument.create();
              const sourcePdf = await PDFDocument.load(pdfArrayBufferForFlattening, {
                ignoreEncryption: true,
                throwOnInvalidObject: false
              });
              const pageIndices = sourcePdf.getPages().map((_, i) => i);
              const copiedPages = await pdfDoc.copyPages(sourcePdf, pageIndices);
              copiedPages.forEach(page => pdfDoc.addPage(page));
            } catch (copyError) {
              console.error('Failed to load PDF with any method:', copyError);
              throw copyError;
            }
          }

          const pages = pdfDoc.getPages();

          for (const pageData of allAnnotations) {
            const { pageIndex, annotations } = pageData;

            if (pageIndex < pages.length) {
              const page = pages[pageIndex];
              const { height: pageHeight } = page.getSize();

              for (const annotation of annotations) {
                try {

                  // The rect field name varies by annotation source
                  const rect = annotation.rect || annotation.bounds || annotation.rectangle || annotation.position;

                  if (rect) {
                    // Extract original annotation position and size
                    const originalX = rect.origin?.x || rect.x || rect.left || 0;
                    const originalY = rect.origin?.y || rect.y || rect.top || 0;
                    const width = rect.size?.width || rect.width || 100;
                    const height = rect.size?.height || rect.height || 50;

                    // Convert EmbedPDF coordinates to PDF-lib coordinates
                    // (assumes EmbedPDF uses a top-left origin while pdf-lib
                    // uses bottom-left — TODO confirm against viewer docs)
                    const pdfX = originalX;
                    const pdfY = pageHeight - originalY - height;

                    // Try to get annotation image data
                    let imageDataUrl = annotation.imageData || annotation.appearance || annotation.stampData ||
                      annotation.imageSrc || annotation.contents || annotation.data;

                    // If no image data found directly, try to get it from storage
                    if (!imageDataUrl && annotation.id) {
                      const storedImageData = getImageData(annotation.id);
                      if (storedImageData) {
                        imageDataUrl = storedImageData;
                      }
                    }

                    if (imageDataUrl && typeof imageDataUrl === 'string' && imageDataUrl.startsWith('data:image')) {
                      try {

                        // Convert data URL to bytes
                        const base64Data = imageDataUrl.split(',')[1];
                        const imageBytes = Uint8Array.from(atob(base64Data), c => c.charCodeAt(0));

                        // Embed image in PDF based on data URL type
                        // (unknown types fall back to PNG decoding)
                        let image;
                        if (imageDataUrl.includes('data:image/jpeg') || imageDataUrl.includes('data:image/jpg')) {
                          image = await pdfDoc.embedJpg(imageBytes);
                        } else if (imageDataUrl.includes('data:image/png')) {
                          image = await pdfDoc.embedPng(imageBytes);
                        } else {
                          image = await pdfDoc.embedPng(imageBytes);
                        }

                        // Draw image on page at annotation position
                        page.drawImage(image, {
                          x: pdfX,
                          y: pdfY,
                          width: width,
                          height: height,
                        });

                      } catch (imageError) {
                        console.error('Failed to render image annotation:', imageError);
                      }
                    } else if (annotation.content || annotation.text) {
                      console.warn('Rendering text annotation instead');
                      // Handle text annotations
                      page.drawText(annotation.content || annotation.text, {
                        x: pdfX,
                        y: pdfY + height - 12, // Adjust for text baseline
                        size: 12,
                        color: rgb(0, 0, 0)
                      });
                    } else if (annotation.type === 14 || annotation.type === 15) {
                      // Handle ink annotations (drawn signatures)
                      // NOTE(review): types 14/15 are treated as ink here —
                      // confirm against the viewer's annotation type enum
                      page.drawRectangle({
                        x: pdfX,
                        y: pdfY,
                        width: width,
                        height: height,
                        borderColor: rgb(0, 0, 0),
                        borderWidth: 2,
                        color: rgb(0.9, 0.9, 0.9), // Light gray background
                        opacity: 0.8
                      });

                      page.drawText('Drawn Signature', {
                        x: pdfX + 5,
                        y: pdfY + height / 2,
                        size: 10,
                        color: rgb(0, 0, 0)
                      });
                    } else {
                      // Handle other annotation types
                      page.drawRectangle({
                        x: pdfX,
                        y: pdfY,
                        width: width,
                        height: height,
                        borderColor: rgb(1, 0, 0),
                        borderWidth: 2,
                        color: rgb(1, 1, 0), // Yellow background
                        opacity: 0.5
                      });
                    }
                  }
                } catch (annotationError) {
                  console.warn('Failed to render annotation:', annotationError);
                }
              }
            }
          }

          // Save the PDF with rendered annotations
          const flattenedPdfBytes = await pdfDoc.save({ useObjectStreams: false, addDefaultPage: false });

          // Copy into a fresh ArrayBuffer before constructing the File
          const arrayBuffer = new ArrayBuffer(flattenedPdfBytes.length);
          const uint8View = new Uint8Array(arrayBuffer);
          uint8View.set(flattenedPdfBytes);
          signedFile = new File([arrayBuffer], currentFile.name, { type: 'application/pdf' });

        } catch (renderError) {
          // Rendering failure is non-fatal: fall back to the exported PDF
          console.error('Failed to manually render annotations:', renderError);
          console.warn('Signatures may only show as annotations');
        }
      }

      // Generate thumbnail and metadata for the signed file
      const thumbnailResult = await generateThumbnailWithMetadata(signedFile);
      const processedFileMetadata = createProcessedFile(thumbnailResult.pageCount, thumbnailResult.thumbnail);

      // Prepare input file data for replacement
      const inputFileIds: FileId[] = [currentFile.fileId];

      const record = selectors.getStirlingFileStub(currentFile.fileId);
      if (!record) {
        console.error('No file record found for:', currentFile.fileId);
        return null;
      }

      // Create output stub and file as a child of the original (increments version)
      const outputStub = createChildStub(
        record,
        { toolId: 'sign', timestamp: Date.now() },
        signedFile,
        thumbnailResult.thumbnail,
        processedFileMetadata
      );
      const outputStirlingFile = createStirlingFile(signedFile, outputStub.id);

      // Return the flattened file data for consumption by caller
      return {
        inputFileIds,
        outputStirlingFile,
        outputStub
      };
    }

    return null;
  } catch (error) {
    console.error('Error flattening signatures:', error);
    return null;
  }
}
|
||||
70
frontend/src/core/utils/storageUtils.ts
Normal file
70
frontend/src/core/utils/storageUtils.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import { StorageStats } from "@app/services/fileStorage";
|
||||
|
||||
/**
|
||||
* Storage operation types for incremental updates
|
||||
*/
|
||||
export type StorageOperation = 'add' | 'remove' | 'clear';
|
||||
|
||||
/**
|
||||
* Update storage stats incrementally based on operation
|
||||
*/
|
||||
export function updateStorageStatsIncremental(
|
||||
currentStats: StorageStats,
|
||||
operation: StorageOperation,
|
||||
files: File[] = []
|
||||
): StorageStats {
|
||||
const filesSizeTotal = files.reduce((total, file) => total + file.size, 0);
|
||||
|
||||
switch (operation) {
|
||||
case 'add':
|
||||
return {
|
||||
...currentStats,
|
||||
used: currentStats.used + filesSizeTotal,
|
||||
available: currentStats.available - filesSizeTotal,
|
||||
fileCount: currentStats.fileCount + files.length
|
||||
};
|
||||
|
||||
case 'remove':
|
||||
return {
|
||||
...currentStats,
|
||||
used: Math.max(0, currentStats.used - filesSizeTotal),
|
||||
available: currentStats.available + filesSizeTotal,
|
||||
fileCount: Math.max(0, currentStats.fileCount - files.length)
|
||||
};
|
||||
|
||||
case 'clear':
|
||||
return {
|
||||
...currentStats,
|
||||
used: 0,
|
||||
available: currentStats.quota || currentStats.available,
|
||||
fileCount: 0
|
||||
};
|
||||
|
||||
default:
|
||||
return currentStats;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check storage usage and return warning message if needed
|
||||
*/
|
||||
export function checkStorageWarnings(stats: StorageStats): string | null {
|
||||
if (!stats.quota || stats.used === 0) return null;
|
||||
|
||||
const usagePercent = (stats.used / stats.quota) * 100;
|
||||
|
||||
if (usagePercent > 90) {
|
||||
return 'Warning: Storage is nearly full (>90%). Browser may start clearing data.';
|
||||
} else if (usagePercent > 80) {
|
||||
return 'Storage is getting full (>80%). Consider removing old files.';
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate storage usage percentage
|
||||
*/
|
||||
export function getStorageUsagePercent(stats: StorageStats): number {
|
||||
return stats.quota ? (stats.used / stats.quota) * 100 : 0;
|
||||
}
|
||||
9
frontend/src/core/utils/textUtils.ts
Normal file
9
frontend/src/core/utils/textUtils.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
/**
|
||||
* Filters out emoji characters from a text string
|
||||
* @param text - The input text string
|
||||
* @returns The filtered text without emoji characters
|
||||
*/
|
||||
export const removeEmojis = (text: string): string => {
|
||||
// Filter out emoji characters (Unicode ranges for emojis)
|
||||
return text.replace(/[\u{1F600}-\u{1F64F}]|[\u{1F300}-\u{1F5FF}]|[\u{1F680}-\u{1F6FF}]|[\u{1F1E0}-\u{1F1FF}]|[\u{2600}-\u{26FF}]|[\u{2700}-\u{27BF}]/gu, '');
|
||||
};
|
||||
446
frontend/src/core/utils/thumbnailUtils.ts
Normal file
446
frontend/src/core/utils/thumbnailUtils.ts
Normal file
@@ -0,0 +1,446 @@
|
||||
import { pdfWorkerManager } from '@app/services/pdfWorkerManager';
|
||||
|
||||
/**
 * Result of thumbnail generation plus lightweight PDF metadata.
 * Produced by generateThumbnailWithMetadata.
 */
export interface ThumbnailWithMetadata {
  thumbnail: string; // Always returns a thumbnail (placeholder if needed); data URL
  pageCount: number; // 0 for non-PDF files; >= 1 for PDFs (1 when page count is unknown)
  pageRotations?: number[]; // Rotation for each page (0, 90, 180, 270); only set on successful PDF reads
}
|
||||
|
||||
/**
 * Colour set used to render placeholder/encrypted thumbnails.
 * Colours are hex strings; bg/border values carry a 2-digit alpha suffix.
 */
interface ColorScheme {
  bgTop: string;         // gradient top stop for the card background
  bgBottom: string;      // gradient bottom stop for the card background
  border: string;        // card border stroke colour
  icon: string;          // document/lock icon fill and accent text colour
  badge: string;         // extension-badge background colour
  textPrimary: string;   // text drawn on top of the badge / icon
  textSecondary: string; // muted text (e.g. file size caption)
}
|
||||
|
||||
/**
|
||||
* Calculate thumbnail scale based on file size (modern 2024 scaling)
|
||||
*/
|
||||
export function calculateScaleFromFileSize(fileSize: number): number {
|
||||
const MB = 1024 * 1024;
|
||||
if (fileSize < 10 * MB) return 1.0; // Full quality for small files
|
||||
if (fileSize < 50 * MB) return 0.8; // High quality for common file sizes
|
||||
if (fileSize < 200 * MB) return 0.6; // Good quality for typical large files
|
||||
if (fileSize < 500 * MB) return 0.4; // Readable quality for large but manageable files
|
||||
return 0.3; // Still usable quality, not tiny
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Generate encrypted PDF thumbnail with lock icon
|
||||
*/
|
||||
/**
 * Generate a 120x150 placeholder thumbnail for an encrypted PDF:
 * PDF-themed gradient card with a dashed border, a large lock icon,
 * a "PDF" label and the file size. Returns a data URL.
 * Canvas state (fillStyle/strokeStyle/dash) is set in draw order — do not
 * reorder calls.
 */
function generateEncryptedPDFThumbnail(file: File): string {
  const canvas = document.createElement('canvas');
  canvas.width = 120;
  canvas.height = 150;
  const ctx = canvas.getContext('2d')!;

  // Use PDF color scheme but with encrypted styling
  const colorScheme = getFileTypeColorScheme('PDF');

  // Create gradient background
  const gradient = ctx.createLinearGradient(0, 0, 0, canvas.height);
  gradient.addColorStop(0, colorScheme.bgTop);
  gradient.addColorStop(1, colorScheme.bgBottom);

  // Rounded rectangle background (8px inset, 8px corner radius)
  drawRoundedRect(ctx, 8, 8, canvas.width - 16, canvas.height - 16, 8);
  ctx.fillStyle = gradient;
  ctx.fill();

  // Border with dashed pattern for encrypted indicator
  ctx.strokeStyle = colorScheme.border;
  ctx.lineWidth = 2;
  ctx.setLineDash([4, 4]);
  ctx.stroke();
  ctx.setLineDash([]); // Reset dash pattern

  // Large lock icon as main element
  drawLargeLockIcon(ctx, canvas.width / 2, canvas.height / 2 - 10, colorScheme);

  // "PDF" text under the lock
  ctx.font = 'bold 14px -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif';
  ctx.fillStyle = colorScheme.icon;
  ctx.textAlign = 'center';
  ctx.fillText('PDF', canvas.width / 2, canvas.height / 2 + 35);

  // File size with subtle styling
  const sizeText = formatFileSize(file.size);
  ctx.font = '11px -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif';
  ctx.fillStyle = colorScheme.textSecondary;
  ctx.textAlign = 'center';
  ctx.fillText(sizeText, canvas.width / 2, canvas.height - 15);

  return canvas.toDataURL();
}
|
||||
|
||||
/**
|
||||
* Generate modern placeholder thumbnail with file extension
|
||||
*/
|
||||
/**
 * Generate a 120x150 placeholder thumbnail for a file that cannot (or should
 * not) be rendered: a gradient card themed by file extension, a document
 * icon, an extension badge and the file size. Returns a data URL.
 * Canvas state is set in draw order — do not reorder calls.
 */
function generatePlaceholderThumbnail(file: File): string {
  const canvas = document.createElement('canvas');
  canvas.width = 120;
  canvas.height = 150;
  const ctx = canvas.getContext('2d')!;

  // Get file extension for color theming (falls back to 'FILE' when absent)
  const extension = file.name.split('.').pop()?.toUpperCase() || 'FILE';
  const colorScheme = getFileTypeColorScheme(extension);

  // Create gradient background
  const gradient = ctx.createLinearGradient(0, 0, 0, canvas.height);
  gradient.addColorStop(0, colorScheme.bgTop);
  gradient.addColorStop(1, colorScheme.bgBottom);

  // Rounded rectangle background (8px inset, 8px corner radius)
  drawRoundedRect(ctx, 8, 8, canvas.width - 16, canvas.height - 16, 8);
  ctx.fillStyle = gradient;
  ctx.fill();

  // Subtle shadow/border
  ctx.strokeStyle = colorScheme.border;
  ctx.lineWidth = 1.5;
  ctx.stroke();

  // Modern document icon
  drawModernDocumentIcon(ctx, canvas.width / 2, 45, colorScheme.icon);

  // Extension badge
  drawExtensionBadge(ctx, canvas.width / 2, canvas.height / 2 + 15, extension, colorScheme);

  // File size with subtle styling
  const sizeText = formatFileSize(file.size);
  ctx.font = '11px -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif';
  ctx.fillStyle = colorScheme.textSecondary;
  ctx.textAlign = 'center';
  ctx.fillText(sizeText, canvas.width / 2, canvas.height - 15);

  return canvas.toDataURL();
}
|
||||
|
||||
/**
|
||||
* Get color scheme based on file extension
|
||||
*/
|
||||
function getFileTypeColorScheme(extension: string): ColorScheme {
|
||||
const schemes: Record<string, ColorScheme> = {
|
||||
// Documents
|
||||
'PDF': { bgTop: '#FF6B6B20', bgBottom: '#FF6B6B10', border: '#FF6B6B40', icon: '#FF6B6B', badge: '#FF6B6B', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'DOC': { bgTop: '#4ECDC420', bgBottom: '#4ECDC410', border: '#4ECDC440', icon: '#4ECDC4', badge: '#4ECDC4', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'DOCX': { bgTop: '#4ECDC420', bgBottom: '#4ECDC410', border: '#4ECDC440', icon: '#4ECDC4', badge: '#4ECDC4', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'ODT': { bgTop: '#4ECDC420', bgBottom: '#4ECDC410', border: '#4ECDC440', icon: '#4ECDC4', badge: '#4ECDC4', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'TXT': { bgTop: '#95A5A620', bgBottom: '#95A5A610', border: '#95A5A640', icon: '#95A5A6', badge: '#95A5A6', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'RTF': { bgTop: '#95A5A620', bgBottom: '#95A5A610', border: '#95A5A640', icon: '#95A5A6', badge: '#95A5A6', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
|
||||
// Spreadsheets
|
||||
'XLS': { bgTop: '#2ECC7120', bgBottom: '#2ECC7110', border: '#2ECC7140', icon: '#2ECC71', badge: '#2ECC71', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'XLSX': { bgTop: '#2ECC7120', bgBottom: '#2ECC7110', border: '#2ECC7140', icon: '#2ECC71', badge: '#2ECC71', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'ODS': { bgTop: '#2ECC7120', bgBottom: '#2ECC7110', border: '#2ECC7140', icon: '#2ECC71', badge: '#2ECC71', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'CSV': { bgTop: '#2ECC7120', bgBottom: '#2ECC7110', border: '#2ECC7140', icon: '#2ECC71', badge: '#2ECC71', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
|
||||
// Presentations
|
||||
'PPT': { bgTop: '#E67E2220', bgBottom: '#E67E2210', border: '#E67E2240', icon: '#E67E22', badge: '#E67E22', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'PPTX': { bgTop: '#E67E2220', bgBottom: '#E67E2210', border: '#E67E2240', icon: '#E67E22', badge: '#E67E22', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'ODP': { bgTop: '#E67E2220', bgBottom: '#E67E2210', border: '#E67E2240', icon: '#E67E22', badge: '#E67E22', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
|
||||
// Images
|
||||
'JPG': { bgTop: '#FF9F4320', bgBottom: '#FF9F4310', border: '#FF9F4340', icon: '#FF9F43', badge: '#FF9F43', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'JPEG': { bgTop: '#FF9F4320', bgBottom: '#FF9F4310', border: '#FF9F4340', icon: '#FF9F43', badge: '#FF9F43', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'PNG': { bgTop: '#FF9F4320', bgBottom: '#FF9F4310', border: '#FF9F4340', icon: '#FF9F43', badge: '#FF9F43', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'GIF': { bgTop: '#FF9F4320', bgBottom: '#FF9F4310', border: '#FF9F4340', icon: '#FF9F43', badge: '#FF9F43', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'BMP': { bgTop: '#FF9F4320', bgBottom: '#FF9F4310', border: '#FF9F4340', icon: '#FF9F43', badge: '#FF9F43', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'TIFF': { bgTop: '#FF9F4320', bgBottom: '#FF9F4310', border: '#FF9F4340', icon: '#FF9F43', badge: '#FF9F43', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'WEBP': { bgTop: '#FF9F4320', bgBottom: '#FF9F4310', border: '#FF9F4340', icon: '#FF9F43', badge: '#FF9F43', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'SVG': { bgTop: '#FF9F4320', bgBottom: '#FF9F4310', border: '#FF9F4340', icon: '#FF9F43', badge: '#FF9F43', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
|
||||
// Web
|
||||
'HTML': { bgTop: '#FD79A820', bgBottom: '#FD79A810', border: '#FD79A840', icon: '#FD79A8', badge: '#FD79A8', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'XML': { bgTop: '#FD79A820', bgBottom: '#FD79A810', border: '#FD79A840', icon: '#FD79A8', badge: '#FD79A8', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
|
||||
// Text/Markup
|
||||
'MD': { bgTop: '#6C5CE720', bgBottom: '#6C5CE710', border: '#6C5CE740', icon: '#6C5CE7', badge: '#6C5CE7', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
|
||||
// Email
|
||||
'EML': { bgTop: '#A29BFE20', bgBottom: '#A29BFE10', border: '#A29BFE40', icon: '#A29BFE', badge: '#A29BFE', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
|
||||
// Archives
|
||||
'ZIP': { bgTop: '#9B59B620', bgBottom: '#9B59B610', border: '#9B59B640', icon: '#9B59B6', badge: '#9B59B6', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'RAR': { bgTop: '#9B59B620', bgBottom: '#9B59B610', border: '#9B59B640', icon: '#9B59B6', badge: '#9B59B6', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
'7Z': { bgTop: '#9B59B620', bgBottom: '#9B59B610', border: '#9B59B640', icon: '#9B59B6', badge: '#9B59B6', textPrimary: '#FFFFFF', textSecondary: '#666666' },
|
||||
|
||||
// Default
|
||||
'DEFAULT': { bgTop: '#74B9FF20', bgBottom: '#74B9FF10', border: '#74B9FF40', icon: '#74B9FF', badge: '#74B9FF', textPrimary: '#FFFFFF', textSecondary: '#666666' }
|
||||
};
|
||||
|
||||
return schemes[extension] || schemes['DEFAULT'];
|
||||
}
|
||||
|
||||
/**
|
||||
* Draw rounded rectangle
|
||||
*/
|
||||
function drawRoundedRect(ctx: CanvasRenderingContext2D, x: number, y: number, width: number, height: number, radius: number) {
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(x + radius, y);
|
||||
ctx.lineTo(x + width - radius, y);
|
||||
ctx.quadraticCurveTo(x + width, y, x + width, y + radius);
|
||||
ctx.lineTo(x + width, y + height - radius);
|
||||
ctx.quadraticCurveTo(x + width, y + height, x + width - radius, y + height);
|
||||
ctx.lineTo(x + radius, y + height);
|
||||
ctx.quadraticCurveTo(x, y + height, x, y + height - radius);
|
||||
ctx.lineTo(x, y + radius);
|
||||
ctx.quadraticCurveTo(x, y, x + radius, y);
|
||||
ctx.closePath();
|
||||
}
|
||||
|
||||
/**
|
||||
* Draw modern document icon
|
||||
*/
|
||||
/**
 * Draw a small document glyph (rounded body with a folded top-right corner)
 * centred at (centerX, centerY) in the given colour.
 * Canvas fillStyle is set in draw order — do not reorder calls.
 */
function drawModernDocumentIcon(ctx: CanvasRenderingContext2D, centerX: number, centerY: number, color: string) {
  const size = 24;
  ctx.fillStyle = color;
  ctx.strokeStyle = color;
  ctx.lineWidth = 2;

  // Document body (slightly taller than wide: size x size*1.2)
  drawRoundedRect(ctx, centerX - size/2, centerY - size/2, size, size * 1.2, 3);
  ctx.fill();

  // Folded corner (semi-transparent white triangle over the top-right edge)
  ctx.beginPath();
  ctx.moveTo(centerX + size/2 - 6, centerY - size/2);
  ctx.lineTo(centerX + size/2, centerY - size/2 + 6);
  ctx.lineTo(centerX + size/2 - 6, centerY - size/2 + 6);
  ctx.closePath();
  ctx.fillStyle = '#FFFFFF40';
  ctx.fill();
}
|
||||
|
||||
/**
|
||||
* Draw large lock icon for encrypted PDFs
|
||||
*/
|
||||
/**
 * Draw a large padlock (body, shackle arc, keyhole) centred near
 * (centerX, centerY), used on encrypted-PDF thumbnails.
 * Canvas fillStyle changes mid-function (icon colour -> textPrimary for the
 * keyhole) — do not reorder calls.
 */
function drawLargeLockIcon(ctx: CanvasRenderingContext2D, centerX: number, centerY: number, colorScheme: ColorScheme) {
  const size = 48;
  ctx.fillStyle = colorScheme.icon;
  ctx.strokeStyle = colorScheme.icon;
  ctx.lineWidth = 3;

  // Lock body (rectangle)
  const bodyWidth = size;
  const bodyHeight = size * 0.75;
  const bodyX = centerX - bodyWidth / 2;
  const bodyY = centerY - bodyHeight / 4;

  drawRoundedRect(ctx, bodyX, bodyY, bodyWidth, bodyHeight, 4);
  ctx.fill();

  // Lock shackle (semicircle stroked above the body)
  const shackleRadius = size * 0.32;
  const shackleY = centerY - size * 0.25;

  ctx.beginPath();
  ctx.arc(centerX, shackleY, shackleRadius, Math.PI, 2 * Math.PI);
  ctx.stroke();

  // Keyhole (circle plus a short stem, in the contrasting text colour)
  const keyholeX = centerX;
  const keyholeY = bodyY + bodyHeight * 0.4;
  ctx.fillStyle = colorScheme.textPrimary;
  ctx.beginPath();
  ctx.arc(keyholeX, keyholeY, 4, 0, 2 * Math.PI);
  ctx.fill();
  ctx.fillRect(keyholeX - 2, keyholeY, 4, 8);
}
|
||||
|
||||
/**
|
||||
* Generate standard PDF thumbnail by rendering first page
|
||||
*/
|
||||
async function generateStandardPDFThumbnail(pdf: any, scale: number): Promise<string> {
|
||||
const page = await pdf.getPage(1);
|
||||
const viewport = page.getViewport({ scale });
|
||||
const canvas = document.createElement("canvas");
|
||||
canvas.width = viewport.width;
|
||||
canvas.height = viewport.height;
|
||||
const context = canvas.getContext("2d");
|
||||
|
||||
if (!context) {
|
||||
throw new Error('Could not get canvas context');
|
||||
}
|
||||
|
||||
await page.render({ canvasContext: context, viewport }).promise;
|
||||
return canvas.toDataURL();
|
||||
}
|
||||
|
||||
/**
|
||||
* Draw extension badge
|
||||
*/
|
||||
/**
 * Draw a pill-shaped badge containing the file extension, centred at
 * (centerX, centerY). Width grows with the extension length (min 40px).
 * Canvas fillStyle/font are set in draw order — do not reorder calls.
 */
function drawExtensionBadge(ctx: CanvasRenderingContext2D, centerX: number, centerY: number, extension: string, colorScheme: ColorScheme) {
  // ~8px per character plus padding; never narrower than 40px
  const badgeWidth = Math.max(extension.length * 8 + 16, 40);
  const badgeHeight = 22;

  // Badge background (radius = half height -> pill shape)
  drawRoundedRect(ctx, centerX - badgeWidth/2, centerY - badgeHeight/2, badgeWidth, badgeHeight, 11);
  ctx.fillStyle = colorScheme.badge;
  ctx.fill();

  // Badge text (the +4 y-offset roughly centres the 11px font vertically)
  ctx.font = 'bold 11px -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif';
  ctx.fillStyle = colorScheme.textPrimary;
  ctx.textAlign = 'center';
  ctx.fillText(extension, centerX, centerY + 4);
}
|
||||
|
||||
/**
|
||||
* Format file size for display
|
||||
*/
|
||||
function formatFileSize(bytes: number): string {
|
||||
if (bytes === 0) return '0 B';
|
||||
const k = 1024;
|
||||
const sizes = ['B', 'KB', 'MB', 'GB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||
return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
async function generatePDFThumbnail(arrayBuffer: ArrayBuffer, file: File, scale: number): Promise<string> {
|
||||
try {
|
||||
const pdf = await pdfWorkerManager.createDocument(arrayBuffer, {
|
||||
disableAutoFetch: true,
|
||||
disableStream: true
|
||||
});
|
||||
|
||||
const thumbnail = await generateStandardPDFThumbnail(pdf, scale);
|
||||
|
||||
// Immediately clean up memory after thumbnail generation using worker manager
|
||||
pdfWorkerManager.destroyDocument(pdf);
|
||||
return thumbnail;
|
||||
} catch (error) {
|
||||
if (error instanceof Error) {
|
||||
// Check if PDF is encrypted
|
||||
if (error.name === "PasswordException") {
|
||||
return generateEncryptedPDFThumbnail(file);
|
||||
}
|
||||
}
|
||||
throw error; // Not an encryption issue, re-throw
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate thumbnail for any file type - always returns a thumbnail (placeholder if needed)
|
||||
*/
|
||||
export async function generateThumbnailForFile(file: File): Promise<string> {
|
||||
// Skip very large files
|
||||
if (file.size >= 100 * 1024 * 1024) {
|
||||
return generatePlaceholderThumbnail(file);
|
||||
}
|
||||
|
||||
// Handle image files - convert to data URL for persistence
|
||||
if (file.type.startsWith('image/')) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
reader.onload = () => resolve(reader.result as string);
|
||||
reader.onerror = () => reject(reader.error);
|
||||
reader.readAsDataURL(file);
|
||||
});
|
||||
}
|
||||
|
||||
// Handle PDF files
|
||||
if (file.type.startsWith('application/pdf')) {
|
||||
const scale = calculateScaleFromFileSize(file.size);
|
||||
|
||||
// Only read first 2MB for thumbnail generation to save memory
|
||||
const chunkSize = 2 * 1024 * 1024; // 2MB
|
||||
const chunk = file.slice(0, Math.min(chunkSize, file.size));
|
||||
const arrayBuffer = await chunk.arrayBuffer();
|
||||
|
||||
try {
|
||||
return await generatePDFThumbnail(arrayBuffer, file, scale);
|
||||
} catch (error) {
|
||||
if (error instanceof Error && error.name === 'InvalidPDFException') {
|
||||
console.warn(`PDF structure issue for ${file.name} - trying with full file`);
|
||||
try {
|
||||
// Try with full file instead of chunk
|
||||
const fullArrayBuffer = await file.arrayBuffer();
|
||||
return await generatePDFThumbnail(fullArrayBuffer, file, scale);
|
||||
} catch {
|
||||
console.warn(`Full file PDF processing also failed for ${file.name} - using placeholder`);
|
||||
return generatePlaceholderThumbnail(file);
|
||||
}
|
||||
}
|
||||
console.warn(`PDF processing failed for ${file.name} - using placeholder:`, error);
|
||||
return generatePlaceholderThumbnail(file);
|
||||
}
|
||||
}
|
||||
|
||||
// All other files get placeholder
|
||||
return generatePlaceholderThumbnail(file);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate thumbnail and extract page count for a PDF file - always returns a valid thumbnail
|
||||
* @param applyRotation - If true, render thumbnail with PDF rotation applied (for static display).
|
||||
* If false, render without rotation (for CSS-based rotation in PageEditor)
|
||||
*/
|
||||
export async function generateThumbnailWithMetadata(file: File, applyRotation: boolean = true): Promise<ThumbnailWithMetadata> {
|
||||
// Non-PDF files have no page count
|
||||
if (!file.type.startsWith('application/pdf')) {
|
||||
const thumbnail = await generateThumbnailForFile(file);
|
||||
return { thumbnail, pageCount: 0 };
|
||||
}
|
||||
|
||||
// Skip very large files
|
||||
if (file.size >= 100 * 1024 * 1024) {
|
||||
const thumbnail = generatePlaceholderThumbnail(file);
|
||||
return { thumbnail, pageCount: 1 };
|
||||
}
|
||||
|
||||
const scale = calculateScaleFromFileSize(file.size);
|
||||
|
||||
try {
|
||||
const arrayBuffer = await file.arrayBuffer();
|
||||
const pdf = await pdfWorkerManager.createDocument(arrayBuffer);
|
||||
|
||||
const pageCount = pdf.numPages;
|
||||
const page = await pdf.getPage(1);
|
||||
|
||||
// If applyRotation is false, render without rotation (for CSS-based rotation)
|
||||
// If applyRotation is true, let PDF.js apply rotation (for static display)
|
||||
const viewport = applyRotation
|
||||
? page.getViewport({ scale })
|
||||
: page.getViewport({ scale, rotation: 0 });
|
||||
|
||||
const canvas = document.createElement("canvas");
|
||||
canvas.width = viewport.width;
|
||||
canvas.height = viewport.height;
|
||||
const context = canvas.getContext("2d");
|
||||
|
||||
if (!context) {
|
||||
pdfWorkerManager.destroyDocument(pdf);
|
||||
throw new Error('Could not get canvas context');
|
||||
}
|
||||
|
||||
await page.render({ canvasContext: context, viewport, canvas }).promise;
|
||||
const thumbnail = canvas.toDataURL();
|
||||
|
||||
// Read rotation for all pages
|
||||
const pageRotations: number[] = [];
|
||||
for (let i = 1; i <= pageCount; i++) {
|
||||
const p = await pdf.getPage(i);
|
||||
const rotation = p.rotate || 0;
|
||||
pageRotations.push(rotation);
|
||||
}
|
||||
|
||||
pdfWorkerManager.destroyDocument(pdf);
|
||||
return { thumbnail, pageCount, pageRotations };
|
||||
|
||||
} catch (error) {
|
||||
if (error instanceof Error && error.name === "PasswordException") {
|
||||
// Handle encrypted PDFs
|
||||
const thumbnail = generateEncryptedPDFThumbnail(file);
|
||||
return { thumbnail, pageCount: 1 };
|
||||
}
|
||||
|
||||
const thumbnail = generatePlaceholderThumbnail(file);
|
||||
return { thumbnail, pageCount: 1 };
|
||||
}
|
||||
}
|
||||
33
frontend/src/core/utils/toolErrorHandler.ts
Normal file
33
frontend/src/core/utils/toolErrorHandler.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
/**
|
||||
* Standardized error handling utilities for tool operations
|
||||
*/
|
||||
|
||||
/**
|
||||
* Default error extractor that follows the standard pattern
|
||||
*/
|
||||
export const extractErrorMessage = (error: any): string => {
|
||||
if (error.response?.data && typeof error.response.data === 'string') {
|
||||
return error.response.data;
|
||||
}
|
||||
if (error.message) {
|
||||
return error.message;
|
||||
}
|
||||
return 'There was an error processing your request.';
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a standardized error handler for tool operations
|
||||
* @param fallbackMessage - Message to show when no specific error can be extracted
|
||||
* @returns Error handler function that follows the standard pattern
|
||||
*/
|
||||
export const createStandardErrorHandler = (fallbackMessage: string) => {
|
||||
return (error: any): string => {
|
||||
if (error.response?.data && typeof error.response.data === 'string') {
|
||||
return error.response.data;
|
||||
}
|
||||
if (error.message) {
|
||||
return error.message;
|
||||
}
|
||||
return fallbackMessage;
|
||||
};
|
||||
};
|
||||
49
frontend/src/core/utils/toolResponseProcessor.ts
Normal file
49
frontend/src/core/utils/toolResponseProcessor.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
// Note: This utility should be used with useToolResources for ZIP operations
|
||||
import { getFilenameFromHeaders } from '@app/utils/fileResponseUtils';
|
||||
|
||||
export type ResponseHandler = (blob: Blob, originalFiles: File[]) => Promise<File[]> | File[];
|
||||
|
||||
/**
|
||||
* Processes a blob response into File(s).
|
||||
* - If a tool-specific responseHandler is provided, it is used.
|
||||
* - If responseHeaders provided and contains Content-Disposition, uses that filename.
|
||||
* - Otherwise, create a single file using the filePrefix + original name.
|
||||
* - If filePrefix is empty, preserves the original filename.
|
||||
*/
|
||||
export async function processResponse(
|
||||
blob: Blob,
|
||||
originalFiles: File[],
|
||||
filePrefix?: string,
|
||||
responseHandler?: ResponseHandler,
|
||||
responseHeaders?: Record<string, any>
|
||||
): Promise<File[]> {
|
||||
if (responseHandler) {
|
||||
const out = await responseHandler(blob, originalFiles);
|
||||
return Array.isArray(out) ? out : [out as unknown as File];
|
||||
}
|
||||
|
||||
// Check if we should use the backend-provided filename from headers
|
||||
// Only when responseHeaders are explicitly provided (indicating the operation requested this)
|
||||
if (responseHeaders) {
|
||||
const contentDisposition = responseHeaders['content-disposition'];
|
||||
const backendFilename = getFilenameFromHeaders(contentDisposition);
|
||||
if (backendFilename) {
|
||||
const type = blob.type || responseHeaders['content-type'] || 'application/octet-stream';
|
||||
return [new File([blob], backendFilename, { type })];
|
||||
}
|
||||
// If preserveBackendFilename was requested but no Content-Disposition header found,
|
||||
// fall back to default behavior (this handles cases where backend doesn't set the header)
|
||||
}
|
||||
|
||||
// Default behavior: use filePrefix + original name
|
||||
const original = originalFiles[0]?.name ?? 'result.pdf';
|
||||
// Only add prefix if it's not empty - this preserves original filenames for file history
|
||||
const name = filePrefix ? `${filePrefix}${original}` : original;
|
||||
const type = blob.type || 'application/octet-stream';
|
||||
|
||||
// File was modified by tool processing - set lastModified to current time
|
||||
return [new File([blob], name, {
|
||||
type,
|
||||
lastModified: Date.now()
|
||||
})];
|
||||
}
|
||||
98
frontend/src/core/utils/toolSearch.ts
Normal file
98
frontend/src/core/utils/toolSearch.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import { ToolId } from "@app/types/toolId";
|
||||
import { ToolRegistryEntry, ToolRegistry } from "@app/data/toolsTaxonomy";
|
||||
import { scoreMatch, minScoreForQuery, normalizeForSearch } from "@app/utils/fuzzySearch";
|
||||
|
||||
/** A ranked search result: a registry entry plus the text that matched. */
export interface RankedToolItem {
  item: [ToolId, ToolRegistryEntry]; // tool id paired with its registry entry
  matchedText?: string; // the name or synonym that produced the match, when known
}
|
||||
|
||||
/**
 * Filter and rank tool-registry entries against a free-text query.
 *
 * Results are grouped into four buckets, best to worst:
 *   1. exact (normalized substring) match in the tool name
 *   2. exact match in a synonym
 *   3. fuzzy match on the name (score >= threshold)
 *   4. fuzzy match on a synonym (score >= threshold)
 * Buckets are sorted internally, concatenated, and de-duplicated by tool id
 * (first, i.e. best, bucket wins). An empty query — or a query matching
 * nothing — returns every entry unranked.
 */
export function filterToolRegistryByQuery(
  toolRegistry: Partial<ToolRegistry>,
  query: string
): RankedToolItem[] {
  const entries = Object.entries(toolRegistry) as [ToolId, ToolRegistryEntry][];
  if (!query.trim()) {
    return entries.map(([id, tool]) => ({ item: [id, tool] as [ToolId, ToolRegistryEntry] }));
  }

  const nq = normalizeForSearch(query);
  const threshold = minScoreForQuery(query);

  // Per-bucket accumulators; `pos` is the substring position (earlier = better),
  // `score` is the fuzzy match score (higher = better).
  const exactName: Array<{ id: ToolId; tool: ToolRegistryEntry; pos: number }> = [];
  const exactSyn: Array<{ id: ToolId; tool: ToolRegistryEntry; text: string; pos: number }> = [];
  const fuzzyName: Array<{ id: ToolId; tool: ToolRegistryEntry; score: number; text: string }> = [];
  const fuzzySyn: Array<{ id: ToolId; tool: ToolRegistryEntry; score: number; text: string }> = [];

  for (const [id, tool] of entries) {
    // Exact substring match on the normalized name short-circuits everything else.
    const nameNorm = normalizeForSearch(tool.name || '');
    const pos = nameNorm.indexOf(nq);
    if (pos !== -1) {
      exactName.push({ id, tool, pos });
      continue;
    }

    // Exact substring match on any synonym (first hit wins).
    const syns = Array.isArray(tool.synonyms) ? tool.synonyms : [];
    let matchedExactSyn: { text: string; pos: number } | null = null;
    for (const s of syns) {
      const sn = normalizeForSearch(s);
      const sp = sn.indexOf(nq);
      if (sp !== -1) {
        matchedExactSyn = { text: s, pos: sp };
        break;
      }
    }
    if (matchedExactSyn) {
      exactSyn.push({ id, tool, text: matchedExactSyn.text, pos: matchedExactSyn.pos });
      continue;
    }

    // Fuzzy name
    const nameScore = scoreMatch(query, tool.name || '');
    if (nameScore >= threshold) {
      fuzzyName.push({ id, tool, score: nameScore, text: tool.name || '' });
    }

    // Fuzzy synonyms — always collected; they simply rank in a lower bucket
    // than fuzzy name matches, and the de-duplication below keeps only the
    // higher-bucket entry when a tool appears in both.
    let bestSynScore = 0;
    let bestSynText = '';
    for (const s of syns) {
      const synScore = scoreMatch(query, s);
      if (synScore > bestSynScore) {
        bestSynScore = synScore;
        bestSynText = s;
      }
      if (bestSynScore >= 95) break; // near-perfect match — stop scanning
    }
    if (bestSynScore >= threshold) {
      fuzzySyn.push({ id, tool, score: bestSynScore, text: bestSynText });
    }
  }

  // Sort within buckets: exact matches by earliest position (shorter text
  // breaks ties), fuzzy matches by descending score.
  exactName.sort((a, b) => a.pos - b.pos || (a.tool.name || '').length - (b.tool.name || '').length);
  exactSyn.sort((a, b) => a.pos - b.pos || a.text.length - b.text.length);
  fuzzyName.sort((a, b) => b.score - a.score);
  fuzzySyn.sort((a, b) => b.score - a.score);

  // Concatenate buckets with de-duplication by tool id
  const seen = new Set<string>();
  const ordered: RankedToolItem[] = [];

  const push = (id: ToolId, tool: ToolRegistryEntry, matchedText?: string) => {
    if (seen.has(id)) return;
    seen.add(id);
    ordered.push({ item: [id, tool], matchedText });
  };

  for (const { id, tool } of exactName) push(id as ToolId, tool, tool.name);
  for (const { id, tool, text } of exactSyn) push(id as ToolId, tool, text);
  for (const { id, tool, text } of fuzzyName) push(id as ToolId, tool, text);
  for (const { id, tool, text } of fuzzySyn) push(id as ToolId, tool, text);

  if (ordered.length > 0) return ordered;

  // Fallback: return everything unchanged
  return entries.map(([id, tool]) => ({ item: [id, tool] as [ToolId, ToolRegistryEntry] }));
}
|
||||
|
||||
24
frontend/src/core/utils/toolSynonyms.ts
Normal file
24
frontend/src/core/utils/toolSynonyms.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { TFunction } from 'i18next';
|
||||
|
||||
// Helper function to get synonyms for a tool (only from translations)
|
||||
export const getSynonyms = (t: TFunction, toolId: string): string[] => {
|
||||
try {
|
||||
const tagsKey = `${toolId}.tags`;
|
||||
const tags = t(tagsKey) as unknown as string;
|
||||
|
||||
// If the translation key doesn't exist or returns the key itself, return empty array
|
||||
if (!tags || tags === tagsKey) {
|
||||
return [];
|
||||
}
|
||||
|
||||
// Split by comma and clean up the tags
|
||||
return tags
|
||||
.split(',')
|
||||
.map((tag: string) => tag.trim())
|
||||
.filter((tag: string) => tag.length > 0);
|
||||
} catch (error) {
|
||||
console.warn(`Failed to get translated synonyms for tool ${toolId}:`, error);
|
||||
return [];
|
||||
}};
|
||||
|
||||
|
||||
127
frontend/src/core/utils/urlMapping.ts
Normal file
127
frontend/src/core/utils/urlMapping.ts
Normal file
@@ -0,0 +1,127 @@
|
||||
import { ToolId } from '@app/types/toolId';
|
||||
|
||||
/**
 * Map URL paths to tool keys (multiple URLs can map to same tool).
 *
 * Keys are app-relative paths (base path already stripped by the router);
 * values are canonical ToolIds looked up in the tool registry. This table is
 * the first lookup in URL routing, so alias and legacy paths resolve before
 * the registry's primary-path scan. New aliases and legacy sitemap URLs are
 * added here rather than in the registry.
 */
export const URL_TO_TOOL_MAP: Record<string, ToolId> = {
  // Basic tools - standard patterns
  '/split': 'split',
  '/split-pdfs': 'split',
  '/merge': 'merge',
  '/merge-pdfs': 'merge',
  '/compress': 'compress',
  '/compress-pdf': 'compress',
  '/rotate': 'rotate',
  '/rotate-pdf': 'rotate',
  '/repair': 'repair',
  '/flatten': 'flatten',
  '/crop': 'crop',

  // Convert tool and all its variants
  '/convert': 'convert',
  '/convert-pdf': 'convert',
  '/file-to-pdf': 'convert',
  '/eml-to-pdf': 'convert',
  '/html-to-pdf': 'convert',
  '/markdown-to-pdf': 'convert',
  '/pdf-to-csv': 'convert',
  '/pdf-to-img': 'convert',
  '/pdf-to-markdown': 'convert',
  '/pdf-to-pdfa': 'convert',
  '/pdf-to-word': 'convert',
  '/pdf-to-xml': 'convert',

  // Security tools
  '/add-password': 'addPassword',
  '/remove-password': 'removePassword',
  '/change-permissions': 'changePermissions',
  '/cert-sign': 'certSign',
  '/manage-signatures': 'certSign',
  '/remove-certificate-sign': 'removeCertSign',
  '/remove-cert-sign': 'removeCertSign',
  '/unlock-pdf-forms': 'unlockPDFForms',
  '/validate-signature': 'validateSignature',

  // Content manipulation
  '/sanitize': 'sanitize',
  '/sanitize-pdf': 'sanitize',
  '/ocr': 'ocr',
  '/ocr-pdf': 'ocr',
  '/watermark': 'watermark',
  '/add-watermark': 'watermark',
  '/add-image': 'addImage',
  '/add-stamp': 'addStamp',
  '/add-page-numbers': 'addPageNumbers',
  '/redact': 'redact',

  // Page manipulation
  '/remove-pages': 'removePages',
  '/remove-blanks': 'removeBlanks',
  '/extract-pages': 'extractPages',
  '/reorganize-pages': 'reorganizePages',
  '/single-large-page': 'pdfToSinglePage',
  '/page-layout': 'pageLayout',
  '/scale-pages': 'scalePages',
  '/booklet-imposition': 'bookletImposition',

  // Splitting tools
  '/auto-split-pdf': 'split',
  '/auto-size-split-pdf': 'split',
  '/scanner-image-split': 'scannerImageSplit',

  // Annotation and content removal
  '/remove-annotations': 'removeAnnotations',
  '/remove-image': 'removeImage',

  // Image and visual tools
  '/extract-images': 'extractImages',
  '/adjust-contrast': 'adjustContrast',
  '/fake-scan': 'scannerEffect',
  '/replace-color-pdf': 'replaceColor',

  // Metadata and info
  '/change-metadata': 'changeMetadata',
  '/get-pdf-info': 'getPdfInfo',
  '/add-attachments': 'addAttachments',

  // Advanced tools
  '/overlay-pdfs': 'overlayPdfs',
  '/edit-table-of-contents': 'editTableOfContents',
  '/auto-rename': 'autoRename',
  '/compare': 'compare',
  '/multi-tool': 'multiTool',
  '/show-js': 'showJS',

  // Special/utility tools
  '/read': 'read',
  '/automate': 'automate',
  '/sign': 'sign',

  // Developer tools
  '/dev-api': 'devApi',
  '/dev-folder-scanning': 'devFolderScanning',
  '/dev-sso-guide': 'devSsoGuide',
  '/dev-airgapped': 'devAirgapped',

  // Legacy URL mappings from sitemap (kept so old bookmarks/SEO links resolve)
  '/pdf-organizer': 'reorganizePages',
  '/multi-page-layout': 'pageLayout',
  '/extract-page': 'extractPages',
  '/pdf-to-single-page': 'pdfToSinglePage',
  '/img-to-pdf': 'convert',
  '/pdf-to-presentation': 'convert',
  '/pdf-to-text': 'convert',
  '/pdf-to-html': 'convert',
  '/auto-redact': 'redact',
  '/stamp': 'addStamp',
  '/view-pdf': 'read',
  '/get-info-on-pdf': 'getPdfInfo',
  '/remove-image-pdf': 'removeImage',
  '/replace-and-invert-color-pdf': 'replaceColor',
  '/pipeline': 'automate',
  '/extract-image-scans': 'scannerImageSplit',
  '/show-javascript': 'showJS',
  '/scanner-effect': 'scannerEffect',
  '/split-by-size-or-count': 'split',
  '/overlay-pdf': 'overlayPdfs',
  '/split-pdf-by-sections': 'split',
  '/split-pdf-by-chapters': 'split',
};
|
||||
117
frontend/src/core/utils/urlRouting.ts
Normal file
117
frontend/src/core/utils/urlRouting.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
/**
|
||||
* URL routing utilities for tool navigation with registry support
|
||||
*/
|
||||
|
||||
import { ToolRoute } from '@app/types/navigation';
|
||||
import { ToolId, isValidToolId } from '@app/types/toolId';
|
||||
import { getDefaultWorkbench } from '@app/types/workbench';
|
||||
import { ToolRegistry, getToolWorkbench, getToolUrlPath } from '@app/data/toolsTaxonomy';
|
||||
import { firePixel } from '@app/utils/scarfTracking';
|
||||
import { URL_TO_TOOL_MAP } from '@app/utils/urlMapping';
|
||||
import { BASE_PATH, withBasePath } from '@app/constants/app';
|
||||
|
||||
/**
|
||||
* Parse the current URL to extract tool routing information
|
||||
*/
|
||||
export function parseToolRoute(registry: ToolRegistry): ToolRoute {
|
||||
const fullPath = window.location.pathname;
|
||||
// Remove base path to get app-relative path
|
||||
const path = BASE_PATH && fullPath.startsWith(BASE_PATH)
|
||||
? fullPath.slice(BASE_PATH.length) || '/'
|
||||
: fullPath;
|
||||
const searchParams = new URLSearchParams(window.location.search);
|
||||
|
||||
// First, check URL mapping for multiple URL aliases
|
||||
const mappedToolId = URL_TO_TOOL_MAP[path];
|
||||
if (mappedToolId && registry[mappedToolId]) {
|
||||
const tool = registry[mappedToolId];
|
||||
return {
|
||||
workbench: getToolWorkbench(tool),
|
||||
toolId: mappedToolId
|
||||
};
|
||||
}
|
||||
|
||||
// Fallback: Try to find tool by primary URL path in registry
|
||||
for (const [toolId, tool] of Object.entries(registry)) {
|
||||
const toolUrlPath = getToolUrlPath(toolId);
|
||||
if (path === toolUrlPath && isValidToolId(toolId)) {
|
||||
return {
|
||||
workbench: getToolWorkbench(tool),
|
||||
toolId
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Check for query parameter fallback (e.g., ?tool=split)
|
||||
const toolParam = searchParams.get('tool');
|
||||
if (toolParam && isValidToolId(toolParam) && registry[toolParam]) {
|
||||
const tool = registry[toolParam];
|
||||
return {
|
||||
workbench: getToolWorkbench(tool),
|
||||
toolId: toolParam
|
||||
};
|
||||
}
|
||||
|
||||
// Default to fileEditor workbench for home page
|
||||
return {
|
||||
workbench: getDefaultWorkbench(),
|
||||
toolId: null
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Update URL and fire analytics pixel
|
||||
*/
|
||||
function updateUrl(newPath: string, searchParams: URLSearchParams, replace: boolean = false): void {
|
||||
const currentPath = window.location.pathname;
|
||||
const queryString = searchParams.toString();
|
||||
const fullUrl = newPath + (queryString ? `?${queryString}` : '');
|
||||
|
||||
// Only update URL and fire pixel if something actually changed
|
||||
if (currentPath !== newPath || window.location.search !== (queryString ? `?${queryString}` : '')) {
|
||||
if (replace) {
|
||||
window.history.replaceState(null, '', fullUrl);
|
||||
} else {
|
||||
window.history.pushState(null, '', fullUrl);
|
||||
}
|
||||
firePixel(newPath);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the URL to reflect the current tool selection
|
||||
*/
|
||||
export function updateToolRoute(toolId: ToolId, registry: ToolRegistry, replace: boolean = false): void {
|
||||
const tool = registry[toolId];
|
||||
if (!tool) {
|
||||
console.warn(`Tool ${toolId} not found in registry`);
|
||||
return;
|
||||
}
|
||||
|
||||
const toolPath = getToolUrlPath(toolId);
|
||||
const newPath = withBasePath(toolPath);
|
||||
const searchParams = new URLSearchParams(window.location.search);
|
||||
|
||||
// Remove tool query parameter since we're using path-based routing
|
||||
searchParams.delete('tool');
|
||||
|
||||
updateUrl(newPath, searchParams, replace);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear tool routing and return to home page
|
||||
*/
|
||||
export function clearToolRoute(replace: boolean = false): void {
|
||||
const searchParams = new URLSearchParams(window.location.search);
|
||||
searchParams.delete('tool');
|
||||
|
||||
updateUrl(withBasePath('/'), searchParams, replace);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get clean tool name for display purposes using registry
|
||||
*/
|
||||
export function getToolDisplayName(toolId: ToolId, registry: ToolRegistry): string {
|
||||
const tool = registry[toolId];
|
||||
return tool ? tool.name : toolId;
|
||||
}
|
||||
Reference in New Issue
Block a user