Mirror of https://github.com/Frooodle/Stirling-PDF.git, synced 2025-09-08 17:51:20 +02:00
Merge branch 'V2' into feature/v2/Filewithid-implementation
Commit: 9a8110a7cb
@@ -15,17 +15,16 @@ export default defineConfig(
   },
   {
     rules: {
-      "no-empty": "off", // Temporarily disabled until codebase conformant
-      "no-empty-pattern": "off", // Temporarily disabled until codebase conformant
-      "no-undef": "off", // Temporarily disabled until codebase conformant
-      "no-useless-escape": "off", // Temporarily disabled until codebase conformant
-      "no-case-declarations": "off", // Temporarily disabled until codebase conformant
-      "prefer-const": "off", // Temporarily disabled until codebase conformant
-      "@typescript-eslint/ban-ts-comment": "off", // Temporarily disabled until codebase conformant
-      "@typescript-eslint/no-empty-object-type": "off", // Temporarily disabled until codebase conformant
+      "@typescript-eslint/no-empty-object-type": [
+        "error",
+        {
+          // Allow empty extending interfaces because there's no real reason not to, and it makes it obvious where to put extra attributes in the future
+          allowInterfaces: 'with-single-extends',
+        },
+      ],
       "@typescript-eslint/no-explicit-any": "off", // Temporarily disabled until codebase conformant
       "@typescript-eslint/no-require-imports": "off", // Temporarily disabled until codebase conformant
       "@typescript-eslint/no-unused-expressions": "off", // Temporarily disabled until codebase conformant
       "@typescript-eslint/no-unused-vars": "off", // Temporarily disabled until codebase conformant
     },
   }
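Note on the rule block above: most of the "Temporarily disabled" suppressions are dropped, and `@typescript-eslint/no-empty-object-type` switches from `"off"` to an error with `allowInterfaces: 'with-single-extends'`. A minimal sketch of what that setting permits (the interface names are illustrative, not from the codebase):

```ts
interface BaseProps {
  title: string;
}

// Allowed under allowInterfaces: 'with-single-extends' — an empty interface
// that extends exactly one base acts as a named alias and an obvious place
// to add attributes later.
interface PanelProps extends BaseProps {}

// Still reported by the rule: an empty interface with no extends behaves like
// the type {}, which nearly any non-nullish value satisfies.
// interface AnyShape {}

const panel: PanelProps = { title: 'Split PDF' };
```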
@@ -224,7 +224,7 @@ function getLicenseUrl(licenseType) {
   // Handle complex SPDX expressions like "(MIT AND Zlib)" or "(MIT OR CC0-1.0)"
   if (licenseType.includes('AND') || licenseType.includes('OR')) {
     // Extract the first license from compound expressions for URL
-    const match = licenseType.match(/\(?\s*([A-Za-z0-9\-\.]+)/);
+    const match = licenseType.match(/\(?\s*([A-Za-z0-9\-.]+)/);
     if (match && licenseUrls[match[1]]) {
       return licenseUrls[match[1]];
     }
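The only change in this hunk is dropping the escape before `.` inside the character class, which `no-useless-escape` flags; the two patterns match identically. A quick sketch of the behaviour (values are illustrative):

```ts
// '.' needs no escape inside a character class; the escaped hyphen is left alone.
const spdxFirstId = /\(?\s*([A-Za-z0-9\-.]+)/;

console.log("(MIT AND Zlib)".match(spdxFirstId)?.[1]); // "MIT"
console.log("CC0-1.0".match(spdxFirstId)?.[1]);        // "CC0-1.0"
```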
@@ -17,8 +17,7 @@ import {
   getActiveNavButton,
 } from './quickAccessBar/QuickAccessBar';

-const QuickAccessBar = forwardRef<HTMLDivElement>(({
-}, ref) => {
+const QuickAccessBar = forwardRef<HTMLDivElement>((_, ref) => {
   const { t } = useTranslation();
   const { isRainbowMode } = useRainbowThemeContext();
   const { openFilesModal, isFilesModalOpen } = useFilesModalContext();
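`no-empty-pattern` reports the empty destructuring pattern in the old `({ }, ref)` parameter list because it binds nothing; naming the unused props parameter keeps the same arity without the pattern. A minimal sketch (component name is illustrative):

```ts
import { createElement, forwardRef } from 'react';

// '_' is just an ordinary, intentionally unused parameter name.
const ExampleBar = forwardRef<HTMLDivElement>((_, ref) =>
  createElement('div', { ref })
);
```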
@@ -82,8 +82,8 @@ export function adjustFontSizeToFit(

   return () => {
     cancelAnimationFrame(raf);
-    try { ro.disconnect(); } catch {}
-    try { mo.disconnect(); } catch {}
+    try { ro.disconnect(); } catch { /* Ignore errors */ }
+    try { mo.disconnect(); } catch { /* Ignore errors */ }
   };
 }
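`no-empty` ignores blocks that contain a comment, so the intentionally swallowed errors above only needed to say so. Sketch of the pattern:

```ts
function safeDisconnect(observer: { disconnect(): void }): void {
  try {
    observer.disconnect();
  } catch {
    /* Ignore errors - the observer may already be gone during teardown */
  }
}
```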
@@ -3,8 +3,6 @@ import { Stack, Text, Divider, Card, Group } from '@mantine/core';
 import { useTranslation } from 'react-i18next';
 import { useSuggestedTools } from '../../../hooks/useSuggestedTools';

-export interface SuggestedToolsSectionProps {}
-
 export function SuggestedToolsSection(): React.ReactElement {
   const { t } = useTranslation();
   const suggestedTools = useSuggestedTools();
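With no props to declare, the empty `SuggestedToolsSectionProps` interface only existed to trip `no-empty-object-type`, so it is deleted rather than kept as a placeholder. A prop-less component needs no props type at all (sketch, illustrative name):

```ts
import React from 'react';

export function ExampleSection(): React.ReactElement {
  return React.createElement('div', null, 'nothing to configure yet');
}
```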
@@ -126,7 +126,7 @@ const ToolSearch = ({
   key={id}
   variant="subtle"
   onClick={() => {
-    onToolSelect && onToolSelect(id);
+    onToolSelect?.(id);
     setDropdownOpen(false);
   }}
   leftSection={<div style={{ color: "var(--tools-text-and-icon-color)" }}>{tool.icon}</div>}
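`onToolSelect && onToolSelect(id)` is an expression statement used only for its side effect, which `@typescript-eslint/no-unused-expressions` reports; the optional call performs the same undefined check. Sketch:

```ts
type OnSelect = ((id: string) => void) | undefined;

function select(onToolSelect: OnSelect, id: string): void {
  onToolSelect?.(id); // no-op when the callback is not provided
}
```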
@@ -347,9 +347,9 @@ describe('useConvertParameters - Auto Detection & Smart Conversion', () => {

     const malformedFiles: Array<{name: string}> = [
       { name: 'valid.pdf' },
-      // @ts-ignore - Testing runtime resilience
+      // @ts-expect-error - Testing runtime resilience
       { name: null },
-      // @ts-ignore
+      // @ts-expect-error - Testing runtime resilience
       { name: undefined }
     ];
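`@ts-expect-error` is preferred by `@typescript-eslint/ban-ts-comment` because the directive itself becomes an error once the line below it stops producing a type error, so a stale suppression cannot linger. Sketch (under strictNullChecks):

```ts
interface Named {
  name: string;
}

// @ts-expect-error - deliberately violating the type to test runtime handling;
// if 'name: null' ever becomes legal, TypeScript flags this directive as unused.
const broken: Named = { name: null };
```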
@@ -172,7 +172,7 @@ export const useToolOperation = <TParams>(
       const validRegularFiles = extractFiles(validFiles);

       switch (config.toolType) {
-        case ToolType.singleFile:
+        case ToolType.singleFile: {
           // Individual file processing - separate API call per file
           const apiCallsConfig: ApiCallsConfig<TParams> = {
             endpoint: config.endpoint,
@@ -188,8 +188,9 @@ export const useToolOperation = <TParams>(
             actions.setStatus
           );
           break;
+        }

-        case ToolType.multiFile:
+        case ToolType.multiFile: {
           // Multi-file processing - single API call with all files
           actions.setStatus('Processing files...');
           const formData = config.buildFormData(params, validRegularFiles);
@@ -202,7 +203,7 @@ export const useToolOperation = <TParams>(
             // Use custom responseHandler for multi-file (handles ZIP extraction)
             processedFiles = await config.responseHandler(response.data, validRegularFiles);
           } else if (response.data.type === 'application/pdf' ||
                      (response.headers && response.headers['content-type'] === 'application/pdf')) {
             // Single PDF response (e.g. split with merge option) - use original filename
             const originalFileName = validRegularFiles[0]?.name || 'document.pdf';
             const singleFile = new File([response.data], originalFileName, { type: 'application/pdf' });
@@ -217,6 +218,7 @@ export const useToolOperation = <TParams>(
             }
           }
           break;
+        }

         case ToolType.custom:
           actions.setStatus('Processing files...');
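The braces added around the `singleFile` and `multiFile` case bodies address `no-case-declarations`: a `const`/`let` declared directly inside a `case` clause is scoped to the whole `switch`, so sibling cases cannot reuse a name and can observe each other's bindings. Sketch of the pattern:

```ts
function describeToolType(kind: 'single' | 'multi'): string {
  let label = '';
  switch (kind) {
    case 'single': {
      const perFile = true; // block-scoped to this case only
      label = perFile ? 'one request per file' : '';
      break;
    }
    case 'multi': {
      const perFile = false; // same name, no redeclaration error
      label = perFile ? '' : 'one request for all files';
      break;
    }
  }
  return label;
}
```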
@@ -253,9 +255,9 @@ export const useToolOperation = <TParams>(
               console.warn(`No file stub found for file: ${file.name}`);
             }
           }

           const outputFileIds = await consumeFiles(inputFileIds, processedFiles);

           // Store operation data for undo (only store what we need to avoid memory bloat)
           lastOperationRef.current = {
             inputFiles: extractFiles(validFiles), // Convert to File objects for undo
@@ -319,16 +321,17 @@ export const useToolOperation = <TParams>(
       // Undo the consume operation
       await undoConsumeFiles(inputFiles, inputStirlingFileStubs, outputFileIds);

       // Clear results and operation tracking
       resetResults();
       lastOperationRef.current = null;

       // Show success message
       actions.setStatus(t('undoSuccess', 'Operation undone successfully'));

     } catch (error: any) {
       let errorMessage = extractErrorMessage(error);

       // Provide more specific error messages based on error type
       if (error.message?.includes('Mismatch between input files')) {
         errorMessage = t('undoDataMismatch', 'Cannot undo: operation data is corrupted');
@@ -337,9 +340,9 @@ export const useToolOperation = <TParams>(
       } else if (error.name === 'QuotaExceededError') {
         errorMessage = t('undoQuotaError', 'Cannot undo: insufficient storage space');
       }

       actions.setError(`${t('undoFailed', 'Failed to undo operation')}: ${errorMessage}`);

       // Don't clear the operation data if undo failed - user might want to try again
     }
   }, [undoConsumeFiles, resetResults, actions, t]);
@@ -35,8 +35,11 @@ function updatePosthogConsent(){
     return;
   }
   const optIn = (window.CookieConsent as any).acceptedCategory('analytics');
-  optIn?
-    posthog.opt_in_capturing() : posthog.opt_out_capturing();
+  if (optIn) {
+    posthog.opt_in_capturing();
+  } else {
+    posthog.opt_out_capturing();
+  }

   console.log("Updated analytics consent: ", optIn? "opted in" : "opted out");
 }
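A bare ternary whose branches are only side effects is another unused expression; the `if/else` expresses the same branching as statements. Sketch (the functions are hypothetical stand-ins for the posthog calls):

```ts
declare function optInCapturing(): void;
declare function optOutCapturing(): void;

function applyConsent(accepted: boolean): void {
  if (accepted) {
    optInCapturing();
  } else {
    optOutCapturing();
  }
}
```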
@@ -183,7 +183,7 @@ export class EnhancedPDFProcessingService {
   ): Promise<ProcessedFile> {
     const arrayBuffer = await file.arrayBuffer();
     const pdf = await pdfWorkerManager.createDocument(arrayBuffer);

     try {
       const totalPages = pdf.numPages;
@@ -460,11 +460,12 @@ export class EnhancedPDFProcessingService {
       case 'failed':
         this.metrics.failedFiles++;
         break;
-      case 'cacheHit':
+      case 'cacheHit': {
         // Update cache hit rate
         const totalAttempts = this.metrics.totalFiles + 1;
         this.metrics.cacheHitRate = (this.metrics.cacheHitRate * this.metrics.totalFiles + 1) / totalAttempts;
         break;
+      }
     }
   }
@@ -520,10 +521,7 @@ export class EnhancedPDFProcessingService {
     this.notifyListeners();

     // Force memory cleanup hint
-    if (typeof window !== 'undefined' && window.gc) {
-      let gc = window.gc;
-      setTimeout(() => gc(), 100);
-    }
+    setTimeout(() => window.gc?.(), 100);
   }

   /**
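`window.gc` is non-standard and typically only present when the browser is launched with GC exposed (e.g. Chromium with `--js-flags=--expose-gc`), so the optional call covers the guard that the removed `if` performed; the removed `let gc` binding was also never reassigned, which `prefer-const` would flag. Sketch, assuming a global `gc` declaration:

```ts
declare global {
  interface Window {
    gc?: () => void;
  }
}

export function hintGarbageCollection(delayMs = 100): void {
  setTimeout(() => window.gc?.(), delayMs); // no-op when gc is not exposed
}
```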
@@ -148,15 +148,17 @@ export class FileAnalyzer {
       case 'immediate_full':
         return pageCount * baseTime;

-      case 'priority_pages':
+      case 'priority_pages': {
         // Estimate time for priority pages (first 10)
         const priorityPages = Math.min(pageCount, 10);
         return priorityPages * baseTime;
+      }

-      case 'progressive_chunked':
+      case 'progressive_chunked': {
         // Estimate time for first chunk (20 pages)
         const firstChunk = Math.min(pageCount, 20);
         return firstChunk * baseTime;
+      }

       default:
         return pageCount * baseTime;
@@ -73,7 +73,7 @@ class IndexedDBManager {
       request.onsuccess = () => {
         const db = request.result;
         console.log(`Successfully opened ${config.name}`);

         // Set up close handler to clean up our references
         db.onclose = () => {
           console.log(`Database ${config.name} closed`);
@@ -87,13 +87,11 @@ class IndexedDBManager {
       request.onupgradeneeded = (event) => {
         const db = request.result;
         const oldVersion = event.oldVersion;

         console.log(`Upgrading ${config.name} from v${oldVersion} to v${config.version}`);

         // Create or update object stores
         config.stores.forEach(storeConfig => {
-          let store: IDBObjectStore;
-
           if (db.objectStoreNames.contains(storeConfig.name)) {
             // Store exists - for now, just continue (could add migration logic here)
             console.log(`Object store '${storeConfig.name}' already exists`);
@@ -109,7 +107,7 @@ class IndexedDBManager {
             options.autoIncrement = storeConfig.autoIncrement;
           }

-          store = db.createObjectStore(storeConfig.name, options);
+          const store = db.createObjectStore(storeConfig.name, options);
           console.log(`Created object store '${storeConfig.name}'`);

           // Create indexes
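The two IndexedDB hunks above are a `prefer-const` fix: the store was declared up front with `let` and assigned exactly once, so declaring it `const` at the assignment site is enough and narrows its scope. Sketch (called from an `onupgradeneeded` handler, where `createObjectStore` is valid):

```ts
function ensureStore(db: IDBDatabase, name: string): void {
  if (db.objectStoreNames.contains(name)) {
    return; // nothing to create
  }
  const store = db.createObjectStore(name, { keyPath: 'id' });
  store.createIndex('byId', 'id', { unique: true });
}
```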
@@ -168,7 +166,7 @@ class IndexedDBManager {

     return new Promise((resolve, reject) => {
       const deleteRequest = indexedDB.deleteDatabase(name);

       deleteRequest.onerror = () => reject(deleteRequest.error);
       deleteRequest.onsuccess = () => {
         console.log(`Deleted database: ${name}`);
@@ -224,4 +222,4 @@ export const DATABASE_CONFIGS = {
   } as DatabaseConfig
 } as const;

 export const indexedDBManager = IndexedDBManager.getInstance();
@@ -1,6 +1,6 @@
 /**
  * PDF.js Worker Manager - Centralized worker lifecycle management
  *
  * Prevents infinite worker creation by managing PDF.js workers globally
  * and ensuring proper cleanup when operations complete.
  */
@@ -86,7 +86,7 @@ class PDFWorkerManager {
       const pdf = await loadingTask.promise;
       this.activeDocuments.add(pdf);
       this.workerCount++;

       return pdf;
     } catch (error) {
       // If document creation fails, make sure to clean up the loading task
@@ -94,6 +94,7 @@ class PDFWorkerManager {
         try {
           loadingTask.destroy();
         } catch (destroyError) {
+          // Ignore errors
         }
       }
       throw error;
@@ -125,7 +126,7 @@ class PDFWorkerManager {
     documentsToDestroy.forEach(pdf => {
       this.destroyDocument(pdf);
     });

     this.activeDocuments.clear();
     this.workerCount = 0;
   }
@@ -166,9 +167,10 @@ class PDFWorkerManager {
       try {
         pdf.destroy();
       } catch (error) {
+        // Ignore errors
       }
     });

     this.activeDocuments.clear();
     this.workerCount = 0;
   }
@@ -182,4 +184,4 @@ class PDFWorkerManager {
 }

 // Export singleton instance
 export const pdfWorkerManager = PDFWorkerManager.getInstance();
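The PDFWorkerManager changes are comment and whitespace fixes, but the surrounding structure is the familiar singleton-plus-registry pattern: one instance tracks live documents so a single cleanup can destroy them all. A rough sketch of that shape (not the project's actual class):

```ts
interface Destroyable {
  destroy(): void;
}

class DocumentManager {
  private static instance: DocumentManager | null = null;
  private active = new Set<Destroyable>();

  static getInstance(): DocumentManager {
    if (!DocumentManager.instance) {
      DocumentManager.instance = new DocumentManager();
    }
    return DocumentManager.instance;
  }

  track(doc: Destroyable): void {
    this.active.add(doc);
  }

  cleanup(): void {
    this.active.forEach(doc => {
      try {
        doc.destroy();
      } catch {
        /* Ignore errors - the document may already be destroyed */
      }
    });
    this.active.clear();
  }
}

export const documentManager = DocumentManager.getInstance();
```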
@@ -1,7 +1,6 @@
 // Base parameter interfaces for reusable patterns

-export interface BaseParameters {
-  // Base interface that all tool parameters should extend
-  // Provides a foundation for adding common properties across all tools
-  // Examples of future additions: userId, sessionId, commonFlags, etc.
-}
+// Base interface that all tool parameters should extend
+// Provides a foundation for adding common properties across all tools
+// Examples of future additions: userId, sessionId, commonFlags, etc.
+export type BaseParameters = object
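`object` is a closer fit than an empty interface here: an empty interface behaves like the type `{}`, which almost any non-nullish value satisfies, and that is exactly what `no-empty-object-type` warns about, while `object` restricts `BaseParameters` to non-primitive values. Sketch:

```ts
type LikeEmptyInterface = {};
type LikeNewBaseParameters = object;

const loose: LikeEmptyInterface = 42;                    // compiles - rarely what was intended
// const strict: LikeNewBaseParameters = 42;             // error: number is not a non-primitive
const params: LikeNewBaseParameters = { pageCount: 3 };  // fine
```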