Ludy 2025-09-03 06:35:41 +00:00 committed by GitHub
commit 3015bb481c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
15 changed files with 3091 additions and 63407 deletions

View File

@ -3,7 +3,15 @@ name: Auto PR V2 Deployment
on:
pull_request:
types: [opened, synchronize, reopened, closed]
workflow_dispatch:
inputs:
pr:
description: "PR number to deploy"
required: true
allow_fork:
description: "Allow deploying fork PR?"
required: false
default: "false"
permissions:
contents: read
@ -12,102 +20,93 @@ permissions:
jobs:
check-pr:
if: github.event.action != 'closed'
if: (github.event_name == 'pull_request' && github.event.action != 'closed') || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
outputs:
should_deploy: ${{ steps.check-conditions.outputs.should_deploy }}
pr_number: ${{ github.event.number }}
pr_repository: ${{ steps.get-pr-info.outputs.repository }}
pr_ref: ${{ steps.get-pr-info.outputs.ref }}
should_deploy: ${{ steps.decide.outputs.should_deploy }}
is_fork: ${{ steps.resolve.outputs.is_fork }}
allow_fork: ${{ steps.decide.outputs.allow_fork }}
pr_number: ${{ steps.resolve.outputs.pr_number }}
pr_repository: ${{ steps.resolve.outputs.repository }}
pr_ref: ${{ steps.resolve.outputs.ref }}
steps:
- name: Harden Runner
uses: step-security/harden-runner@002fdce3c6a235733a90a27c80493a3241e56863 # v2.12.1
uses: step-security/harden-runner@6c439dc8bdf85cadbbce9ed30d1c7b959517bc49 # v2.12.2
with:
egress-policy: audit
- name: Check deployment conditions
id: check-conditions
- name: Resolve PR info
id: resolve
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const { owner, repo } = context.repo;
let prNumber = context.eventName === 'workflow_dispatch'
? parseInt(context.payload.inputs.pr, 10)
: context.payload.number;
if (!Number.isInteger(prNumber)) { core.setFailed('Invalid PR number'); return; }
const { data: pr } = await github.rest.pulls.get({ owner, repo, pull_number: prNumber });
core.setOutput('pr_number', String(prNumber));
core.setOutput('repository', pr.head.repo.full_name);
core.setOutput('ref', pr.head.ref);
core.setOutput('is_fork', String(pr.head.repo.fork));
core.setOutput('base_ref', pr.base.ref);
core.setOutput('author', pr.user.login);
core.setOutput('state', pr.state);
- name: Decide deploy
id: decide
shell: bash
env:
EVENT_NAME: ${{ github.event_name }}
STATE: ${{ steps.resolve.outputs.state }}
IS_FORK: ${{ steps.resolve.outputs.is_fork }}
# only set for workflow_dispatch:
ALLOW_FORK_INPUT: ${{ inputs.allow_fork }}
# for the auto-PR logic:
PR_TITLE: ${{ github.event.pull_request.title }}
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
PR_BRANCH: ${{ github.event.pull_request.head.ref }}
PR_BASE: ${{ steps.resolve.outputs.base_ref }}
PR_AUTHOR: ${{ steps.resolve.outputs.author }}
run: |
echo "PR Title: $PR_TITLE"
echo "PR Author: $PR_AUTHOR"
echo "PR Branch: $PR_BRANCH"
echo "PR Base Branch: ${{ github.event.pull_request.base.ref }}"
# Define authorized users
authorized_users=(
"Frooodle"
"sf298"
"Ludy87"
"LaserKaspar"
"sbplat"
"reecebrowne"
"DarioGii"
"ConnorYoh"
"EthanHealy01"
"jbrunton96"
)
# Check if author is in the authorized list
is_authorized=false
for user in "${authorized_users[@]}"; do
if [[ "$PR_AUTHOR" == "$user" ]]; then
is_authorized=true
break
set -e
# Default: do not deploy anything
should=false
allow_fork="$(echo "${ALLOW_FORK_INPUT:-false}" | tr '[:upper:]' '[:lower:]')"
if [ "$EVENT_NAME" = "workflow_dispatch" ]; then
if [ "$STATE" != "open" ]; then
echo "PR not open -> skip"
else
if [ "$IS_FORK" = "true" ] && [ "$allow_fork" != "true" ]; then
echo "Fork PR and allow_fork=false -> skip"
else
should=true
fi
fi
done
# If PR is targeting V2 and user is authorized, deploy unconditionally
PR_BASE_BRANCH="${{ github.event.pull_request.base.ref }}"
if [[ "$PR_BASE_BRANCH" == "V2" && "$is_authorized" == "true" ]]; then
echo "✅ Deployment forced: PR targets V2 and author is authorized."
echo "should_deploy=true" >> $GITHUB_OUTPUT
exit 0
fi
# Otherwise, continue with original keyword checks
has_v2_keyword=false
if [[ "$PR_TITLE" =~ [Vv]2 ]] || [[ "$PR_TITLE" =~ [Vv]ersion.?2 ]] || [[ "$PR_TITLE" =~ [Vv]ersion.?[Tt]wo ]]; then
has_v2_keyword=true
fi
has_branch_keyword=false
if [[ "$PR_BRANCH" =~ [Vv]2 ]] || [[ "$PR_BRANCH" =~ [Rr]eact ]]; then
has_branch_keyword=true
fi
if [[ "$is_authorized" == "true" && ( "$has_v2_keyword" == "true" || "$has_branch_keyword" == "true" ) ]]; then
echo "✅ Deployment conditions met"
echo "should_deploy=true" >> $GITHUB_OUTPUT
else
echo "❌ Deployment conditions not met"
echo " - Authorized user: $is_authorized"
echo " - Has V2 keyword in title: $has_v2_keyword"
echo " - Has V2/React keyword in branch: $has_branch_keyword"
echo "should_deploy=false" >> $GITHUB_OUTPUT
auth_users=("Frooodle" "sf298" "Ludy87" "LaserKaspar" "sbplat" "reecebrowne" "DarioGii" "ConnorYoh" "EthanHealy01" "jbrunton96")
is_auth=false; for u in "${auth_users[@]}"; do [ "$u" = "$PR_AUTHOR" ] && is_auth=true && break; done
if [ "$PR_BASE" = "V2" ] && [ "$is_auth" = true ]; then
should=true
else
title_has_v2=false; echo "$PR_TITLE" | grep -qiE 'v2|version.?2|version.?two' && title_has_v2=true
branch_has_kw=false; echo "$PR_BRANCH" | grep -qiE 'v2|react' && branch_has_kw=true
if [ "$is_auth" = true ] && { [ "$title_has_v2" = true ] || [ "$branch_has_kw" = true ]; }; then
should=true
fi
fi
fi
- name: Get PR repository and ref
id: get-pr-info
if: steps.check-conditions.outputs.should_deploy == 'true'
run: |
# For forks, use the full repository name, for internal PRs use the current repo
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
repository="${{ github.event.pull_request.head.repo.full_name }}"
else
repository="${{ github.repository }}"
fi
echo "repository=$repository" >> $GITHUB_OUTPUT
echo "ref=${{ github.event.pull_request.head.ref }}" >> $GITHUB_OUTPUT
echo "should_deploy=$should" >> $GITHUB_OUTPUT
echo "allow_fork=${allow_fork:-false}" >> $GITHUB_OUTPUT
deploy-v2-pr:
needs: check-pr
runs-on: ubuntu-latest
if: needs.check-pr.outputs.should_deploy == 'true'
if: needs.check-pr.outputs.should_deploy == 'true' && (needs.check-pr.outputs.is_fork == 'false' || needs.check-pr.outputs.allow_fork == 'true')
# Concurrency control - only one deployment per PR at a time
concurrency:
group: v2-deploy-pr-${{ needs.check-pr.outputs.pr_number }}
@ -119,7 +118,7 @@ jobs:
steps:
- name: Harden Runner
uses: step-security/harden-runner@002fdce3c6a235733a90a27c80493a3241e56863 # v2.12.1
uses: step-security/harden-runner@6c439dc8bdf85cadbbce9ed30d1c7b959517bc49 # v2.12.2
with:
egress-policy: audit
@ -154,13 +153,13 @@ jobs:
issue_number: prNumber,
per_page: 100
});
const v2Comments = comments.filter(comment =>
comment.body.includes('🚀 **Auto-deploying V2 version**') ||
comment.body.includes('## 🚀 V2 Auto-Deployment Complete!') ||
comment.body.includes('❌ **V2 Auto-deployment failed**')
);
for (const comment of v2Comments) {
console.log(`Deleting old V2 comment: ${comment.id}`);
await github.rest.issues.deleteComment({
@ -177,7 +176,6 @@ jobs:
issue_number: prNumber,
body: `🚀 **Auto-deploying V2 version** for PR #${prNumber}...\n\n_This is an automated deployment triggered by V2/version2 keywords in the PR title or V2/React keywords in the branch name._\n\n⚠ **Note:** If new commits are pushed during deployment, this build will be cancelled and replaced with the latest version.`
});
return newComment.id;
- name: Checkout PR
@ -188,7 +186,6 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
fetch-depth: 0 # Fetch full history for commit hash detection
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
@ -212,7 +209,7 @@ jobs:
if [ -z "$FRONTEND_HASH" ]; then
FRONTEND_HASH="no-frontend-changes"
fi
# Get last commit that touched backend code, docker/backend, or docker/compose
BACKEND_HASH=$(git log -1 --format="%H" -- app/ docker/backend/ docker/compose/ 2>/dev/null || echo "")
if [ -z "$BACKEND_HASH" ]; then
@ -321,7 +318,7 @@ jobs:
SWAGGER_SERVER_URL: "https://${V2_PORT}.ssl.stirlingpdf.cloud"
baseUrl: "https://${V2_PORT}.ssl.stirlingpdf.cloud"
restart: on-failure:5
stirling-pdf-v2-frontend:
container_name: stirling-pdf-v2-frontend-pr-${{ needs.check-pr.outputs.pr_number }}
image: ${{ secrets.DOCKER_HUB_USERNAME }}/test:v2-frontend-${{ steps.commit-hashes.outputs.frontend_short }}
@ -354,7 +351,7 @@ jobs:
# Clean up unused Docker resources to save space
docker system prune -af --volumes || true
# Clean up old backend/frontend images (older than 2 weeks)
docker image prune -af --filter "until=336h" --filter "label!=keep=true" || true
ENDSSH
@ -492,7 +489,7 @@ jobs:
# Clean up old unused images (older than 2 weeks) but keep recent ones for reuse
docker image prune -af --filter "until=336h" --filter "label!=keep=true" || true
# Note: We don't remove the commit-based images since they can be reused across PRs
# Only remove PR-specific containers and directories
ENDSSH
@ -501,5 +498,4 @@ jobs:
if: always()
run: |
rm -f ../private.key
continue-on-error: true
continue-on-error: true
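For reference, the new workflow_dispatch path can be triggered manually with the GitHub CLI. A minimal sketch, assuming the workflow is addressed by its display name and using a hypothetical PR number:

    # Manually deploy PR #1234 and explicitly allow a fork PR (illustrative values)
    gh workflow run "Auto PR V2 Deployment" -f pr=1234 -f allow_fork=true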

View File

@ -32,18 +32,29 @@ jobs:
with:
egress-policy: audit
- name: Check out code
- name: Checkout PR head (default)
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
persist-credentials: false
- name: Setup GitHub App Bot
if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == false)
id: setup-bot
uses: ./.github/actions/setup-bot
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Checkout BASE branch (safe script)
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.base.sha }}
path: base
fetch-depth: 1
persist-credentials: false
- name: Set up Node.js
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
@ -53,12 +64,45 @@ jobs:
- name: Install frontend dependencies
working-directory: frontend
run: npm ci
env:
NPM_CONFIG_IGNORE_SCRIPTS: "true"
run: npm ci --ignore-scripts --audit=false --fund=false
- name: Generate frontend license report
- name: Generate frontend license report (internal PR)
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == false
working-directory: frontend
env:
PR_IS_FORK: "false"
run: npm run generate-licenses
- name: Generate frontend license report (fork PRs, pinned)
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == true
env:
NPM_CONFIG_IGNORE_SCRIPTS: "true"
working-directory: frontend
run: |
mkdir -p src/assets
npx --yes license-checker@25.0.1 --production --json > src/assets/3rdPartyLicenses.json
- name: Postprocess with project script (BASE version)
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == true
env:
PR_IS_FORK: "true"
run: |
node base/frontend/scripts/generate-licenses.js \
--input frontend/src/assets/3rdPartyLicenses.json
- name: Copy postprocessed artifacts back (fork PRs)
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == true
run: |
mkdir -p frontend/src/assets
if [ -f "base/frontend/src/assets/3rdPartyLicenses.json" ]; then
cp base/frontend/src/assets/3rdPartyLicenses.json frontend/src/assets/3rdPartyLicenses.json
fi
if [ -f "base/frontend/src/assets/license-warnings.json" ]; then
cp base/frontend/src/assets/license-warnings.json frontend/src/assets/license-warnings.json
fi
- name: Check for license warnings
run: |
if [ -f "frontend/src/assets/license-warnings.json" ]; then
@ -69,7 +113,7 @@ jobs:
# PR Event: Check licenses and comment on PR
- name: Delete previous license check comments
if: github.event_name == 'pull_request'
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == false
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ steps.setup-bot.outputs.token }}
@ -101,8 +145,28 @@ jobs:
});
}
- name: Summarize results (fork PRs)
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == true
run: |
{
echo "## Frontend License Check"
echo ""
if [ "${LICENSE_WARNINGS_EXIST}" = "true" ]; then
echo "❌ **Failed** incompatible or unknown licenses found."
if [ -f "frontend/src/assets/license-warnings.json" ]; then
echo ""
echo "### Warnings"
jq -r '.warnings[] | "- \(.message)"' frontend/src/assets/license-warnings.json || true
fi
else
echo "✅ **Passed** no license warnings detected."
fi
echo ""
echo "_Note: This is a fork PR. PR comments are disabled; use this summary._"
} >> "$GITHUB_STEP_SUMMARY"
- name: Comment on PR - License Check Results
if: github.event_name == 'pull_request'
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == false
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ steps.setup-bot.outputs.token }}

File diff suppressed because it is too large

View File

@ -7,34 +7,30 @@
"dependencies": {
"@atlaskit/pragmatic-drag-and-drop": "^1.7.4",
"@emotion/react": "^11.14.0",
"@emotion/styled": "^11.14.0",
"@iconify/react": "^6.0.0",
"@mantine/core": "^8.0.1",
"@mantine/dropzone": "^8.0.1",
"@mantine/hooks": "^8.0.1",
"@mui/icons-material": "^7.1.0",
"@mui/material": "^7.1.0",
"@tailwindcss/postcss": "^4.1.8",
"@emotion/styled": "^11.14.1",
"@iconify/react": "^6.0.1",
"@mantine/core": "^8.2.8",
"@mantine/dropzone": "^8.2.8",
"@mantine/hooks": "^8.2.8",
"@mui/icons-material": "^7.3.2",
"@mui/material": "^7.3.2",
"@tailwindcss/postcss": "^4.1.12",
"@tanstack/react-virtual": "^3.13.12",
"@testing-library/dom": "^10.4.0",
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.3.0",
"@testing-library/user-event": "^13.5.0",
"autoprefixer": "^10.4.21",
"axios": "^1.9.0",
"i18next": "^25.2.1",
"i18next-browser-languagedetector": "^8.1.0",
"axios": "^1.11.0",
"i18next": "^25.4.2",
"i18next-browser-languagedetector": "^8.2.0",
"i18next-http-backend": "^3.0.2",
"jszip": "^3.10.1",
"pdf-lib": "^1.17.1",
"pdfjs-dist": "^3.11.174",
"posthog-js": "^1.261.0",
"react": "^19.1.0",
"react-dom": "^19.1.0",
"react-i18next": "^15.5.2",
"react-router-dom": "^7.6.0",
"tailwindcss": "^4.1.8",
"web-vitals": "^2.1.4"
"pdfjs-dist": "^5.4.149",
"posthog-js": "^1.261.4",
"react": "^19.1.1",
"react-dom": "^19.1.1",
"react-i18next": "^15.7.3",
"react-router-dom": "^7.8.2",
"tailwindcss": "^4.1.12",
"web-vitals": "^5.1.0"
},
"scripts": {
"predev": "npm run generate-icons",
@ -55,8 +51,9 @@
},
"eslintConfig": {
"extends": [
"react-app",
"react-app/jest"
"eslint:recommended",
"plugin:@typescript-eslint/recommended",
"plugin:react-hooks/recommended"
]
},
"browserslist": {
@ -74,21 +71,33 @@
"devDependencies": {
"@iconify-json/material-symbols": "^1.2.33",
"@iconify/utils": "^3.0.1",
"@playwright/test": "^1.40.0",
"@types/node": "^24.2.1",
"@types/react": "^19.1.4",
"@types/react-dom": "^19.1.5",
"@vitejs/plugin-react": "^4.5.0",
"@vitest/coverage-v8": "^1.0.0",
"jsdom": "^23.0.0",
"@playwright/test": "^1.55.0",
"@testing-library/dom": "^10.4.1",
"@testing-library/jest-dom": "^6.8.0",
"@testing-library/react": "^16.3.0",
"@testing-library/user-event": "^14.6.1",
"@types/node": "^24.3.0",
"@types/react": "^19.1.12",
"@types/react-dom": "^19.1.9",
"@typescript-eslint/eslint-plugin": "^8.42.0",
"@typescript-eslint/parser": "^8.42.0",
"@vitejs/plugin-react-swc": "^4.0.1",
"@vitest/coverage-v8": "^3.2.4",
"eslint": "^9.34.0",
"eslint-plugin-react-hooks": "^5.2.0",
"jsdom": "^26.1.0",
"license-checker": "^25.0.1",
"madge": "^8.0.0",
"postcss": "^8.5.3",
"postcss-cli": "^11.0.1",
"postcss-preset-mantine": "^1.17.0",
"postcss-simple-vars": "^7.0.1",
"typescript": "^5.8.3",
"vite": "^6.3.5",
"vitest": "^1.0.0"
"postcss": "^8.5.6",
"typescript": "^5.9.2",
"vite": "^7.1.4",
"vitest": "^3.2.4"
},
"depcheck": {
"ignoreMatches": [
"@emotion/*",
"tailwindcss",
"@testing-library/user-event",
"@vitest/coverage-v8"
]
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,47 +1,57 @@
#!/usr/bin/env node
const { execSync } = require('child_process');
const fs = require('fs');
const path = require('path');
import { execSync } from 'child_process';
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
import { join, dirname } from 'path';
import { fileURLToPath } from 'node:url';
import { argv } from 'node:process';
// ESM has no __dirname; derive it from import.meta.url
const __dirname = dirname(fileURLToPath(import.meta.url));
const inputIdx = argv.indexOf('--input');
const INPUT_FILE = inputIdx > -1 ? argv[inputIdx + 1] : null;
const POSTPROCESS_ONLY = !!INPUT_FILE;
/**
* Generate 3rd party licenses for frontend dependencies
* This script creates a JSON file similar to the Java backend's 3rdPartyLicenses.json
*/
const OUTPUT_FILE = path.join(__dirname, '..', 'src', 'assets', '3rdPartyLicenses.json');
const PACKAGE_JSON = path.join(__dirname, '..', 'package.json');
const OUTPUT_FILE = join(__dirname, '..', 'src', 'assets', '3rdPartyLicenses.json');
const PACKAGE_JSON = join(__dirname, '..', 'package.json');
// Ensure the output directory exists
const outputDir = path.dirname(OUTPUT_FILE);
if (!fs.existsSync(outputDir)) {
fs.mkdirSync(outputDir, { recursive: true });
const outputDir = dirname(OUTPUT_FILE);
if (!existsSync(outputDir)) {
mkdirSync(outputDir, { recursive: true });
}
console.log('🔍 Generating frontend license report...');
try {
// Install license-checker if not present
try {
require.resolve('license-checker');
} catch (e) {
console.log('📦 Installing license-checker...');
execSync('npm install --save-dev license-checker', { stdio: 'inherit' });
// Safety guard: don't run this script on fork PRs (the workflow sets PR_IS_FORK)
if (process.env.PR_IS_FORK === 'true' && !POSTPROCESS_ONLY) {
console.error('Fork PR detected: only --input (postprocess-only) mode is allowed.');
process.exit(2);
}
// Generate license report using license-checker (more reliable)
const licenseReport = execSync('npx license-checker --production --json', {
encoding: 'utf8',
cwd: path.dirname(PACKAGE_JSON)
});
let licenseData;
try {
licenseData = JSON.parse(licenseReport);
} catch (parseError) {
console.error('❌ Failed to parse license data:', parseError.message);
console.error('Raw output:', licenseReport.substring(0, 500) + '...');
process.exit(1);
// Generate license report using pinned license-checker; disable lifecycle scripts
if (POSTPROCESS_ONLY) {
licenseData = JSON.parse(readFileSync(INPUT_FILE, 'utf8'));
} else {
const licenseReport = execSync(
'npx --yes license-checker@25.0.1 --production --json',
{
encoding: 'utf8',
cwd: dirname(PACKAGE_JSON),
env: { ...process.env, NPM_CONFIG_IGNORE_SCRIPTS: 'true' }
}
);
try {
licenseData = JSON.parse(licenseReport);
} catch (parseError) {
console.error('❌ Failed to parse license data:', parseError.message);
console.error('Raw output:', licenseReport.substring(0, 500) + '...');
process.exit(1);
}
}
if (!licenseData || typeof licenseData !== 'object') {
@ -152,8 +162,8 @@ try {
});
// Write license warnings to a separate file for CI/CD
const warningsFile = path.join(__dirname, '..', 'src', 'assets', 'license-warnings.json');
fs.writeFileSync(warningsFile, JSON.stringify({
const warningsFile = join(__dirname, '..', 'src', 'assets', 'license-warnings.json');
writeFileSync(warningsFile, JSON.stringify({
warnings: problematicLicenses,
generated: new Date().toISOString()
}, null, 2));
@ -163,7 +173,7 @@ try {
}
// Write to file
fs.writeFileSync(OUTPUT_FILE, JSON.stringify(transformedData, null, 4));
writeFileSync(OUTPUT_FILE, JSON.stringify(transformedData, null, 4));
console.log(`✅ License report generated successfully!`);
console.log(`📄 Found ${transformedData.dependencies.length} dependencies`);
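The reworked script now runs in two modes, both exercised by the license workflow above. A minimal usage sketch, assuming it is invoked from the frontend directory with paths matching the workflow steps:

    # Normal mode: scan production dependencies and write src/assets/3rdPartyLicenses.json
    npm run generate-licenses

    # Postprocess-only mode (fork PRs): reuse a pre-generated license-checker report
    npx --yes license-checker@25.0.1 --production --json > src/assets/3rdPartyLicenses.json
    node scripts/generate-licenses.js --input src/assets/3rdPartyLicenses.json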

View File

@ -15,4 +15,6 @@ declare module '../assets/material-symbols-icons.json' {
height?: number;
};
export default value;
}
}
declare module 'pdfjs-dist/legacy/build/pdf.mjs'

View File

@ -1,5 +1,5 @@
import { useState, useEffect } from 'react';
import * as pdfjsLib from 'pdfjs-dist';
import * as pdfjsLib from 'pdfjs-dist/legacy/build/pdf.mjs';
import { pdfWorkerManager } from '../services/pdfWorkerManager';
export interface PdfSignatureDetectionResult {
@ -25,7 +25,7 @@ export const usePdfSignatureDetection = (files: File[]): PdfSignatureDetectionRe
for (const file of files) {
const arrayBuffer = await file.arrayBuffer();
try {
const pdf = await pdfWorkerManager.createDocument(arrayBuffer);
@ -41,7 +41,7 @@ export const usePdfSignatureDetection = (files: File[]): PdfSignatureDetectionRe
if (foundSignature) break;
}
// Clean up PDF document using worker manager
pdfWorkerManager.destroyDocument(pdf);
} catch (error) {
@ -65,4 +65,4 @@ export const usePdfSignatureDetection = (files: File[]): PdfSignatureDetectionRe
hasDigitalSignatures,
isChecking
};
};
};

View File

@ -1,4 +1,4 @@
import * as pdfjsLib from 'pdfjs-dist';
import * as pdfjsLib from 'pdfjs-dist/legacy/build/pdf.mjs';
import { ProcessedFile, ProcessingState, PDFPage, ProcessingStrategy, ProcessingConfig, ProcessingMetrics } from '../types/processing';
import { ProcessingCache } from './processingCache';
import { FileHasher } from '../utils/fileHash';
@ -182,7 +182,7 @@ export class EnhancedPDFProcessingService {
): Promise<ProcessedFile> {
const arrayBuffer = await file.arrayBuffer();
const pdf = await pdfWorkerManager.createDocument(arrayBuffer);
try {
const totalPages = pdf.numPages;

View File

@ -4,7 +4,7 @@
* Called when files are added to FileContext, before any view sees them
*/
import * as pdfjsLib from 'pdfjs-dist';
import * as pdfjsLib from 'pdfjs-dist/legacy/build/pdf.mjs';
import { generateThumbnailForFile } from '../utils/thumbnailUtils';
import { pdfWorkerManager } from './pdfWorkerManager';
import { FileId } from '../types/file';

View File

@ -130,7 +130,7 @@ export class PDFExportService {
newDoc.setModificationDate(new Date());
const pdfBytes = await newDoc.save();
return new Blob([pdfBytes], { type: 'application/pdf' });
return new Blob([new Uint8Array(pdfBytes)], { type: 'application/pdf' });
}
/**
@ -176,7 +176,7 @@ export class PDFExportService {
newDoc.setModificationDate(new Date());
const pdfBytes = await newDoc.save();
return new Blob([pdfBytes], { type: 'application/pdf' });
return new Blob([new Uint8Array(pdfBytes)], { type: 'application/pdf' });
}

View File

@ -1,12 +1,12 @@
/**
* PDF.js Worker Manager - Centralized worker lifecycle management
*
*
* Prevents infinite worker creation by managing PDF.js workers globally
* and ensuring proper cleanup when operations complete.
*/
import * as pdfjsLib from 'pdfjs-dist';
const { getDocument, GlobalWorkerOptions } = pdfjsLib;
import { GlobalWorkerOptions, getDocument } from 'pdfjs-dist/legacy/build/pdf.mjs';
class PDFWorkerManager {
private static instance: PDFWorkerManager;
@ -31,7 +31,10 @@ class PDFWorkerManager {
*/
private initializeWorker(): void {
if (!this.isInitialized) {
GlobalWorkerOptions.workerSrc = '/pdf.worker.js';
GlobalWorkerOptions.workerSrc = new URL(
'pdfjs-dist/legacy/build/pdf.worker.min.mjs',
import.meta.url
).toString();
this.isInitialized = true;
}
}
@ -86,7 +89,7 @@ class PDFWorkerManager {
const pdf = await loadingTask.promise;
this.activeDocuments.add(pdf);
this.workerCount++;
return pdf;
} catch (error) {
// If document creation fails, make sure to clean up the loading task
@ -125,7 +128,7 @@ class PDFWorkerManager {
documentsToDestroy.forEach(pdf => {
this.destroyDocument(pdf);
});
this.activeDocuments.clear();
this.workerCount = 0;
}
@ -168,7 +171,7 @@ class PDFWorkerManager {
} catch (error) {
}
});
this.activeDocuments.clear();
this.workerCount = 0;
}
@ -182,4 +185,4 @@ class PDFWorkerManager {
}
// Export singleton instance
export const pdfWorkerManager = PDFWorkerManager.getInstance();
export const pdfWorkerManager = PDFWorkerManager.getInstance();

View File

@ -1,5 +1,5 @@
import { defineConfig } from 'vite';
import react from '@vitejs/plugin-react';
import react from '@vitejs/plugin-react-swc';
export default defineConfig({
plugins: [react()],

View File

@ -1,5 +1,5 @@
import { defineConfig } from 'vitest/config'
import react from '@vitejs/plugin-react'
import react from '@vitejs/plugin-react-swc';
export default defineConfig({
plugins: [react()],
@ -37,4 +37,4 @@ export default defineConfig({
'@': '/src'
}
}
})
})