Mirror of https://github.com/Frooodle/Stirling-PDF.git (synced 2025-04-08 01:16:26 +02:00)
Implemented the new shared traverser in the browser
parent 26cdb1d04f
commit f6b4479200
@@ -1,13 +1,13 @@
 import PDFLib from 'pdf-lib';
-import * as pdfcpuWraopper from "../public/wasm/pdfcpu-wrapper-node.js";
+import * as pdfcpuWraopper from "./public/wasm/pdfcpu-wrapper-node.js";
 
-import { extractPages as dependantExtractPages } from "../public/functions/extractPages.js";
-import { impose as dependantImpose } from '../public/functions/impose.js';
-import { mergePDFs as dependantMergePDFs } from '../public/functions/mergePDFs.js';
-import { rotatePages as dependantRotatePages } from '../public/functions/rotatePages.js';
-import { scaleContent as dependantScaleContent} from '../public/functions/scaleContent.js';
-import { scalePage as dependantScalePage } from '../public/functions/scalePage.js';
-import { splitPDF as dependantSplitPDF } from '../public/functions/splitPDF.js';
+import { extractPages as dependantExtractPages } from "./public/functions/extractPages.js";
+import { impose as dependantImpose } from './public/functions/impose.js';
+import { mergePDFs as dependantMergePDFs } from './public/functions/mergePDFs.js';
+import { rotatePages as dependantRotatePages } from './public/functions/rotatePages.js';
+import { scaleContent as dependantScaleContent} from './public/functions/scaleContent.js';
+import { scalePage as dependantScalePage } from './public/functions/scalePage.js';
+import { splitPDF as dependantSplitPDF } from './public/functions/splitPDF.js';
 
 export async function extractPages(snapshot, pagesToExtractArray) {
     return dependantExtractPages(snapshot, pagesToExtractArray, PDFLib);
public/functions.js  (new file, 38 lines)
@@ -0,0 +1,38 @@
+// PDFLib gets importet via index.html script-tag
+import * as pdfcpuWraopper from "./wasm/pdfcpu-wrapper-browser.js";
+
+import { extractPages as dependantExtractPages } from "./functions/extractPages.js";
+import { impose as dependantImpose } from './functions/impose.js';
+import { mergePDFs as dependantMergePDFs } from './functions/mergePDFs.js';
+import { rotatePages as dependantRotatePages } from './functions/rotatePages.js';
+import { scaleContent as dependantScaleContent} from './functions/scaleContent.js';
+import { scalePage as dependantScalePage } from './functions/scalePage.js';
+import { splitPDF as dependantSplitPDF } from './functions/splitPDF.js';
+
+export async function extractPages(snapshot, pagesToExtractArray) {
+    return dependantExtractPages(snapshot, pagesToExtractArray, PDFLib);
+}
+
+export async function impose(snapshot, nup, format) {
+    return dependantImpose(snapshot, nup, format, pdfcpuWraopper);
+}
+
+export async function mergePDFs(snapshots) {
+    return dependantMergePDFs(snapshots, PDFLib);
+}
+
+export async function rotatePages(snapshot, rotation) {
+    return dependantRotatePages(snapshot, rotation, PDFLib);
+}
+
+export async function scaleContent(snapshot, scaleFactor) {
+    return dependantScaleContent(snapshot, scaleFactor, PDFLib);
+}
+
+export async function scalePage(snapshot, pageSize) {
+    return dependantScalePage(snapshot, pageSize, PDFLib);
+}
+
+export async function splitPDF(snapshot, splitAfterPageArray) {
+    return dependantSplitPDF(snapshot, splitAfterPageArray, PDFLib);
+}
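Both functions.js variants expose the same operation surface; they differ only in where PDFLib and the pdfcpu wrapper come from (the npm import plus pdfcpu-wrapper-node.js on the server, the index.html script tag plus pdfcpu-wrapper-browser.js in the browser), and those platform pieces are injected into the shared implementations under functions/. A minimal sketch of that injection pattern, with an assumed scalePage body (the signature matches the diff above, but the implementation shown here is illustrative, not the repository's):

// Illustrative sketch of a shared implementation under functions/: the caller
// supplies PDFLib, so the same code runs on Node and in the browser.
export async function scalePage(snapshot, pageSize, PDFLib) {
    // snapshot: bytes of the input PDF; PDFLib: pdf-lib, injected by the caller.
    const pdfDoc = await PDFLib.PDFDocument.load(snapshot);
    for (const page of pdfDoc.getPages()) {
        page.setSize(pageSize.width, pageSize.height); // resize every page (assumed behaviour)
    }
    return pdfDoc.save(); // serialized PDF bytes
}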
@@ -2,12 +2,15 @@ import { scaleContent } from "./functions/scaleContent.js";
 import { scalePage, PageSize } from "./functions/scalePage.js";
 import * as exampleWorkflows from "./exampleWorkflows.js";
 import { traverseOperations } from "./traverseOperations.js";
+import * as Functions from "./functions.js";
 
 (async (workflow) => {
     const pdfFileInput = document.getElementById('pdfFile');
     const doneButton = document.getElementById("doneButton");
 
     doneButton.addEventListener('click', async (e) => {
+        console.log("Starting...");
+
         const files = Array.from(pdfFileInput.files);
         console.log(files);
         const inputs = await Promise.all(files.map(async file => {
@@ -19,10 +22,21 @@ import { traverseOperations } from "./traverseOperations.js";
         }));
         console.log(inputs);
 
         // TODO: This can also be run serverside
-        const results = await traverseOperations(workflow.operations, inputs);
+        const traverse = traverseOperations(workflow.operations, inputs, Functions);
+        let pdfResults;
+        let iteration;
+        while (true) {
+            iteration = await traverse.next();
+            if (iteration.done) {
+                pdfResults = iteration.value;
+                console.log(`data: processing done\n\n`);
+                break;
+            }
+            console.log(`data: ${iteration.value}\n\n`);
+        }
 
-        results.forEach(result => {
+        pdfResults.forEach(result => {
             download(result.buffer, result.fileName, "application/pdf");
         });
     });
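The click handler now consumes traverseOperations as an async generator: every yield is a progress string, while the finished PDF array only arrives as the generator's return value, which is why the loop calls traverse.next() and checks iteration.done instead of using for await...of (which would discard the returned results). A self-contained sketch of the same consumption pattern, using a stand-in generator rather than the real traverser:

// Stand-in generator, not the repository's traverseOperations; it only shows
// why the caller reads both yielded values and the final return value.
async function* fakeTraverse() {
    yield "Starting: extract";
    yield "Starting: merge";
    return [{ fileName: "out.pdf", buffer: new Uint8Array() }]; // results are returned, not yielded
}

(async () => {
    const traverse = fakeTraverse();
    let iteration;
    while (true) {
        iteration = await traverse.next();
        if (iteration.done) {
            console.log("final results:", iteration.value); // for await...of would drop this value
            break;
        }
        console.log("progress:", iteration.value);
    }
})();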
@@ -1,207 +1,133 @@
-import { extractPages } from "./functions/extractPages.js";
-import { impose } from "./functions/impose.js";
-import { mergePDFs } from "./functions/mergePDFs.js";
-import { rotatePages } from "./functions/rotatePDF.js";
-import { splitPDF } from "./functions/splitPDF.js";
 import { organizeWaitOperations } from "./organizeWaitOperations.js";
 
-// TODO: Make this run with feedback like the server side func & The serverside func also got now functionality & fixes for split, so copy it and make it work with the new readsteam system on frontend
-export async function traverseOperations(operations, input) {
+export async function * traverseOperations(operations, input, Functions) {
     const waitOperations = organizeWaitOperations(operations);
-    const results = [];
-    await nextOperation(operations, input);
+    let results = [];
+    yield* nextOperation(operations, input);
+    console.log("Done2");
     return results;
 
-    async function nextOperation(operations, input) {
+    async function * nextOperation(operations, input) {
         if(Array.isArray(operations) && operations.length == 0) { // isEmpty
-            console.log("operation done: " + input.fileName);
-            results.push(input);
-            return;
+            if(Array.isArray(input)) {
+                console.log("operation done: " + input[0].fileName + input.length > 1 ? "+" : "");
+                results = results.concat(input);
+                return;
+            }
+            else {
+                console.log("operation done: " + input.fileName);
+                results.push(input);
+                return;
+            }
         }
 
         for (let i = 0; i < operations.length; i++) {
-            await computeOperation(operations[i], structuredClone(input)); // break references
+            yield* computeOperation(operations[i], structuredClone(input));
         }
     }
 
-    async function computeOperation(operation, input) {
+    async function * computeOperation(operation, input) {
+        yield "Starting: " + operation.type;
         switch (operation.type) {
-            case "done":
-                console.log("Done operation will get called if all waits are done. Skipping for now.")
+            case "done": // Skip this, because it is a valid node.
                 break;
             case "wait":
                 const waitOperation = waitOperations[operation.values.id];
-                waitOperation.input.push(input);
+
+                if(Array.isArray(input)) {
+                    waitOperation.input.concat(input); // TODO: May have unexpected concequences. Needs further testing!
+                }
+                else {
+                    waitOperation.input.push(input);
+                }
+
                 waitOperation.waitCount--;
                 if(waitOperation.waitCount == 0) {
-                    await nextOperation(waitOperation.doneOperation.operations, waitOperation.input);
-                }
-                break;
-            case "removeObjects":
-                console.warn("RemoveObjects not implemented yet.")
-
-                if(Array.isArray(input)) {
-                    for (let i = 0; i < input.length; i++) {
-                        // TODO: modfiy input
-                        input[i].fileName += "_removedObjects";
-                        await nextOperation(operation.operations, input[i]);
-                    }
-                }
-                else {
-                    // TODO: modfiy input
-                    input.fileName += "_removedObjects";
-                    await nextOperation(operation.operations, input);
+                    yield* nextOperation(waitOperation.doneOperation.operations, waitOperation.input);
                 }
                 break;
             case "extract":
-                if(Array.isArray(input)) {
-                    for (let i = 0; i < input.length; i++) {
-                        input[i].fileName += "_extractedPages";
-                        input[i].buffer = await extractPages(input[i].buffer, operation.values["pagesToExtractArray"]);
-                        await nextOperation(operation.operations, input[i]);
-                    }
-                }
-                else {
+                yield* nToN(input, operation, async (input) => {
                     input.fileName += "_extractedPages";
-                    input.buffer = await extractPages(input.buffer, operation.values["pagesToExtractArray"]);
-                    await nextOperation(operation.operations, input);
-                }
-                break;
-            case "split":
-                // TODO: When a split goes into a wait function it might break the done condition, as it will count multiplpe times.
-                if(Array.isArray(input)) {
-                    for (let i = 0; i < input.length; i++) {
-                        const splits = await splitPDF(input[i].buffer, operation.values["pagesToSplitAfterArray"]);
-
-                        for (let j = 0; j < splits.length; j++) {
-                            const split = {};
-                            split.originalFileName = input[i].originalFileName;
-                            split.fileName = input[i].fileName + "_split";
-                            split.buffer = splits[j];
-                            await nextOperation(operation.operations, split);
-                        }
-                    }
-                }
-                else {
-                    const splits = await splitPDF(input.buffer, operation.values["pagesToSplitAfterArray"]);
-
-                    for (let j = 0; j < splits.length; j++) {
-                        const split = {};
-                        split.originalFileName = input.originalFileName;
-                        split.fileName = input.fileName + "_split";
-                        split.buffer = splits[j];
-                        await nextOperation(operation.operations, split);
-                    }
-                }
-                break;
-            case "fillField":
-                console.warn("FillField not implemented yet.")
-
-                if(Array.isArray(input)) {
-                    for (let i = 0; i < input.length; i++) {
-                        // TODO: modfiy input
-                        input[i].fileName += "_filledField";
-                        await nextOperation(operation.operations, input[i]);
-                    }
-                }
-                else {
-                    // TODO: modfiy input
-                    input.fileName += "_filledField";
-                    await nextOperation(operation.operations, input);
-                }
-                break;
-            case "extractImages":
-                console.warn("ExtractImages not implemented yet.")
-
-                if(Array.isArray(input)) {
-                    for (let i = 0; i < input.length; i++) {
-                        // TODO: modfiy input
-                        input[i].fileName += "_extractedImages";
-                        await nextOperation(operation.operations, input[i]);
-                    }
-                }
-                else {
-                    // TODO: modfiy input
-                    input.fileName += "_extractedImages";
-                    await nextOperation(operation.operations, input);
-                }
-                break;
-            case "merge":
-                if(Array.isArray(input) && input.length > 1) {
-                    const inputs = input;
-                    input = {
-                        originalFileName: inputs.map(input => input.originalFileName).join("_and_"),
-                        fileName: inputs.map(input => input.fileName).join("_and_") + "_merged",
-                        buffer: await mergePDFs(inputs.map(input => input.buffer))
-                    }
-                }
-                else {
-                    // Only one input, no need to merge
-                    input.fileName += "_merged";
-                }
-                await nextOperation(operation.operations, input);
-                break;
-            case "transform": {
-                console.warn("Transform not implemented yet.")
-                if(Array.isArray(input)) {
-                    for (let i = 0; i < input.length; i++) {
-                        // TODO: modfiy input
-                        input[i].fileName += "_transformed";
-                        await nextOperation(operation.operations, input[i]);
-                    }
-                }
-                else {
-                    // TODO: modfiy input
-                    input.fileName += "_transformed";
-                    await nextOperation(operation.operations, input);
-                }
-                break;
-            }
-            case "extract":
-                if(Array.isArray(input)) {
-                    for (let i = 0; i < input.length; i++) {
-                        input[i].fileName += "_extractedPages";
-                        input[i].buffer = await extractPages(input[i].buffer, operation.values["pagesToExtractArray"]);
-                        await nextOperation(operation.operations, input[i]);
-                    }
-                }
-                else {
-                    input.fileName += "_extractedPages";
-                    input.buffer = await extractPages(input.buffer, operation.values["pagesToExtractArray"]);
-                    await nextOperation(operation.operations, input);
-                }
-                break;
-            case "rotate":
-                if(Array.isArray(input)) {
-                    for (let i = 0; i < input.length; i++) {
-                        input[i].fileName += "_turned";
-                        input[i].buffer = await rotatePages(input[i].buffer, operation.values["rotation"]);
-                        await nextOperation(operation.operations, input[i]);
-                    }
-                }
-                else {
-                    input.fileName += "_turned";
-                    input.buffer = await rotatePages(input.buffer, operation.values["rotation"]);
-                    await nextOperation(operation.operations, input);
-                }
+                    input.buffer = await Functions.extractPages(input.buffer, operation.values["pagesToExtractArray"]);
+                });
                 break;
             case "impose":
-                if(Array.isArray(input)) {
-                    for (let i = 0; i < input.length; i++) {
-                        input[i].fileName += "_imposed";
-                        input[i].buffer = await impose(input[i].buffer, operation.values["nup"], operation.values["format"]);
-                        await nextOperation(operation.operations, input[i]);
-                    }
-                }
-                else {
+                yield* nToN(input, operation, async (input) => {
                     input.fileName += "_imposed";
-                    input.buffer = await impose(input.buffer, operation.values["nup"], operation.values["format"]);
-                    await nextOperation(operation.operations, input);
-                }
+                    input.buffer = await Functions.impose(input.buffer, operation.values["nup"], operation.values["format"]);
+                });
+                break;
+            case "merge":
+                yield* nToOne(input, operation, async (inputs) => {
+                    return {
+                        originalFileName: inputs.map(input => input.originalFileName).join("_and_"),
+                        fileName: inputs.map(input => input.fileName).join("_and_") + "_merged",
+                        buffer: await Functions.mergePDFs(inputs.map(input => input.buffer))
+                    }
+                });
+                break;
+            case "rotate":
+                yield* nToN(input, operation, async (input) => {
+                    input.fileName += "_turned";
+                    input.buffer = await Functions.rotatePages(input.buffer, operation.values["rotation"]);
+                });
+                break;
+            case "split":
+                // TODO: A split might break the done condition, it may count multiple times. Needs further testing!
+                yield* oneToN(input, operation, async (input) => {
+                    const splitResult = await Functions.splitPDF(input.buffer, operation.values["pagesToSplitAfterArray"]);
+
+                    const splits = [];
+                    for (let j = 0; j < splitResult.length; j++) {
+                        splits.push({
+                            originalFileName: input.originalFileName,
+                            fileName: input.fileName + "_split" + j,
+                            buffer: splitResult[j]
+                        })
+                    }
+
+                    input = splits;
+                });
                 break;
             default:
-                console.log("operation type unknown: ", operation.type);
+                throw new Error(`${operation.type} not implemented yet.`);
                 break;
         }
     }
+
+    async function * nToOne(inputs, operation, callback) {
+        if(!Array.isArray(inputs)) {
+            inputs = [inputs];
+        }
+
+        inputs = await callback(inputs);
+        yield* nextOperation(operation.operations, inputs);
+    }
+
+    async function * oneToN(input, operation, callback) {
+        if(Array.isArray(input)) {
+            for (let i = 0; i < input.length; i++) {
+                await callback(input[i]);
+            }
+            yield* nextOperation(operation.operations, input);
+        }
+        else {
+            await callback(input);
+            yield* nextOperation(operation.operations, input);
+        }
+    }
+
+    async function * nToN(input, operation, callback) {
+        if(Array.isArray(input)) {
+            for (let i = 0; i < input.length; i++) {
+                await callback(input[i]);
+            }
+            yield* nextOperation(operation.operations, input);
+        }
+        else {
+            await callback(input);
+            yield* nextOperation(operation.operations, input);
+        }
+    }
 }
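The per-case array juggling of the old file is folded into three helpers with distinct shapes: nToN applies the callback to each input in place, oneToN is meant to let one input fan out into several (split), and nToOne collapses many inputs into one (merge); each then hands control back to nextOperation with yield*, so progress strings from nested operations bubble up through a single generator chain. A reduced sketch of that yield* delegation, with toy steps instead of the real operations:

// Toy steps, not the repository's operations: the point is only that yield*
// forwards nested progress messages and passes return values back up.
async function* step(name, value) {
    yield `Starting: ${name}`;
    return value + 1;
}

async function* pipeline(value) {
    value = yield* step("extract", value); // messages from step() surface here unchanged
    value = yield* step("merge", value);
    return value;
}

(async () => {
    const gen = pipeline(0);
    let it;
    while (!(it = await gen.next()).done) {
        console.log(it.value); // "Starting: extract", then "Starting: merge"
    }
    console.log("result:", it.value); // 2, the generator's return value
})();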
@@ -75,7 +75,7 @@ export async function impose(snapshot, nup, format) {
 export async function oneToOne(wasmArray, snapshot) {
     await loadFileAsync(Buffer.from(snapshot));
 
-    console.error("Nuping File");
+    console.log("Nuping File");
     let exitcode = await runWasm(wasmArray);
 
     if (exitcode !== 0) {
@@ -3,7 +3,8 @@ import crypto from 'crypto';
 import stream from "stream";
 import Archiver from 'archiver';
 
-import { traverseOperations } from "../../traverseOperations.js";
+import * as Functions from "../../functions.js";
+import { traverseOperations } from "../../public/traverseOperations.js";
 
 const activeWorkflows = {};
 
@@ -38,7 +39,7 @@ router.post("/:workflowUuid?", [
         if(req.body.async === "false") {
             console.log("Don't do async");
 
-            const traverse = traverseOperations(workflow.operations, inputs);
+            const traverse = traverseOperations(workflow.operations, inputs, Functions);
 
             let pdfResults;
             let iteration;
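The synchronous branch above drains the generator and only logs the progress strings, but their `data: ...\n\n` framing suggests they are intended for a Server-Sent Events stream in the async path. A hedged sketch of how such streaming could look on the Express side (the function name, the response handling, and the already-resolved workflow and inputs are assumptions, not code from this commit):

import * as Functions from "../../functions.js";
import { traverseOperations } from "../../public/traverseOperations.js";

// Illustrative only: forward each yielded progress string to the client as an
// SSE event. `res` is assumed to be an Express/Node response; `workflow` and
// `inputs` are assumed to be resolved the same way as in the route above.
async function streamWorkflowProgress(res, workflow, inputs) {
    res.setHeader("Content-Type", "text/event-stream");
    res.setHeader("Cache-Control", "no-cache");

    const traverse = traverseOperations(workflow.operations, inputs, Functions);
    while (true) {
        const iteration = await traverse.next();
        if (iteration.done) {
            res.write("data: processing done\n\n");
            res.end();
            return iteration.value; // the finished PDF results
        }
        res.write(`data: ${iteration.value}\n\n`);
    }
}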
@@ -1,134 +0,0 @@
-import * as Functions from "./functions/index.js";
-import { organizeWaitOperations } from "./public/organizeWaitOperations.js";
-
-export async function * traverseOperations(operations, input) {
-    const waitOperations = organizeWaitOperations(operations);
-    let results = [];
-    yield* nextOperation(operations, input);
-    console.log("Done2");
-    return results;
-
-    async function * nextOperation(operations, input) {
-        if(Array.isArray(operations) && operations.length == 0) { // isEmpty
-            if(Array.isArray(input)) {
-                console.log("operation done: " + input[0].fileName + input.length > 1 ? "+" : "");
-                results = results.concat(input);
-                return;
-            }
-            else {
-                console.log("operation done: " + input.fileName);
-                results.push(input);
-                return;
-            }
-        }
-
-        for (let i = 0; i < operations.length; i++) {
-            yield* computeOperation(operations[i], structuredClone(input));
-        }
-    }
-
-    async function * computeOperation(operation, input) {
-        yield "Starting: " + operation.type;
-        switch (operation.type) {
-            case "done": // Skip this, because it is a valid node.
-                break;
-            case "wait":
-                const waitOperation = waitOperations[operation.values.id];
-
-                if(Array.isArray(input)) {
-                    waitOperation.input.concat(input); // TODO: May have unexpected concequences. Needs further testing!
-                }
-                else {
-                    waitOperation.input.push(input);
-                }
-
-                waitOperation.waitCount--;
-                if(waitOperation.waitCount == 0) {
-                    yield* nextOperation(waitOperation.doneOperation.operations, waitOperation.input);
-                }
-                break;
-            case "extract":
-                yield* nToN(input, operation, async (input) => {
-                    input.fileName += "_extractedPages";
-                    input.buffer = await Functions.extractPages(input.buffer, operation.values["pagesToExtractArray"]);
-                });
-                break;
-            case "impose":
-                yield* nToN(input, operation, async (input) => {
-                    input.fileName += "_imposed";
-                    input.buffer = await Functions.impose(input.buffer, operation.values["nup"], operation.values["format"]);
-                });
-                break;
-            case "merge":
-                yield* nToOne(input, operation, async (inputs) => {
-                    return {
-                        originalFileName: inputs.map(input => input.originalFileName).join("_and_"),
-                        fileName: inputs.map(input => input.fileName).join("_and_") + "_merged",
-                        buffer: await Functions.mergePDFs(inputs.map(input => input.buffer))
-                    }
-                });
-                break;
-            case "rotate":
-                yield* nToN(input, operation, async (input) => {
-                    input.fileName += "_turned";
-                    input.buffer = await Functions.rotatePages(input.buffer, operation.values["rotation"]);
-                });
-                break;
-            case "split":
-                // TODO: A split might break the done condition, it may count multiple times. Needs further testing!
-                yield* oneToN(input, operation, async (input) => {
-                    const splitResult = await Functions.splitPDF(input.buffer, operation.values["pagesToSplitAfterArray"]);
-
-                    const splits = [];
-                    for (let j = 0; j < splitResult.length; j++) {
-                        splits.push({
-                            originalFileName: input.originalFileName,
-                            fileName: input.fileName + "_split" + j,
-                            buffer: splitResult[j]
-                        })
-                    }
-
-                    input = splits;
-                });
-                break;
-            default:
-                throw new Error(`${operation.type} not implemented yet.`);
-                break;
-        }
-    }
-
-    async function * nToOne(inputs, operation, callback) {
-        if(!Array.isArray(inputs)) {
-            inputs = [inputs];
-        }
-
-        inputs = await callback(inputs);
-        yield* nextOperation(operation.operations, inputs);
-    }
-
-    async function * oneToN(input, operation, callback) {
-        if(Array.isArray(input)) {
-            for (let i = 0; i < input.length; i++) {
-                await callback(input[i]);
-            }
-            yield* nextOperation(operation.operations, input);
-        }
-        else {
-            await callback(input);
-            yield* nextOperation(operation.operations, input);
-        }
-    }
-
-    async function * nToN(input, operation, callback) {
-        if(Array.isArray(input)) {
-            for (let i = 0; i < input.length; i++) {
-                await callback(input[i]);
-            }
-            yield* nextOperation(operation.operations, input);
-        }
-        else {
-            await callback(input);
-            yield* nextOperation(operation.operations, input);
-        }
-    }
-}