Anthony Stirling 2025-10-12 14:06:13 +01:00
parent b54beaa66b
commit 7eafe68e58
39 changed files with 1783 additions and 171 deletions

View File

@ -87,7 +87,8 @@ public class AutoJobAspect {
},
timeout,
queueable,
resourceWeight);
resourceWeight,
trackProgress);
} else {
// Use retry logic
return executeWithRetries(
@ -220,7 +221,8 @@ public class AutoJobAspect {
},
timeout,
queueable,
resourceWeight);
resourceWeight,
trackProgress);
}
/**

View File

@ -0,0 +1,44 @@
package stirling.software.common.context;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
/**
* Holds contextual information for the currently executing job. Backed by a {@link ThreadLocal} so
* worker threads can retrieve the job ID and progress tracking preference while processing
* asynchronous work dispatched by {@link stirling.software.common.service.JobExecutorService
* JobExecutorService}.
*/
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class JobContextHolder {
private static final ThreadLocal<String> JOB_ID = new ThreadLocal<>();
private static final ThreadLocal<Boolean> PROGRESS_ENABLED = new ThreadLocal<>();
/** Store context for the current thread. */
public static void setContext(String jobId, boolean progressEnabled) {
if (jobId == null) {
clear();
return;
}
JOB_ID.set(jobId);
PROGRESS_ENABLED.set(progressEnabled);
}
/** Get the job ID bound to the current thread, or {@code null} if none. */
public static String getJobId() {
return JOB_ID.get();
}
/** Whether progress tracking is enabled for the current job (defaults to {@code false}). */
public static boolean isProgressEnabled() {
Boolean enabled = PROGRESS_ENABLED.get();
return enabled != null && enabled;
}
/** Remove all context associated with the current thread. */
public static void clear() {
JOB_ID.remove();
PROGRESS_ENABLED.remove();
}
}
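For orientation, a minimal sketch of the bind/restore pattern callers of this holder are expected to follow (in this commit, JobExecutorService.withJobContext and runWithJobContext do exactly this wrapping); the job ID "job-123" is a placeholder:

// Illustrative only; the executor's withJobContext helper performs this wrapping.
Runnable task = () -> {
    String previousJobId = JobContextHolder.getJobId();
    boolean previousProgress = JobContextHolder.isProgressEnabled();
    JobContextHolder.setContext("job-123", true);
    try {
        // Downstream code (e.g. JobProgressService) can now read the bound job.
        String currentJob = JobContextHolder.getJobId();           // "job-123"
        boolean progressOn = JobContextHolder.isProgressEnabled(); // true
    } finally {
        // Restore the previous context (or clear) so pooled threads do not leak state.
        if (previousJobId == null) {
            JobContextHolder.clear();
        } else {
            JobContextHolder.setContext(previousJobId, previousProgress);
        }
    }
};
task.run();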

View File

@ -41,6 +41,18 @@ public class JobResult {
/** The actual result object, if not a file */
private Object result;
/** Whether detailed progress tracking is enabled for this job. */
@Builder.Default private boolean trackProgress = true;
/** Most recent percentage update (0-100) if progress tracking is enabled. */
private Integer progressPercent;
/** Human readable progress message (e.g. current stage) when progress is enabled. */
private String progressMessage;
/** Timestamp of the last progress update. */
private LocalDateTime progressUpdatedAt;
/**
* Notes attached to this job for tracking purposes. Uses CopyOnWriteArrayList for thread safety
* when notes are added concurrently.
@ -54,11 +66,28 @@ public class JobResult {
* @return A new JobResult
*/
public static JobResult createNew(String jobId) {
return JobResult.builder()
.jobId(jobId)
.complete(false)
.createdAt(LocalDateTime.now())
.build();
return createNew(jobId, true);
}
/**
* Create a new JobResult with the given job ID and progress tracking preference.
*
* @param jobId The job ID
* @param trackProgress Whether detailed progress should be tracked
* @return A new JobResult
*/
public static JobResult createNew(String jobId, boolean trackProgress) {
JobResult result =
JobResult.builder()
.jobId(jobId)
.complete(false)
.createdAt(LocalDateTime.now())
.trackProgress(trackProgress)
.build();
if (trackProgress) {
result.updateProgressInternal(0, "Pending");
}
return result;
}
/**
@ -70,6 +99,9 @@ public class JobResult {
this.complete = true;
this.result = result;
this.completedAt = LocalDateTime.now();
if (trackProgress) {
updateProgressInternal(100, "Completed");
}
}
/**
@ -81,6 +113,9 @@ public class JobResult {
this.complete = true;
this.error = error;
this.completedAt = LocalDateTime.now();
if (trackProgress) {
updateProgressInternal(100, error != null ? error : "Failed");
}
}
/**
@ -92,6 +127,9 @@ public class JobResult {
this.complete = true;
this.resultFiles = new ArrayList<>(resultFiles);
this.completedAt = LocalDateTime.now();
if (trackProgress) {
updateProgressInternal(100, "Completed");
}
}
/**
@ -161,4 +199,26 @@ public class JobResult {
public List<String> getNotes() {
return Collections.unmodifiableList(notes);
}
/**
* Update the progress information if tracking is enabled.
*
* @param percent The percent complete (0-100)
* @param message Optional descriptive message
*/
public void updateProgress(int percent, String message) {
if (!trackProgress) {
return;
}
updateProgressInternal(percent, message);
}
private void updateProgressInternal(int percent, String message) {
int clamped = Math.min(100, Math.max(0, percent));
this.progressPercent = clamped;
if (message != null && !message.isBlank()) {
this.progressMessage = message;
}
this.progressUpdatedAt = LocalDateTime.now();
}
}
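A brief illustration of the new progress fields, using only the API added in this class; the job IDs are placeholders:

JobResult tracked = JobResult.createNew("job-a", true);   // starts at 0% with message "Pending"
tracked.updateProgress(150, "Rendering");                 // percent is clamped to 100
tracked.updateProgress(-5, null);                         // clamped to 0; null/blank keeps "Rendering"

JobResult untracked = JobResult.createNew("job-b", false);
untracked.updateProgress(50, "Ignored");                  // no-op: trackProgress is false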

View File

@ -20,6 +20,7 @@ import jakarta.servlet.http.HttpServletRequest;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.context.JobContextHolder;
import stirling.software.common.model.job.JobResponse;
import stirling.software.common.util.ExecutorFactory;
@ -65,7 +66,7 @@ public class JobExecutorService {
* @return The response
*/
public ResponseEntity<?> runJobGeneric(boolean async, Supplier<Object> work) {
return runJobGeneric(async, work, -1);
return runJobGeneric(async, work, -1, false, 50, true);
}
/**
@ -78,7 +79,7 @@ public class JobExecutorService {
*/
public ResponseEntity<?> runJobGeneric(
boolean async, Supplier<Object> work, long customTimeoutMs) {
return runJobGeneric(async, work, customTimeoutMs, false, 50);
return runJobGeneric(async, work, customTimeoutMs, false, 50, true);
}
/**
@ -96,7 +97,8 @@ public class JobExecutorService {
Supplier<Object> work,
long customTimeoutMs,
boolean queueable,
int resourceWeight) {
int resourceWeight,
boolean trackProgress) {
String jobId = UUID.randomUUID().toString();
// Store the job ID in the request for potential use by other components
@ -138,6 +140,8 @@ public class JobExecutorService {
&& // Only async jobs can be queued
resourceMonitor.shouldQueueJob(resourceWeight);
boolean enableProgress = async && trackProgress;
if (shouldQueue) {
// Queue the job instead of executing immediately
log.debug(
@ -145,13 +149,18 @@ public class JobExecutorService {
jobId,
resourceWeight);
taskManager.createTask(jobId);
taskManager.createTask(jobId, trackProgress);
Supplier<Object> contextualWork = withJobContext(jobId, enableProgress, work);
// Create a specialized wrapper that updates the TaskManager
Supplier<Object> wrappedWork =
() -> {
try {
Object result = work.get();
if (enableProgress) {
taskManager.updateProgress(jobId, 5, null);
}
Object result = contextualWork.get();
processJobResult(jobId, result);
return result;
} catch (Exception e) {
@ -169,24 +178,49 @@ public class JobExecutorService {
// Return immediately with job ID
return ResponseEntity.ok().body(new JobResponse<>(true, jobId, null));
} else if (async) {
taskManager.createTask(jobId);
taskManager.createTask(jobId, trackProgress);
executor.execute(
() -> {
try {
log.debug(
"Running async job {} with timeout {} ms", jobId, timeoutToUse);
() ->
runWithJobContext(
jobId,
enableProgress,
() -> {
try {
log.debug(
"Running async job {} with timeout {} ms",
jobId,
timeoutToUse);
// Execute with timeout
Object result = executeWithTimeout(() -> work.get(), timeoutToUse);
processJobResult(jobId, result);
} catch (TimeoutException te) {
log.error("Job {} timed out after {} ms", jobId, timeoutToUse);
taskManager.setError(jobId, "Job timed out");
} catch (Exception e) {
log.error("Error executing job {}: {}", jobId, e.getMessage(), e);
taskManager.setError(jobId, e.getMessage());
}
});
Supplier<Object> contextualWork =
withJobContext(jobId, enableProgress, work);
if (enableProgress) {
taskManager.updateProgress(jobId, 5, null);
}
// Execute with timeout
Object result =
executeWithTimeout(
contextualWork,
timeoutToUse,
jobId,
enableProgress);
processJobResult(jobId, result);
} catch (TimeoutException te) {
log.error(
"Job {} timed out after {} ms",
jobId,
timeoutToUse);
taskManager.setError(jobId, "Job timed out");
} catch (Exception e) {
log.error(
"Error executing job {}: {}",
jobId,
e.getMessage(),
e);
taskManager.setError(jobId, e.getMessage());
}
}));
return ResponseEntity.ok().body(new JobResponse<>(true, jobId, null));
} else {
@ -194,7 +228,9 @@ public class JobExecutorService {
log.debug("Running sync job with timeout {} ms", timeoutToUse);
// Execute with timeout
Object result = executeWithTimeout(() -> work.get(), timeoutToUse);
Supplier<Object> contextualWork = withJobContext(jobId, enableProgress, work);
Object result =
executeWithTimeout(contextualWork, timeoutToUse, jobId, enableProgress);
// If the result is already a ResponseEntity, return it directly
if (result instanceof ResponseEntity) {
@ -452,12 +488,14 @@ public class JobExecutorService {
* @throws TimeoutException If the execution times out
* @throws Exception If the supplier throws an exception
*/
private <T> T executeWithTimeout(Supplier<T> supplier, long timeoutMs)
private <T> T executeWithTimeout(
Supplier<T> supplier, long timeoutMs, String jobId, boolean progressEnabled)
throws TimeoutException, Exception {
// Use the same executor as other async jobs for consistency
// This ensures all operations run on the same thread pool
java.util.concurrent.CompletableFuture<T> future =
java.util.concurrent.CompletableFuture.supplyAsync(supplier, executor);
java.util.concurrent.CompletableFuture.supplyAsync(
withJobContext(jobId, progressEnabled, supplier), executor);
try {
return future.get(timeoutMs, TimeUnit.MILLISECONDS);
@ -473,4 +511,62 @@ public class JobExecutorService {
throw new Exception("Execution was interrupted", e);
}
}
/** Backwards compatible helper used by tests via reflection. */
@SuppressWarnings("unused")
private <T> T executeWithTimeout(Supplier<T> supplier, long timeoutMs)
throws TimeoutException, Exception {
return executeWithTimeout(
supplier,
timeoutMs,
JobContextHolder.getJobId(),
JobContextHolder.isProgressEnabled());
}
private <T> Supplier<T> withJobContext(
String jobId, boolean progressEnabled, Supplier<T> delegate) {
if (jobId == null) {
return delegate;
}
return () -> {
String previousJobId = JobContextHolder.getJobId();
boolean previousProgress = JobContextHolder.isProgressEnabled();
JobContextHolder.setContext(jobId, progressEnabled);
try {
return delegate.get();
} finally {
if (previousJobId == null) {
JobContextHolder.clear();
} else {
JobContextHolder.setContext(previousJobId, previousProgress);
}
}
};
}
private void runWithJobContext(String jobId, boolean progressEnabled, Runnable runnable) {
if (jobId == null) {
runnable.run();
return;
}
Runnable contextualRunnable =
() -> {
String previousJobId = JobContextHolder.getJobId();
boolean previousProgress = JobContextHolder.isProgressEnabled();
JobContextHolder.setContext(jobId, progressEnabled);
try {
runnable.run();
} finally {
if (previousJobId == null) {
JobContextHolder.clear();
} else {
JobContextHolder.setContext(previousJobId, previousProgress);
}
}
};
contextualRunnable.run();
}
}
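A minimal caller sketch for the widened overload, assuming the service is injected and doExpensiveWork() stands in for the real work supplier:

ResponseEntity<?> response =
        jobExecutorService.runJobGeneric(
                true,                     // async: return a job ID immediately
                () -> doExpensiveWork(),  // hypothetical work, run under the job context
                60_000,                   // custom timeout in ms
                false,                    // queueable
                50,                       // resource weight
                true);                    // trackProgress: store percent/message on the JobResult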

View File

@ -0,0 +1,56 @@
package stirling.software.common.service;
import org.springframework.stereotype.Service;
import lombok.RequiredArgsConstructor;
import stirling.software.common.context.JobContextHolder;
/**
* Convenience service that exposes a simple API for updating progress information from within job
* handlers executed through {@link JobExecutorService}. The service automatically ties progress
* updates to the current job (if any) and no-ops when progress tracking is disabled.
*/
@Service
@RequiredArgsConstructor
public class JobProgressService {
private final TaskManager taskManager;
/** Update the progress percentage for the current job. */
public boolean updateProgress(int percent, String message) {
String jobId = JobContextHolder.getJobId();
if (jobId == null || !JobContextHolder.isProgressEnabled()) {
return false;
}
return taskManager.updateProgress(jobId, percent, message);
}
/**
* Create a simple tracker that can be used to report progress across a fixed number of steps.
* When progress tracking is disabled for the current job, the returned tracker will be a
* lightweight no-op implementation.
*/
public JobProgressTracker tracker(int totalSteps) {
return tracker(totalSteps, null);
}
/**
* Create a tracker and optionally publish an initial message. Useful for multi-stage pipelines
* where the initial state should be visible to clients.
*/
public JobProgressTracker tracker(int totalSteps, String initialMessage) {
String jobId = JobContextHolder.getJobId();
boolean enabled = JobContextHolder.isProgressEnabled();
if (jobId == null || !enabled || totalSteps <= 0) {
return JobProgressTracker.disabled();
}
if (initialMessage != null && !initialMessage.isBlank()) {
taskManager.updateProgress(jobId, 0, initialMessage);
}
return new JobProgressTracker(taskManager, jobId, totalSteps, true);
}
}
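A usage sketch from inside job code started via JobExecutorService; jobProgressService is assumed to be an injected bean, and the step messages are placeholders:

JobProgressTracker tracker = jobProgressService.tracker(3, "Starting");
tracker.advanceBy(1, "Loaded input");       // ~33%
tracker.advanceBy(1, "Transformed pages");  // ~66%
tracker.complete("Done");                   // 100%

// Ad-hoc updates are also possible; false means no job context is bound
// or progress tracking is disabled for the current job.
boolean accepted = jobProgressService.updateProgress(42, "Almost halfway");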

View File

@ -0,0 +1,81 @@
package stirling.software.common.service;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
/**
* Utility that helps controllers report progress in a structured way by distributing the 0-100%
* range across a finite number of logical steps.
*/
@RequiredArgsConstructor(access = AccessLevel.PACKAGE)
public class JobProgressTracker {
private final TaskManager taskManager;
private final String jobId;
private final int totalSteps;
private final boolean enabled;
private int completedSteps;
static JobProgressTracker disabled() {
return new JobProgressTracker(null, null, 1, false);
}
/** Whether the tracker will emit updates. */
public boolean isEnabled() {
return enabled;
}
/** Advance the tracker by one step. */
public void advance() {
advanceBy(1, null);
}
/** Advance the tracker by {@code steps} steps. */
public void advanceBy(int steps, String message) {
if (!enabled) {
return;
}
int safeSteps = Math.max(0, steps);
completedSteps = Math.min(totalSteps, completedSteps + safeSteps);
publish(message);
}
/** Advance the tracker by {@code steps} steps without a message. */
public void advanceBy(int steps) {
advanceBy(steps, null);
}
/** Explicitly set the completed steps count. */
public void setStepsCompleted(int stepsCompleted, String message) {
if (!enabled) {
return;
}
completedSteps = Math.max(0, Math.min(totalSteps, stepsCompleted));
publish(message);
}
/** Explicitly set completed steps without a message. */
public void setStepsCompleted(int stepsCompleted) {
setStepsCompleted(stepsCompleted, null);
}
/** Mark the tracker as complete and emit a final message. */
public void complete(String message) {
if (!enabled) {
return;
}
completedSteps = totalSteps;
taskManager.updateProgress(jobId, 100, message);
}
/** Mark the tracker as complete without a message. */
public void complete() {
complete(null);
}
private void publish(String message) {
int percent = (int) Math.floor(((double) completedSteps / (double) totalSteps) * 100);
taskManager.updateProgress(jobId, percent, message);
}
}
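The mapping in publish() distributes steps evenly over 0-100%. Shown standalone below (percentFor is a hypothetical helper mirroring that line), a four-step tracker produces the 25/50/75 sequence asserted in JobProgressServiceTest:

static int percentFor(int completedSteps, int totalSteps) {
    return (int) Math.floor(((double) completedSteps / (double) totalSteps) * 100);
}
// percentFor(1, 4) == 25, percentFor(2, 4) == 50, percentFor(3, 4) == 75; complete() always reports 100.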

View File

@ -65,7 +65,17 @@ public class TaskManager {
* @param jobId The job ID
*/
public void createTask(String jobId) {
jobResults.put(jobId, JobResult.createNew(jobId));
createTask(jobId, true);
}
/**
* Create a new task with the given job ID and progress tracking preference
*
* @param jobId The job ID
* @param trackProgress Whether detailed progress updates should be stored
*/
public void createTask(String jobId, boolean trackProgress) {
jobResults.put(jobId, JobResult.createNew(jobId, trackProgress));
log.debug("Created task with job ID: {}", jobId);
}
@ -165,6 +175,11 @@ public class TaskManager {
&& jobResult.getError() == null) {
// If no result or error has been set, mark it as complete with an empty result
jobResult.completeWithResult("Task completed successfully");
} else {
// Ensure progress is set to 100% with "Completed" message
if (jobResult.isTrackProgress()) {
jobResult.updateProgress(100, "Completed");
}
}
log.debug("Marked job ID: {} as complete", jobId);
}
@ -209,6 +224,33 @@ public class TaskManager {
return false;
}
/**
* Update the progress information for a task.
*
* @param jobId The job ID
* @param percent Percentage complete (0-100)
* @param message Descriptive message for the current stage
* @return true if the progress update was accepted
*/
public boolean updateProgress(String jobId, int percent, String message) {
JobResult jobResult = jobResults.get(jobId);
if (jobResult == null) {
log.debug("Ignoring progress update for unknown job ID: {}", jobId);
return false;
}
if (!jobResult.isTrackProgress()) {
log.trace("Progress tracking disabled for job ID: {}", jobId);
return false;
}
jobResult.updateProgress(percent, message);
log.debug(
"Updated progress for job {} to {}% with message: {}",
jobId, jobResult.getProgressPercent(), message);
return true;
}
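For reference, the return value separates accepted updates from ignored ones; a quick sketch with placeholder job IDs, matching the new TaskManagerTest cases:

taskManager.createTask("tracked-job");                     // trackProgress defaults to true
taskManager.updateProgress("tracked-job", 50, "Halfway");  // true: stored on the JobResult

taskManager.createTask("silent-job", false);
taskManager.updateProgress("silent-job", 75, "Stage");     // false: tracking disabled

taskManager.updateProgress("missing-job", 10, "Stage");    // false: unknown job ID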
/**
* Get statistics about all jobs in the system
*

View File

@ -1,6 +1,7 @@
package stirling.software.common.annotations;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
@ -72,6 +73,8 @@ class AutoJobPostMappingIntegrationTest {
@Captor private ArgumentCaptor<Integer> resourceWeightCaptor;
@Captor private ArgumentCaptor<Boolean> trackProgressCaptor;
@Test
void shouldExecuteWithCustomParameters() throws Throwable {
// Given
@ -91,7 +94,12 @@ class AutoJobPostMappingIntegrationTest {
when(fileStorage.retrieveFile("test-file-id")).thenReturn(mockFile);
when(jobExecutorService.runJobGeneric(
anyBoolean(), any(Supplier.class), anyLong(), anyBoolean(), anyInt()))
anyBoolean(),
any(Supplier.class),
anyLong(),
anyBoolean(),
anyInt(),
anyBoolean()))
.thenReturn(ResponseEntity.ok("success"));
// When
@ -106,12 +114,14 @@ class AutoJobPostMappingIntegrationTest {
workCaptor.capture(),
timeoutCaptor.capture(),
queueableCaptor.capture(),
resourceWeightCaptor.capture());
resourceWeightCaptor.capture(),
trackProgressCaptor.capture());
assertTrue(asyncCaptor.getValue(), "Async should be true");
assertEquals(60000L, timeoutCaptor.getValue(), "Timeout should be 60000ms");
assertTrue(queueableCaptor.getValue(), "Queueable should be true");
assertEquals(75, resourceWeightCaptor.getValue(), "Resource weight should be 75");
assertTrue(trackProgressCaptor.getValue(), "Track progress should propagate");
// Test that file was resolved
assertNotNull(pdfFile.getFileInput(), "File input should be set");
@ -135,7 +145,12 @@ class AutoJobPostMappingIntegrationTest {
// Mock jobExecutorService to execute the work immediately
when(jobExecutorService.runJobGeneric(
anyBoolean(), any(Supplier.class), anyLong(), anyBoolean(), anyInt()))
anyBoolean(),
any(Supplier.class),
anyLong(),
anyBoolean(),
anyInt(),
anyBoolean()))
.thenAnswer(
invocation -> {
Supplier<Object> work = invocation.getArgument(1);
@ -150,6 +165,16 @@ class AutoJobPostMappingIntegrationTest {
// Verify that proceed was called twice (initial attempt + 1 retry)
verify(joinPoint, times(2)).proceed(any());
verify(jobExecutorService)
.runJobGeneric(
asyncCaptor.capture(),
workCaptor.capture(),
timeoutCaptor.capture(),
queueableCaptor.capture(),
resourceWeightCaptor.capture(),
trackProgressCaptor.capture());
assertFalse(trackProgressCaptor.getValue(), "Track progress should be false when disabled");
}
@Test
@ -168,7 +193,12 @@ class AutoJobPostMappingIntegrationTest {
// Mock job executor to return a successful response
when(jobExecutorService.runJobGeneric(
anyBoolean(), any(Supplier.class), anyLong(), anyBoolean(), anyInt()))
anyBoolean(),
any(Supplier.class),
anyLong(),
anyBoolean(),
anyInt(),
anyBoolean()))
.thenReturn(ResponseEntity.ok("success"));
// When

View File

@ -94,7 +94,7 @@ class JobExecutorServiceTest {
assertNotNull(jobResponse.getJobId());
// Verify task manager was called
verify(taskManager).createTask(jobIdCaptor.capture());
verify(taskManager).createTask(jobIdCaptor.capture(), eq(true));
}
@Test
@ -129,7 +129,8 @@ class JobExecutorServiceTest {
when(jobQueue.queueJob(anyString(), eq(80), any(), anyLong())).thenReturn(future);
// When
ResponseEntity<?> response = jobExecutorService.runJobGeneric(true, work, 5000, true, 80);
ResponseEntity<?> response =
jobExecutorService.runJobGeneric(true, work, 5000, true, 80, true);
// Then
assertEquals(HttpStatus.OK, response.getStatusCode());
@ -137,7 +138,7 @@ class JobExecutorServiceTest {
// Verify job was queued
verify(jobQueue).queueJob(anyString(), eq(80), any(), eq(5000L));
verify(taskManager).createTask(anyString());
verify(taskManager).createTask(anyString(), eq(true));
}
@Test

View File

@ -0,0 +1,83 @@
package stirling.software.common.service;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import stirling.software.common.context.JobContextHolder;
class JobProgressServiceTest {
@Mock private TaskManager taskManager;
private JobProgressService jobProgressService;
private AutoCloseable mocks;
@BeforeEach
void setUp() {
mocks = MockitoAnnotations.openMocks(this);
jobProgressService = new JobProgressService(taskManager);
JobContextHolder.clear();
}
@AfterEach
void tearDown() throws Exception {
JobContextHolder.clear();
mocks.close();
}
@Test
void updateProgressReturnsFalseWhenNoContext() {
boolean updated = jobProgressService.updateProgress(10, "Stage");
assertFalse(updated);
verify(taskManager, never()).updateProgress(anyString(), anyInt(), anyString());
}
@Test
void updateProgressDelegatesToTaskManager() {
JobContextHolder.setContext("job-123", true);
when(taskManager.updateProgress("job-123", 20, "Processing")).thenReturn(true);
boolean updated = jobProgressService.updateProgress(20, "Processing");
assertTrue(updated);
verify(taskManager).updateProgress("job-123", 20, "Processing");
}
@Test
void trackerNoOpsWhenDisabled() {
JobContextHolder.setContext("job-123", false);
JobProgressTracker tracker = jobProgressService.tracker(5, "Start");
assertFalse(tracker.isEnabled());
tracker.advanceBy(1, "Step");
tracker.complete("Done");
verify(taskManager, never()).updateProgress(anyString(), anyInt(), anyString());
}
@Test
void trackerPublishesProgress() {
JobContextHolder.setContext("job-123", true);
JobProgressTracker tracker = jobProgressService.tracker(4, "Starting");
assertTrue(tracker.isEnabled());
verify(taskManager).updateProgress("job-123", 0, "Starting");
tracker.advanceBy(1, "25 percent");
tracker.advanceBy(1, "50 percent");
tracker.setStepsCompleted(3, "75 percent");
tracker.complete("Done");
verify(taskManager).updateProgress("job-123", 25, "25 percent");
verify(taskManager).updateProgress("job-123", 50, "50 percent");
verify(taskManager).updateProgress("job-123", 75, "75 percent");
verify(taskManager).updateProgress("job-123", 100, "Done");
}
}

View File

@ -50,6 +50,9 @@ class TaskManagerTest {
assertEquals(jobId, result.getJobId());
assertFalse(result.isComplete());
assertNotNull(result.getCreatedAt());
assertTrue(result.isTrackProgress());
assertEquals(0, result.getProgressPercent());
assertEquals("Pending", result.getProgressMessage());
}
@Test
@ -68,6 +71,8 @@ class TaskManagerTest {
assertTrue(result.isComplete());
assertEquals(resultObject, result.getResult());
assertNotNull(result.getCompletedAt());
assertEquals(100, result.getProgressPercent());
assertEquals("Completed", result.getProgressMessage());
}
@Test
@ -120,6 +125,8 @@ class TaskManagerTest {
assertTrue(result.isComplete());
assertEquals(errorMessage, result.getError());
assertNotNull(result.getCompletedAt());
assertEquals(100, result.getProgressPercent());
assertEquals(errorMessage, result.getProgressMessage());
}
@Test
@ -138,6 +145,7 @@ class TaskManagerTest {
assertNotNull(result);
assertTrue(result.isComplete());
assertEquals(resultObject, result.getResult());
assertEquals(100, result.getProgressPercent());
}
@Test
@ -154,6 +162,8 @@ class TaskManagerTest {
assertNotNull(result);
assertTrue(result.isComplete());
assertEquals("Task completed successfully", result.getResult());
assertEquals(100, result.getProgressPercent());
assertEquals("Completed", result.getProgressMessage());
}
@Test
@ -305,4 +315,45 @@ class TaskManagerTest {
// Assert
assertFalse(result);
}
@Test
void testCreateTaskWithoutProgressTracking() {
String jobId = UUID.randomUUID().toString();
taskManager.createTask(jobId, false);
JobResult result = taskManager.getJobResult(jobId);
assertNotNull(result);
assertFalse(result.isTrackProgress());
assertNull(result.getProgressPercent());
assertNull(result.getProgressMessage());
}
@Test
void testUpdateProgress() {
String jobId = UUID.randomUUID().toString();
taskManager.createTask(jobId);
boolean updated = taskManager.updateProgress(jobId, 50, "Halfway there");
assertTrue(updated);
JobResult result = taskManager.getJobResult(jobId);
assertEquals(50, result.getProgressPercent());
assertEquals("Halfway there", result.getProgressMessage());
assertNotNull(result.getProgressUpdatedAt());
}
@Test
void testUpdateProgressReturnsFalseWhenJobMissing() {
boolean updated = taskManager.updateProgress("missing", 10, "Stage");
assertFalse(updated);
}
@Test
void testUpdateProgressIgnoredWhenTrackingDisabled() {
String jobId = UUID.randomUUID().toString();
taskManager.createTask(jobId, false);
boolean updated = taskManager.updateProgress(jobId, 75, "Stage");
assertFalse(updated);
}
}

View File

@ -22,12 +22,15 @@ import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.AnalysisApi;
import stirling.software.common.model.api.PDFFile;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
@AnalysisApi
@RequiredArgsConstructor
public class AnalysisController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(value = "/page-count", consumes = "multipart/form-data")
@JsonDataResponse
@ -89,12 +92,22 @@ public class AnalysisController {
try (PDDocument document = pdfDocumentFactory.load(file.getFileInput())) {
List<Map<String, Float>> dimensions = new ArrayList<>();
PDPageTree pages = document.getPages();
JobProgressTracker progressTracker =
jobProgressService.tracker(Math.max(1, pages.getCount()));
boolean trackProgress = progressTracker.isEnabled();
for (PDPage page : pages) {
Map<String, Float> pageDim = new HashMap<>();
pageDim.put("width", page.getBBox().getWidth());
pageDim.put("height", page.getBBox().getHeight());
dimensions.add(pageDim);
if (trackProgress) {
progressTracker.advance();
}
}
if (trackProgress) {
progressTracker.complete();
}
return dimensions;
}
@ -134,13 +147,24 @@ public class AnalysisController {
Map<String, Object> annotInfo = new HashMap<>();
int totalAnnotations = 0;
Map<String, Integer> annotationTypes = new HashMap<>();
PDPageTree pages = document.getPages();
JobProgressTracker progressTracker =
jobProgressService.tracker(Math.max(1, pages.getCount()));
boolean trackProgress = progressTracker.isEnabled();
for (PDPage page : document.getPages()) {
for (PDPage page : pages) {
for (PDAnnotation annot : page.getAnnotations()) {
totalAnnotations++;
String subType = annot.getSubtype();
annotationTypes.merge(subType, 1, Integer::sum);
}
if (trackProgress) {
progressTracker.advance();
}
}
if (trackProgress) {
progressTracker.complete();
}
annotInfo.put("totalCount", totalAnnotations);
@ -160,10 +184,22 @@ public class AnalysisController {
Map<String, Object> fontInfo = new HashMap<>();
Set<String> fontNames = new HashSet<>();
for (PDPage page : document.getPages()) {
PDPageTree pages = document.getPages();
JobProgressTracker progressTracker =
jobProgressService.tracker(Math.max(1, pages.getCount()));
boolean trackProgress = progressTracker.isEnabled();
for (PDPage page : pages) {
for (COSName font : page.getResources().getFontNames()) {
fontNames.add(font.getName());
}
if (trackProgress) {
progressTracker.advance();
}
}
if (trackProgress) {
progressTracker.complete();
}
fontInfo.put("fontCount", fontNames.size());

View File

@ -28,6 +28,8 @@ import lombok.RequiredArgsConstructor;
import stirling.software.SPDF.model.api.general.BookletImpositionRequest;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.WebResponseUtils;
@RestController
@ -37,6 +39,7 @@ import stirling.software.common.util.WebResponseUtils;
public class BookletImpositionController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(value = "/booklet-imposition", consumes = "multipart/form-data")
@Operation(
@ -68,21 +71,31 @@ public class BookletImpositionController {
PDDocument sourceDocument = pdfDocumentFactory.load(file);
int totalPages = sourceDocument.getNumberOfPages();
List<Side> sides = saddleStitchSides(totalPages, doubleSided, duplexPass, flipOnShortEdge);
JobProgressTracker progressTracker = jobProgressService.tracker(Math.max(1, sides.size()));
boolean trackProgress = progressTracker.isEnabled();
// Create proper booklet with signature-based page ordering
PDDocument newDocument =
createSaddleBooklet(
sourceDocument,
totalPages,
addBorder,
spineLocation,
addGutter,
gutterSize,
doubleSided,
duplexPass,
flipOnShortEdge);
flipOnShortEdge,
sides,
progressTracker,
trackProgress);
sourceDocument.close();
if (trackProgress) {
progressTracker.complete();
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
newDocument.save(baos);
newDocument.close();
@ -154,14 +167,16 @@ public class BookletImpositionController {
private PDDocument createSaddleBooklet(
PDDocument src,
int totalPages,
boolean addBorder,
String spineLocation,
boolean addGutter,
float gutterSize,
boolean doubleSided,
String duplexPass,
boolean flipOnShortEdge)
boolean flipOnShortEdge,
List<Side> sides,
JobProgressTracker progressTracker,
boolean trackProgress)
throws IOException {
PDDocument dst = pdfDocumentFactory.createNewDocumentBasedOnOldDocument(src);
@ -176,8 +191,6 @@ public class BookletImpositionController {
if (gutterSize < 0) gutterSize = 0;
if (gutterSize >= pageSize.getWidth() / 2f) gutterSize = pageSize.getWidth() / 2f - 1f;
List<Side> sides = saddleStitchSides(totalPages, doubleSided, duplexPass, flipOnShortEdge);
for (Side side : sides) {
PDPage out = new PDPage(pageSize);
dst.addPage(out);
@ -234,6 +247,10 @@ public class BookletImpositionController {
cellH,
addBorder);
}
if (trackProgress) {
progressTracker.advance();
}
}
return dst;
}

View File

@ -22,6 +22,8 @@ import stirling.software.SPDF.model.api.general.CropPdfForm;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.WebResponseUtils;
@GeneralApi
@ -29,6 +31,7 @@ import stirling.software.common.util.WebResponseUtils;
public class CropController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(value = "/crop", consumes = "multipart/form-data")
@StandardPdfResponse
@ -44,6 +47,8 @@ public class CropController {
pdfDocumentFactory.createNewDocumentBasedOnOldDocument(sourceDocument);
int totalPages = sourceDocument.getNumberOfPages();
JobProgressTracker progressTracker = jobProgressService.tracker(Math.max(1, totalPages));
boolean trackProgress = progressTracker.isEnabled();
LayerUtility layerUtility = new LayerUtility(newDocument);
@ -80,6 +85,10 @@ public class CropController {
request.getY(),
request.getWidth(),
request.getHeight()));
if (trackProgress) {
progressTracker.advance();
}
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
@ -87,6 +96,10 @@ public class CropController {
newDocument.close();
sourceDocument.close();
if (trackProgress) {
progressTracker.complete();
}
byte[] pdfContent = baos.toByteArray();
return WebResponseUtils.bytesToWebResponse(
pdfContent,

View File

@ -37,6 +37,8 @@ import stirling.software.SPDF.model.api.general.MergePdfsRequest;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.ExceptionUtils;
import stirling.software.common.util.GeneralUtils;
import stirling.software.common.util.PdfErrorUtils;
@ -48,6 +50,7 @@ import stirling.software.common.util.WebResponseUtils;
public class MergeController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
// Merges a list of PDDocument objects into a single PDDocument
public PDDocument mergeDocuments(List<PDDocument> documents) throws IOException {
@ -204,6 +207,10 @@ public class MergeController {
getSortComparator(
request.getSortType())); // Sort files based on the given sort type
JobProgressTracker progressTracker =
jobProgressService.tracker(Math.max(1, files.length) + 4);
boolean trackProgress = progressTracker.isEnabled();
PDFMergerUtility mergerUtility = new PDFMergerUtility();
long totalSize = 0;
List<Integer> invalidIndexes = new ArrayList<>();
@ -224,6 +231,10 @@ public class MergeController {
invalidIndexes.add(index);
}
mergerUtility.addSource(tempFile); // Add source file to the merger utility
if (trackProgress) {
progressTracker.advance();
}
}
if (!invalidIndexes.isEmpty()) {
@ -243,6 +254,8 @@ public class MergeController {
"{\"errorFileIds\":%s,\"message\":\"Some of the selected files can't be merged\"}",
errorFileIds.toString());
jobProgressService.updateProgress(100, null);
return ResponseEntity.status(HttpStatus.UNPROCESSABLE_ENTITY)
.header("Content-Type", MediaType.APPLICATION_JSON_VALUE)
.body(payload.getBytes(StandardCharsets.UTF_8));
@ -251,6 +264,10 @@ public class MergeController {
mergedTempFile = Files.createTempFile("merged-", ".pdf").toFile();
mergerUtility.setDestinationFileName(mergedTempFile.getAbsolutePath());
if (trackProgress) {
progressTracker.advance();
}
try {
mergerUtility.mergeDocuments(
pdfDocumentFactory.getStreamCacheFunction(
@ -289,10 +306,18 @@ public class MergeController {
addTableOfContents(mergedDocument, files);
}
if (trackProgress) {
progressTracker.advance();
}
// Save the modified document to a new ByteArrayOutputStream
ByteArrayOutputStream baos = new ByteArrayOutputStream();
mergedDocument.save(baos);
if (trackProgress) {
progressTracker.complete();
}
String mergedFileName =
files[0].getOriginalFilename().replaceFirst("[.][^.]+$", "")
+ "_merged_unsigned.pdf";

View File

@ -25,6 +25,8 @@ import stirling.software.SPDF.model.api.general.MergeMultiplePagesRequest;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.WebResponseUtils;
@GeneralApi
@ -32,6 +34,7 @@ import stirling.software.common.util.WebResponseUtils;
public class MultiPageLayoutController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(value = "/multi-page-layout", consumes = "multipart/form-data")
@StandardPdfResponse
@ -66,6 +69,8 @@ public class MultiPageLayoutController {
newDocument.addPage(newPage);
int totalPages = sourceDocument.getNumberOfPages();
JobProgressTracker progressTracker = jobProgressService.tracker(Math.max(1, totalPages));
boolean trackProgress = progressTracker.isEnabled();
float cellWidth = newPage.getMediaBox().getWidth() / cols;
float cellHeight = newPage.getMediaBox().getHeight() / rows;
@ -126,6 +131,10 @@ public class MultiPageLayoutController {
contentStream.addRect(borderX, borderY, cellWidth, cellHeight);
contentStream.stroke();
}
if (trackProgress) {
progressTracker.advance();
}
}
contentStream.close(); // Close the final content stream
@ -135,6 +144,10 @@ public class MultiPageLayoutController {
newDocument.save(baos);
newDocument.close();
if (trackProgress) {
progressTracker.complete();
}
byte[] result = baos.toByteArray();
return WebResponseUtils.bytesToWebResponse(
result,

View File

@ -17,6 +17,8 @@ import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.model.api.PDFFile;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.WebResponseUtils;
/**
@ -32,6 +34,8 @@ public class PdfImageRemovalController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
/**
* Endpoint to remove images from a PDF file.
*
@ -54,8 +58,13 @@ public class PdfImageRemovalController {
// Load the PDF document
PDDocument document = pdfDocumentFactory.load(file);
int pageCount = Math.max(1, document.getNumberOfPages());
JobProgressTracker progressTracker = jobProgressService.tracker(pageCount + 1);
boolean trackProgress = progressTracker.isEnabled();
// Remove images from the PDF document using the service
PDDocument modifiedDocument = pdfImageRemovalService.removeImagesFromPdf(document);
PDDocument modifiedDocument =
pdfImageRemovalService.removeImagesFromPdf(document, progressTracker);
// Create a ByteArrayOutputStream to hold the modified PDF data
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
@ -69,6 +78,10 @@ public class PdfImageRemovalController {
file.getFileInput().getOriginalFilename().replaceFirst("[.][^.]+$", "")
+ "_removed_images.pdf";
if (trackProgress) {
progressTracker.complete();
}
// Convert the byte array to a web response and return it
return WebResponseUtils.bytesToWebResponse(outputStream.toByteArray(), mergedFileName);
}

View File

@ -27,6 +27,8 @@ import stirling.software.SPDF.model.api.general.OverlayPdfsRequest;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.GeneralUtils;
import stirling.software.common.util.WebResponseUtils;
@ -35,6 +37,7 @@ import stirling.software.common.util.WebResponseUtils;
public class PdfOverlayController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(value = "/overlay-pdfs", consumes = "multipart/form-data")
@StandardPdfResponse
@ -63,13 +66,17 @@ public class PdfOverlayController {
try (PDDocument basePdf = pdfDocumentFactory.load(baseFile);
Overlay overlay = new Overlay()) {
JobProgressTracker progressTracker =
jobProgressService.tracker(basePdf.getNumberOfPages() + 1);
boolean trackProgress = progressTracker.isEnabled();
Map<Integer, String> overlayGuide =
prepareOverlayGuide(
basePdf.getNumberOfPages(),
overlayPdfFiles,
mode,
counts,
tempFiles);
tempFiles,
progressTracker);
overlay.setInputPDF(basePdf);
if (overlayPos == 0) {
@ -80,6 +87,13 @@ public class PdfOverlayController {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
overlay.overlay(overlayGuide).save(outputStream);
if (trackProgress) {
progressTracker.advance();
progressTracker.complete();
} else {
jobProgressService.updateProgress(100, null);
}
byte[] data = outputStream.toByteArray();
String outputFilename =
Filenames.toSimpleFileName(baseFile.getOriginalFilename())
@ -104,18 +118,25 @@ public class PdfOverlayController {
}
private Map<Integer, String> prepareOverlayGuide(
int basePageCount, File[] overlayFiles, String mode, int[] counts, List<File> tempFiles)
int basePageCount,
File[] overlayFiles,
String mode,
int[] counts,
List<File> tempFiles,
JobProgressTracker progressTracker)
throws IOException {
Map<Integer, String> overlayGuide = new HashMap<>();
switch (mode) {
case "SequentialOverlay":
sequentialOverlay(overlayGuide, overlayFiles, basePageCount, tempFiles);
sequentialOverlay(
overlayGuide, overlayFiles, basePageCount, tempFiles, progressTracker);
break;
case "InterleavedOverlay":
interleavedOverlay(overlayGuide, overlayFiles, basePageCount);
interleavedOverlay(overlayGuide, overlayFiles, basePageCount, progressTracker);
break;
case "FixedRepeatOverlay":
fixedRepeatOverlay(overlayGuide, overlayFiles, counts, basePageCount);
fixedRepeatOverlay(
overlayGuide, overlayFiles, counts, basePageCount, progressTracker);
break;
default:
throw new IllegalArgumentException("Invalid overlay mode");
@ -127,10 +148,12 @@ public class PdfOverlayController {
Map<Integer, String> overlayGuide,
File[] overlayFiles,
int basePageCount,
List<File> tempFiles)
List<File> tempFiles,
JobProgressTracker progressTracker)
throws IOException {
int overlayFileIndex = 0;
int pageCountInCurrentOverlay = 0;
boolean trackProgress = progressTracker.isEnabled();
for (int basePageIndex = 1; basePageIndex <= basePageCount; basePageIndex++) {
if (pageCountInCurrentOverlay == 0
@ -152,6 +175,10 @@ public class PdfOverlayController {
}
pageCountInCurrentOverlay++;
if (trackProgress) {
progressTracker.advance();
}
}
}
@ -162,8 +189,12 @@ public class PdfOverlayController {
}
private void interleavedOverlay(
Map<Integer, String> overlayGuide, File[] overlayFiles, int basePageCount)
Map<Integer, String> overlayGuide,
File[] overlayFiles,
int basePageCount,
JobProgressTracker progressTracker)
throws IOException {
boolean trackProgress = progressTracker.isEnabled();
for (int basePageIndex = 1; basePageIndex <= basePageCount; basePageIndex++) {
File overlayFile = overlayFiles[(basePageIndex - 1) % overlayFiles.length];
@ -174,17 +205,26 @@ public class PdfOverlayController {
overlayGuide.put(basePageIndex, overlayFile.getAbsolutePath());
}
}
if (trackProgress) {
progressTracker.advance();
}
}
}
private void fixedRepeatOverlay(
Map<Integer, String> overlayGuide, File[] overlayFiles, int[] counts, int basePageCount)
Map<Integer, String> overlayGuide,
File[] overlayFiles,
int[] counts,
int basePageCount,
JobProgressTracker progressTracker)
throws IOException {
if (overlayFiles.length != counts.length) {
throw new IllegalArgumentException(
"Counts array length must match the number of overlay files");
}
int currentPage = 1;
boolean trackProgress = progressTracker.isEnabled();
for (int i = 0; i < overlayFiles.length; i++) {
File overlayFile = overlayFiles[i];
int repeatCount = counts[i];
@ -196,6 +236,9 @@ public class PdfOverlayController {
for (int page = 0; page < overlayPageCount; page++) {
if (currentPage > basePageCount) break;
overlayGuide.put(currentPage++, overlayFile.getAbsolutePath());
if (trackProgress) {
progressTracker.advance();
}
}
}
}

View File

@ -24,6 +24,8 @@ import stirling.software.SPDF.model.api.general.RearrangePagesRequest;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.ExceptionUtils;
import stirling.software.common.util.GeneralUtils;
import stirling.software.common.util.WebResponseUtils;
@ -34,6 +36,7 @@ import stirling.software.common.util.WebResponseUtils;
public class RearrangePagesPDFController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(consumes = "multipart/form-data", value = "/remove-pages")
@StandardPdfResponse
@ -59,9 +62,20 @@ public class RearrangePagesPDFController {
Collections.sort(pagesToRemove);
JobProgressTracker progressTracker =
jobProgressService.tracker(Math.max(1, pagesToRemove.size()));
boolean trackProgress = progressTracker.isEnabled();
for (int i = pagesToRemove.size() - 1; i >= 0; i--) {
int pageIndex = pagesToRemove.get(i);
document.removePage(pageIndex);
if (trackProgress) {
progressTracker.advance();
}
}
if (trackProgress) {
progressTracker.complete();
}
return WebResponseUtils.pdfDocToWebResponse(
document,
@ -272,14 +286,30 @@ public class RearrangePagesPDFController {
newPages.add(document.getPage(newPageOrder.get(i)));
}
int removalSteps = document.getNumberOfPages();
int additionSteps = newPages.size();
JobProgressTracker progressTracker =
jobProgressService.tracker(Math.max(1, removalSteps + additionSteps));
boolean trackProgress = progressTracker.isEnabled();
// Remove all the pages from the original document
for (int i = document.getNumberOfPages() - 1; i >= 0; i--) {
document.removePage(i);
if (trackProgress) {
progressTracker.advance();
}
}
// Add the pages in the new order
for (PDPage page : newPages) {
document.addPage(page);
if (trackProgress) {
progressTracker.advance();
}
}
if (trackProgress) {
progressTracker.complete();
}
return WebResponseUtils.pdfDocToWebResponse(

View File

@ -19,6 +19,8 @@ import stirling.software.SPDF.model.api.general.RotatePDFRequest;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.ExceptionUtils;
import stirling.software.common.util.WebResponseUtils;
@ -27,6 +29,7 @@ import stirling.software.common.util.WebResponseUtils;
public class RotationController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(consumes = "multipart/form-data", value = "/rotate-pdf")
@StandardPdfResponse
@ -51,9 +54,19 @@ public class RotationController {
// Get the list of pages in the document
PDPageTree pages = document.getPages();
int totalPages = Math.max(1, pages.getCount());
JobProgressTracker progressTracker = jobProgressService.tracker(totalPages);
boolean trackProgress = progressTracker.isEnabled();
for (PDPage page : pages) {
page.setRotation(page.getRotation() + angle);
if (trackProgress) {
progressTracker.advance();
}
}
if (trackProgress) {
progressTracker.complete();
}
return WebResponseUtils.pdfDocToWebResponse(

View File

@ -26,6 +26,8 @@ import stirling.software.SPDF.model.api.general.ScalePagesRequest;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.ExceptionUtils;
import stirling.software.common.util.WebResponseUtils;
@ -34,6 +36,7 @@ import stirling.software.common.util.WebResponseUtils;
public class ScalePagesController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(value = "/scale-pages", consumes = "multipart/form-data")
@StandardPdfResponse
@ -55,6 +58,8 @@ public class ScalePagesController {
PDRectangle targetSize = getTargetSize(targetPDRectangle, sourceDocument);
int totalPages = sourceDocument.getNumberOfPages();
JobProgressTracker progressTracker = jobProgressService.tracker(Math.max(1, totalPages));
boolean trackProgress = progressTracker.isEnabled();
for (int i = 0; i < totalPages; i++) {
PDPage sourcePage = sourceDocument.getPage(i);
PDRectangle sourceSize = sourcePage.getMediaBox();
@ -87,6 +92,10 @@ public class ScalePagesController {
contentStream.restoreGraphicsState();
contentStream.close();
if (trackProgress) {
progressTracker.advance();
}
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
@ -94,6 +103,10 @@ public class ScalePagesController {
outputDocument.close();
sourceDocument.close();
if (trackProgress) {
progressTracker.complete();
}
return WebResponseUtils.bytesToWebResponse(
baos.toByteArray(),
Filenames.toSimpleFileName(file.getOriginalFilename()).replaceFirst("[.][^.]+$", "")

View File

@ -28,6 +28,8 @@ import stirling.software.SPDF.model.api.PDFWithPageNums;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.ExceptionUtils;
import stirling.software.common.util.WebResponseUtils;
@ -37,6 +39,7 @@ import stirling.software.common.util.WebResponseUtils;
public class SplitPDFController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(consumes = "multipart/form-data", value = "/split-pages")
@MultiFileResponse
@ -77,6 +80,8 @@ public class SplitPDFController {
// split the document
splitDocumentsBoas = new ArrayList<>();
int previousPageNumber = 0;
JobProgressTracker progressTracker = jobProgressService.tracker(pageNumbers.size() + 2);
boolean trackProgress = progressTracker.isEnabled();
for (int splitPoint : pageNumbers) {
try (PDDocument splitDocument =
pdfDocumentFactory.createNewDocumentBasedOnOldDocument(document)) {
@ -94,6 +99,10 @@ public class SplitPDFController {
splitDocument.save(baos);
splitDocumentsBoas.add(baos);
if (trackProgress) {
progressTracker.advance();
}
} catch (Exception e) {
ExceptionUtils.logException("document splitting and saving", e);
throw e;
@ -128,11 +137,18 @@ public class SplitPDFController {
throw e;
}
if (trackProgress) {
progressTracker.advance();
}
log.debug("Successfully created zip file with split documents: {}", zipFile.toString());
byte[] data = Files.readAllBytes(zipFile);
Files.deleteIfExists(zipFile);
// return the Resource in the response
if (trackProgress) {
progressTracker.complete();
}
return WebResponseUtils.bytesToWebResponse(
data, filename + ".zip", MediaType.APPLICATION_OCTET_STREAM);

View File

@ -33,6 +33,8 @@ import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.model.PdfMetadata;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.service.PdfMetadataService;
import stirling.software.common.util.ExceptionUtils;
import stirling.software.common.util.WebResponseUtils;
@ -45,6 +47,7 @@ public class SplitPdfByChaptersController {
private final PdfMetadataService pdfMetadataService;
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
private static List<Bookmark> extractOutlineItems(
PDDocument sourceDocument,
@ -178,8 +181,12 @@ public class SplitPdfByChaptersController {
bookmark.getStartPage(),
bookmark.getEndPage());
}
JobProgressTracker progressTracker = jobProgressService.tracker(bookmarks.size() + 1);
boolean trackProgress = progressTracker.isEnabled();
List<ByteArrayOutputStream> splitDocumentsBoas =
getSplitDocumentsBoas(sourceDocument, bookmarks, includeMetadata);
getSplitDocumentsBoas(
sourceDocument, bookmarks, includeMetadata, progressTracker);
zipFile = createZipFile(bookmarks, splitDocumentsBoas);
@ -190,6 +197,14 @@ public class SplitPdfByChaptersController {
Filenames.toSimpleFileName(file.getOriginalFilename())
.replaceFirst("[.][^.]+$", "");
sourceDocument.close();
if (trackProgress) {
progressTracker.advance();
progressTracker.complete();
} else {
jobProgressService.updateProgress(100, null);
}
return WebResponseUtils.bytesToWebResponse(
data, filename + ".zip", MediaType.APPLICATION_OCTET_STREAM);
} finally {
@ -265,13 +280,17 @@ public class SplitPdfByChaptersController {
}
public List<ByteArrayOutputStream> getSplitDocumentsBoas(
PDDocument sourceDocument, List<Bookmark> bookmarks, boolean includeMetadata)
PDDocument sourceDocument,
List<Bookmark> bookmarks,
boolean includeMetadata,
JobProgressTracker progressTracker)
throws Exception {
List<ByteArrayOutputStream> splitDocumentsBoas = new ArrayList<>();
PdfMetadata metadata = null;
if (includeMetadata) {
metadata = pdfMetadataService.extractMetadataFromPdf(sourceDocument);
}
boolean trackProgress = progressTracker.isEnabled();
for (Bookmark bookmark : bookmarks) {
try (PDDocument splitDocument = new PDDocument()) {
boolean isSinglePage = (bookmark.getStartPage() == bookmark.getEndPage());
@ -291,6 +310,9 @@ public class SplitPdfByChaptersController {
splitDocument.save(baos);
splitDocumentsBoas.add(baos);
if (trackProgress) {
progressTracker.advance();
}
} catch (Exception e) {
ExceptionUtils.logException("document splitting and saving", e);
throw e;

View File

@ -32,6 +32,8 @@ import stirling.software.SPDF.model.api.SplitPdfBySectionsRequest;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.WebResponseUtils;
@GeneralApi
@ -39,6 +41,7 @@ import stirling.software.common.util.WebResponseUtils;
public class SplitPdfBySectionsController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(value = "/split-pdf-by-sections", consumes = "multipart/form-data")
@MultiFileResponse
@ -59,15 +62,27 @@ public class SplitPdfBySectionsController {
int horiz = request.getHorizontalDivisions() + 1;
int verti = request.getVerticalDivisions() + 1;
boolean merge = Boolean.TRUE.equals(request.getMerge());
List<PDDocument> splitDocuments = splitPdfPages(sourceDocument, verti, horiz);
int totalSections = sourceDocument.getNumberOfPages() * horiz * verti;
JobProgressTracker progressTracker = jobProgressService.tracker(totalSections + 1);
boolean trackProgress = progressTracker.isEnabled();
List<PDDocument> splitDocuments =
splitPdfPages(sourceDocument, verti, horiz, progressTracker);
String filename =
Filenames.toSimpleFileName(file.getOriginalFilename())
.replaceFirst("[.][^.]+$", "");
if (merge) {
MergeController mergeController = new MergeController(pdfDocumentFactory);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
mergeController.mergeDocuments(splitDocuments).save(baos);
if (trackProgress) {
progressTracker.advance();
}
mergeDocuments(splitDocuments).save(baos);
for (PDDocument doc : splitDocuments) {
doc.close();
}
if (trackProgress) {
progressTracker.complete();
}
return WebResponseUtils.bytesToWebResponse(baos.toByteArray(), filename + "_split.pdf");
}
for (PDDocument doc : splitDocuments) {
@ -99,6 +114,10 @@ public class SplitPdfBySectionsController {
zipOut.finish();
data = Files.readAllBytes(zipFile);
if (trackProgress) {
progressTracker.advance();
progressTracker.complete();
}
return WebResponseUtils.bytesToWebResponse(
data, filename + "_split.zip", MediaType.APPLICATION_OCTET_STREAM);
@ -107,11 +126,28 @@ public class SplitPdfBySectionsController {
}
}
private PDDocument mergeDocuments(List<PDDocument> documents) throws IOException {
PDDocument merged = pdfDocumentFactory.createNewDocument();
for (PDDocument doc : documents) {
for (PDPage page : doc.getPages()) {
merged.addPage(page);
}
}
return merged;
}
public List<PDDocument> splitPdfPages(
PDDocument document, int horizontalDivisions, int verticalDivisions)
PDDocument document,
int horizontalDivisions,
int verticalDivisions,
JobProgressTracker progressTracker)
throws IOException {
List<PDDocument> splitDocuments = new ArrayList<>();
int totalSections = document.getNumberOfPages() * horizontalDivisions * verticalDivisions;
int sectionCounter = 0;
boolean trackProgress = progressTracker.isEnabled();
for (PDPage originalPage : document.getPages()) {
PDRectangle originalMediaBox = originalPage.getMediaBox();
float width = originalMediaBox.getWidth();
@@ -151,6 +187,10 @@ public class SplitPdfBySectionsController {
}
splitDocuments.add(subDoc);
sectionCounter++;
if (trackProgress) {
progressTracker.advance();
}
}
}
}
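
// --- Illustrative sketch, not part of this commit ---
// Consolidates the controller-side progress pattern used across the diffs above, assuming
// the JobProgressService/JobProgressTracker API matches the calls visible here:
// tracker(int totalSteps), isEnabled(), advance(), setStepsCompleted(int) and complete().
// The helper method, its name and its parameters are hypothetical stand-ins.
private void processWithProgress(PDDocument document, JobProgressService jobProgressService)
        throws IOException {
    // Budget one step per page plus one for writing the final result.
    JobProgressTracker progressTracker = jobProgressService.tracker(document.getNumberOfPages() + 1);
    boolean trackProgress = progressTracker.isEnabled();
    int pagesDone = 0;
    for (PDPage page : document.getPages()) {
        // ... per-page work on 'page' ...
        pagesDone++;
        if (trackProgress) {
            progressTracker.advance(); // incremental reporting, one step per page
            // Alternatively, report an absolute count, as handleSplitBySize does:
            // progressTracker.setStepsCompleted(pagesDone);
        }
    }
    // ... serialize the output ...
    if (trackProgress) {
        progressTracker.complete(); // mark the remaining budget (the save step) as done
    }
}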

View File

@@ -25,6 +25,8 @@ import stirling.software.SPDF.model.api.general.SplitPdfBySizeOrCountRequest;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.ExceptionUtils;
import stirling.software.common.util.GeneralUtils;
import stirling.software.common.util.WebResponseUtils;
@@ -35,6 +37,7 @@ import stirling.software.common.util.WebResponseUtils;
public class SplitPdfBySizeController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(value = "/split-by-size-or-count", consumes = "multipart/form-data")
@MultiFileResponse
@@ -78,21 +81,28 @@ public class SplitPdfBySizeController {
String value = request.getSplitValue();
log.debug("Split type: {}, Split value: {}", type, value);
JobProgressTracker progressTracker =
jobProgressService.tracker(sourceDocument.getNumberOfPages() + 2);
boolean trackProgress = progressTracker.isEnabled();
if (type == 0) {
log.debug("Processing split by size");
long maxBytes = GeneralUtils.convertSizeToBytes(value);
log.debug("Max bytes per document: {}", maxBytes);
handleSplitBySize(sourceDocument, maxBytes, zipOut, filename);
handleSplitBySize(
sourceDocument, maxBytes, zipOut, filename, progressTracker);
} else if (type == 1) {
log.debug("Processing split by page count");
int pageCount = Integer.parseInt(value);
log.debug("Pages per document: {}", pageCount);
handleSplitByPageCount(sourceDocument, pageCount, zipOut, filename);
handleSplitByPageCount(
sourceDocument, pageCount, zipOut, filename, progressTracker);
} else if (type == 2) {
log.debug("Processing split by document count");
int documentCount = Integer.parseInt(value);
log.debug("Total number of documents: {}", documentCount);
handleSplitByDocCount(sourceDocument, documentCount, zipOut, filename);
handleSplitByDocCount(
sourceDocument, documentCount, zipOut, filename, progressTracker);
} else {
log.error("Invalid split type: {}", type);
throw ExceptionUtils.createIllegalArgumentException(
@@ -102,6 +112,13 @@ public class SplitPdfBySizeController {
}
log.debug("PDF splitting completed successfully");
if (trackProgress) {
progressTracker.advance();
progressTracker.complete();
} else {
jobProgressService.updateProgress(100, null);
}
} catch (Exception e) {
ExceptionUtils.logException("PDF document loading or processing", e);
throw e;
@@ -138,7 +155,11 @@ public class SplitPdfBySizeController {
}
private void handleSplitBySize(
PDDocument sourceDocument, long maxBytes, ZipOutputStream zipOut, String baseFilename)
PDDocument sourceDocument,
long maxBytes,
ZipOutputStream zipOut,
String baseFilename,
JobProgressTracker progressTracker)
throws IOException {
log.debug("Starting handleSplitBySize with maxBytes={}", maxBytes);
@@ -147,6 +168,7 @@ public class SplitPdfBySizeController {
int fileIndex = 1;
int totalPages = sourceDocument.getNumberOfPages();
int pageAdded = 0;
boolean trackProgress = progressTracker.isEnabled();
// Smart size check frequency - check more often with larger documents
int baseCheckFrequency = 5;
@@ -251,6 +273,9 @@ public class SplitPdfBySizeController {
}
}
}
if (trackProgress) {
progressTracker.setStepsCompleted(Math.min(totalPages, pageIndex + 1));
}
}
// Save final document if it has any pages
@@ -260,13 +285,20 @@ public class SplitPdfBySizeController {
currentDoc.getNumberOfPages(),
fileIndex);
saveDocumentToZip(currentDoc, zipOut, baseFilename, fileIndex++);
if (trackProgress) {
progressTracker.setStepsCompleted(totalPages);
}
}
log.debug("Completed handleSplitBySize with {} document parts created", fileIndex - 1);
}
private void handleSplitByPageCount(
PDDocument sourceDocument, int pageCount, ZipOutputStream zipOut, String baseFilename)
PDDocument sourceDocument,
int pageCount,
ZipOutputStream zipOut,
String baseFilename,
JobProgressTracker progressTracker)
throws IOException {
log.debug("Starting handleSplitByPageCount with pageCount={}", pageCount);
int currentPageCount = 0;
@@ -284,12 +316,17 @@ public class SplitPdfBySizeController {
int pageIndex = 0;
int totalPages = sourceDocument.getNumberOfPages();
log.debug("Processing {} pages", totalPages);
boolean trackProgress = progressTracker.isEnabled();
try {
for (PDPage page : sourceDocument.getPages()) {
pageIndex++;
log.debug("Processing page {} of {}", pageIndex, totalPages);
if (trackProgress) {
progressTracker.setStepsCompleted(pageIndex);
}
try {
log.debug("Adding page {} to current document", pageIndex);
currentDoc.addPage(page);
@@ -347,6 +384,9 @@ public class SplitPdfBySizeController {
log.error("Error saving final document part {}", fileIndex - 1, e);
throw e;
}
if (trackProgress) {
progressTracker.setStepsCompleted(totalPages);
}
} else {
log.debug("Final document has no pages, skipping");
}
@@ -370,7 +410,8 @@ public class SplitPdfBySizeController {
PDDocument sourceDocument,
int documentCount,
ZipOutputStream zipOut,
String baseFilename)
String baseFilename,
JobProgressTracker progressTracker)
throws IOException {
log.debug("Starting handleSplitByDocCount with documentCount={}", documentCount);
int totalPageCount = sourceDocument.getNumberOfPages();
@@ -382,6 +423,7 @@ public class SplitPdfBySizeController {
int currentPageIndex = 0;
int fileIndex = 1;
boolean trackProgress = progressTracker.isEnabled();
for (int i = 0; i < documentCount; i++) {
log.debug("Creating document {} of {}", i + 1, documentCount);
@@ -407,6 +449,9 @@ public class SplitPdfBySizeController {
currentDoc.addPage(sourceDocument.getPage(currentPageIndex));
log.debug("Successfully added page {} to document {}", j + 1, i + 1);
currentPageIndex++;
if (trackProgress) {
progressTracker.setStepsCompleted(currentPageIndex);
}
} catch (Exception e) {
log.error("Error adding page {} to document {}", j + 1, i + 1, e);
throw ExceptionUtils.createFileProcessingException("split", e);

View File

@@ -22,6 +22,8 @@ import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.GeneralApi;
import stirling.software.common.model.api.PDFFile;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.WebResponseUtils;
@GeneralApi
@@ -29,6 +31,7 @@ import stirling.software.common.util.WebResponseUtils;
public class ToSinglePageController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(consumes = "multipart/form-data", value = "/pdf-to-single-page")
@StandardPdfResponse
@@ -45,6 +48,10 @@ public class ToSinglePageController {
// Load the source document
PDDocument sourceDocument = pdfDocumentFactory.load(request);
int totalPages = Math.max(1, sourceDocument.getNumberOfPages());
JobProgressTracker progressTracker = jobProgressService.tracker(totalPages + 1);
boolean trackProgress = progressTracker.isEnabled();
// Calculate total height and max width
float totalHeight = 0;
float maxWidth = 0;
@@ -79,6 +86,9 @@ public class ToSinglePageController {
layerUtility.appendFormAsLayer(newPage, form, af, defaultLayerName);
yOffset -= page.getMediaBox().getHeight();
pageIndex++;
if (trackProgress) {
progressTracker.advance();
}
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -86,6 +96,10 @@ public class ToSinglePageController {
newDocument.close();
sourceDocument.close();
if (trackProgress) {
progressTracker.complete();
}
byte[] result = baos.toByteArray();
return WebResponseUtils.bytesToWebResponse(
result,

View File

@@ -35,6 +35,8 @@ import stirling.software.SPDF.model.api.security.SanitizePdfRequest;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.SecurityApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.WebResponseUtils;
@SecurityApi
@@ -42,6 +44,7 @@ import stirling.software.common.util.WebResponseUtils;
public class SanitizeController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@AutoJobPostMapping(consumes = "multipart/form-data", value = "/sanitize-pdf")
@StandardPdfResponse
@@ -61,28 +64,54 @@ public class SanitizeController {
boolean removeFonts = Boolean.TRUE.equals(request.getRemoveFonts());
PDDocument document = pdfDocumentFactory.load(inputFile, true);
int pageCount = Math.max(1, document.getNumberOfPages());
int pageSteps = 0;
if (removeJavaScript) pageSteps += pageCount;
if (removeEmbeddedFiles) pageSteps += pageCount;
if (removeLinks) pageSteps += pageCount;
if (removeFonts) pageSteps += pageCount;
int metadataSteps = 0;
if (removeXMPMetadata) metadataSteps++;
if (removeMetadata) metadataSteps++;
int totalSteps = Math.max(1, pageSteps + metadataSteps);
JobProgressTracker progressTracker = jobProgressService.tracker(totalSteps);
boolean trackProgress = progressTracker.isEnabled();
if (removeJavaScript) {
sanitizeJavaScript(document);
sanitizeJavaScript(document, progressTracker, trackProgress);
}
if (removeEmbeddedFiles) {
sanitizeEmbeddedFiles(document);
sanitizeEmbeddedFiles(document, progressTracker, trackProgress);
}
if (removeXMPMetadata) {
sanitizeXMPMetadata(document);
if (trackProgress) {
progressTracker.advance();
}
}
if (removeMetadata) {
sanitizeDocumentInfoMetadata(document);
if (trackProgress) {
progressTracker.advance();
}
}
if (removeLinks) {
sanitizeLinks(document);
sanitizeLinks(document, progressTracker, trackProgress);
}
if (removeFonts) {
sanitizeFonts(document);
sanitizeFonts(document, progressTracker, trackProgress);
}
if (trackProgress) {
progressTracker.complete();
}
return WebResponseUtils.pdfDocToWebResponse(
@@ -92,7 +121,9 @@ public class SanitizeController {
+ "_sanitized.pdf");
}
private void sanitizeJavaScript(PDDocument document) throws IOException {
private void sanitizeJavaScript(
PDDocument document, JobProgressTracker progressTracker, boolean trackProgress)
throws IOException {
// Get the root dictionary (catalog) of the PDF
PDDocumentCatalog catalog = document.getDocumentCatalog();
@@ -140,10 +171,15 @@ public class SanitizeController {
}
}
}
if (trackProgress) {
progressTracker.advance();
}
}
}
private void sanitizeEmbeddedFiles(PDDocument document) {
private void sanitizeEmbeddedFiles(
PDDocument document, JobProgressTracker progressTracker, boolean trackProgress) {
PDPageTree allPages = document.getPages();
for (PDPage page : allPages) {
@@ -151,6 +187,9 @@ public class SanitizeController {
if (res != null && res.getCOSObject() != null) {
res.getCOSObject().removeItem(COSName.getPDFName("EmbeddedFiles"));
}
if (trackProgress) {
progressTracker.advance();
}
}
}
@@ -171,7 +210,9 @@ public class SanitizeController {
}
}
private void sanitizeLinks(PDDocument document) throws IOException {
private void sanitizeLinks(
PDDocument document, JobProgressTracker progressTracker, boolean trackProgress)
throws IOException {
for (PDPage page : document.getPages()) {
for (PDAnnotation annotation : page.getAnnotations()) {
if (annotation != null && annotation instanceof PDAnnotationLink linkAnnotation) {
@@ -183,16 +224,23 @@ public class SanitizeController {
}
}
}
if (trackProgress) {
progressTracker.advance();
}
}
}
private void sanitizeFonts(PDDocument document) {
private void sanitizeFonts(
PDDocument document, JobProgressTracker progressTracker, boolean trackProgress) {
for (PDPage page : document.getPages()) {
if (page != null
&& page.getResources() != null
&& page.getResources().getCOSObject() != null) {
page.getResources().getCOSObject().removeItem(COSName.getPDFName("Font"));
}
if (trackProgress) {
progressTracker.advance();
}
}
}
}

View File

@@ -40,6 +40,8 @@ import stirling.software.SPDF.model.api.security.AddWatermarkRequest;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.SecurityApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
import stirling.software.common.util.PdfUtils;
import stirling.software.common.util.WebResponseUtils;
@@ -48,6 +50,7 @@ import stirling.software.common.util.WebResponseUtils;
public class WatermarkController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final JobProgressService jobProgressService;
@InitBinder
public void initBinder(WebDataBinder binder) {
@@ -100,6 +103,10 @@ public class WatermarkController {
PDDocument document = pdfDocumentFactory.load(pdfFile);
// Create a page in the document
int totalPages = Math.max(1, document.getNumberOfPages());
JobProgressTracker progressTracker = jobProgressService.tracker(totalPages);
boolean trackProgress = progressTracker.isEnabled();
for (PDPage page : document.getPages()) {
// Get the page's content stream
@@ -138,6 +145,10 @@ public class WatermarkController {
// Close the content stream
contentStream.close();
if (trackProgress) {
progressTracker.advance();
}
}
if (convertPdfToImage) {
@@ -146,6 +157,10 @@ public class WatermarkController {
document = convertedPdf;
}
if (trackProgress) {
progressTracker.complete();
}
return WebResponseUtils.pdfDocToWebResponse(
document,
Filenames.toSimpleFileName(pdfFile.getOriginalFilename())

View File

@@ -11,6 +11,8 @@ import org.apache.pdfbox.pdmodel.PDResources;
import org.apache.pdfbox.pdmodel.graphics.PDXObject;
import org.springframework.stereotype.Service;
import stirling.software.common.service.JobProgressTracker;
/** Service class responsible for removing image objects from a PDF document. */
@Service
public class PdfImageRemovalService {
@@ -26,6 +28,13 @@ public class PdfImageRemovalService {
* @throws IOException If an error occurs while processing the PDF document.
*/
public PDDocument removeImagesFromPdf(PDDocument document) throws IOException {
return removeImagesFromPdf(document, null);
}
public PDDocument removeImagesFromPdf(PDDocument document, JobProgressTracker progressTracker)
throws IOException {
boolean trackProgress = progressTracker != null && progressTracker.isEnabled();
// Iterate over each page in the PDF document
for (PDPage page : document.getPages()) {
PDResources resources = page.getResources();
@@ -45,6 +54,10 @@ public class PdfImageRemovalService {
for (COSName name : namesToRemove) {
resources.put(name, (PDXObject) null);
}
if (trackProgress) {
progressTracker.advance();
}
}
return document;
}
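
// --- Illustrative sketch, not part of this commit ---
// How a caller might use the two removeImagesFromPdf entry points added above: the
// single-argument overload preserves existing call sites, while callers that already hold a
// JobProgressTracker can pass it through for per-page updates. The wrapper method below is
// hypothetical; only the two overloads shown above are assumed to exist.
PDDocument stripImages(
        PdfImageRemovalService imageRemovalService,
        PDDocument document,
        JobProgressTracker progressTracker)
        throws IOException {
    // The two-argument overload tolerates a null or disabled tracker, so callers can forward
    // whatever they have; existing call sites keep using removeImagesFromPdf(document).
    return imageRemovalService.removeImagesFromPdf(document, progressTracker);
}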

View File

@@ -14,17 +14,19 @@ import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
class RearrangePagesPDFControllerTest {
@Mock private CustomPDFDocumentFactory mockPdfDocumentFactory;
@Mock private JobProgressService mockJobProgressService;
private RearrangePagesPDFController sut;
@BeforeEach
void setUp() {
MockitoAnnotations.openMocks(this);
sut = new RearrangePagesPDFController(mockPdfDocumentFactory);
sut = new RearrangePagesPDFController(mockPdfDocumentFactory, mockJobProgressService);
}
/** Tests the behavior of the oddEvenMerge method when there are no pages in the document. */

View File

@@ -22,11 +22,14 @@ import org.springframework.mock.web.MockMultipartFile;
import stirling.software.SPDF.model.api.general.RotatePDFRequest;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.JobProgressService;
import stirling.software.common.service.JobProgressTracker;
@ExtendWith(MockitoExtension.class)
public class RotationControllerTest {
@Mock private CustomPDFDocumentFactory pdfDocumentFactory;
@Mock private JobProgressService jobProgressService;
@InjectMocks private RotationController rotationController;
@@ -42,12 +45,16 @@ public class RotationControllerTest {
PDDocument mockDocument = mock(PDDocument.class);
PDPageTree mockPages = mock(PDPageTree.class);
PDPage mockPage = mock(PDPage.class);
JobProgressTracker mockTracker = mock(JobProgressTracker.class);
when(pdfDocumentFactory.load(request)).thenReturn(mockDocument);
when(mockDocument.getPages()).thenReturn(mockPages);
when(mockPages.getCount()).thenReturn(1);
when(mockPages.iterator())
.thenReturn(java.util.Collections.singletonList(mockPage).iterator());
when(mockPage.getRotation()).thenReturn(0);
when(jobProgressService.tracker(1)).thenReturn(mockTracker);
when(mockTracker.isEnabled()).thenReturn(false);
// Act
ResponseEntity<byte[]> response = rotationController.rotatePDF(request);
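
// --- Illustrative sketch, not part of this commit ---
// The test diffs above inject a mocked JobProgressService so controllers run without real
// progress plumbing. A hypothetical shared Mockito helper for that setup could look like this,
// relying only on the tracker(int) and isEnabled() calls the tests already use.
static JobProgressService disabledProgressService() {
    JobProgressService jobProgressService = mock(JobProgressService.class);
    JobProgressTracker tracker = mock(JobProgressTracker.class);
    // Any step budget yields the same disabled tracker, so controllers skip advance()/complete().
    when(jobProgressService.tracker(anyInt())).thenReturn(tracker);
    when(tracker.isEnabled()).thenReturn(false);
    return jobProgressService;
}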

View File

@@ -399,3 +399,101 @@
font-size: 0.875rem;
opacity: 0.8;
}
/* -----------------------
Async Job Progress HUD
----------------------- */
.jobProgressContainer {
position: absolute;
left: 8px;
right: 8px;
display: flex;
flex-direction: column;
gap: 10px;
z-index: 6;
pointer-events: none;
}
.jobProgressRow {
pointer-events: auto;
border-radius: 12px;
padding: 10px 14px 12px;
background: rgba(18, 26, 44, 0.88);
box-shadow: 0 18px 36px rgba(11, 16, 28, 0.25);
backdrop-filter: blur(12px);
color: #f1f6ff;
transition: transform 120ms ease;
}
.jobProgressRow[data-status='failed'] {
background: rgba(128, 28, 38, 0.9);
}
:global([data-mantine-color-scheme='dark']) .jobProgressRow {
background: rgba(28, 39, 58, 0.9);
color: var(--mantine-color-gray-2);
}
:global([data-mantine-color-scheme='dark']) .jobProgressRow[data-status='failed'] {
background: rgba(138, 34, 43, 0.92);
}
.jobProgressHeader {
display: flex;
align-items: center;
justify-content: space-between;
gap: 12px;
font-size: 0.78rem;
font-weight: 600;
letter-spacing: 0.01em;
}
.jobProgressLabel {
flex: 1;
min-width: 0;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.jobProgressValue {
font-variant-numeric: tabular-nums;
opacity: 0.92;
}
.jobProgressTrack {
position: relative;
margin-top: 8px;
height: 6px;
border-radius: 999px;
background: rgba(241, 246, 255, 0.25);
overflow: hidden;
}
.jobProgressRow[data-status='failed'] .jobProgressTrack {
background: rgba(255, 255, 255, 0.23);
}
.jobProgressFill {
position: absolute;
inset: 0;
width: 0;
height: 100%;
background: linear-gradient(90deg, #5aa5ff 0%, #206de5 100%);
transition: width 160ms ease;
}
.jobProgressRow[data-status='queued'] .jobProgressFill {
background: linear-gradient(90deg, #cfd8dc 0%, #a7b6bf 100%);
}
.jobProgressRow[data-status='failed'] .jobProgressFill {
background: linear-gradient(90deg, #ff8a80 0%, #e53935 100%);
}
.jobProgressError {
margin-top: 8px;
font-size: 0.72rem;
line-height: 1.3;
opacity: 0.92;
}

View File

@@ -55,6 +55,9 @@ const FileEditorThumbnail = ({
const { pinFile, unpinFile, isFilePinned, activeFiles, actions: fileActions } = useFileContext();
const { state } = useFileState();
const hasError = state.ui.errorFileIds.includes(file.id);
const activeJobs = useMemo(() => file.activeJobs ?? [], [file.activeJobs]);
const visibleJobs = useMemo(() => activeJobs.filter(job => job.status !== 'completed'), [activeJobs]);
const hasActiveJobs = visibleJobs.length > 0;
// ---- Drag state ----
const [isDragging, setIsDragging] = useState(false);
@@ -213,6 +216,9 @@ const FileEditorThumbnail = ({
};
const progressContainerBottom = file.toolHistory ? '56px' : '12px';
const toolChainBottom = hasActiveJobs ? '8px' : '4px';
return (
<div
ref={fileElementRef}
@@ -436,6 +442,54 @@ const FileEditorThumbnail = ({
)}
</div>
{hasActiveJobs && (
<div
className={styles.jobProgressContainer}
style={{ bottom: progressContainerBottom }}
>
{visibleJobs.map(job => {
const queueSuffix = typeof job.queuePosition === 'number' ? ` (#${job.queuePosition + 1})` : '';
const label = job.message
|| (job.status === 'failed'
? t('async.jobFailed', 'Job failed')
: job.status === 'queued'
? `${t('async.jobQueued', 'Waiting in queue')}${queueSuffix}`
: t('async.jobProcessing', 'Processing…'));
const rawPercent = Number.isFinite(job.progressPercent) ? Math.round(job.progressPercent) : 0;
const percent = Math.max(0, Math.min(rawPercent, 100));
const displayValue = job.status === 'failed'
? t('async.jobFailedShort', 'Failed')
: job.status === 'queued'
? t('async.jobQueuedShort', 'Queued')
: `${percent}%`;
const fillBase = job.status === 'failed'
? 100
: Math.max(job.status === 'queued' ? 12 : 6, percent);
const fillWidth = Math.max(0, Math.min(fillBase, 100));
return (
<div
key={job.jobId}
className={styles.jobProgressRow}
data-status={job.status}
title={job.error || label}
>
<div className={styles.jobProgressHeader}>
<span className={styles.jobProgressLabel}>{label}</span>
<span className={styles.jobProgressValue}>{displayValue}</span>
</div>
<div className={styles.jobProgressTrack}>
<div className={styles.jobProgressFill} style={{ width: `${fillWidth}%` }} />
</div>
{job.status === 'failed' && job.error && (
<div className={styles.jobProgressError}>{job.error}</div>
)}
</div>
);
})}
</div>
)}
{/* Drag handle (span wrapper so we can attach a ref reliably) */}
<span ref={handleRef} className={styles.dragHandle} aria-hidden>
<DragIndicatorIcon fontSize="small" />
@@ -445,7 +499,7 @@ const FileEditorThumbnail = ({
{file.toolHistory && (
<div style={{
position: 'absolute',
bottom: '4px',
bottom: toolChainBottom,
left: '4px',
right: '4px',
padding: '4px 6px',

View File

@@ -136,7 +136,7 @@ export function createChildStub(
const originalFileId = parentStub.originalFileId || parentStub.id;
// Copy parent metadata but exclude processedFile to prevent stale data
const { processedFile: _processedFile, ...parentMetadata } = parentStub;
const { processedFile: _processedFile, activeJobs: _activeJobs, ...parentMetadata } = parentStub;
return {
// Copy parent metadata (excluding processedFile)

View File

@@ -87,7 +87,11 @@ export function createFileSelectors(
return stateRef.current.files.ids
.map(id => {
const record = stateRef.current.files.byId[id];
return record ? `${id}:${record.size}:${record.lastModified}` : '';
if (!record) return '';
const jobsSignature = (record.activeJobs || [])
.map(job => `${job.jobId}:${job.status}:${Math.round(job.progressPercent)}`)
.join(';');
return `${id}:${record.size}:${record.lastModified}:${jobsSignature}`;
})
.join('|');
},

View File

@@ -1,9 +1,19 @@
import { useCallback, useRef } from 'react';
import axios, {type CancelTokenSource} from 'axios'; // Real axios for static methods (CancelToken, isCancel)
import axios, { type CancelTokenSource } from 'axios'; // Real axios for static methods (CancelToken, isCancel)
import apiClient from '../../../services/apiClient'; // Our configured instance
import { processResponse, ResponseHandler } from '../../../utils/toolResponseProcessor';
import { isEmptyOutput } from '../../../services/errorUtils';
import {
ensureAsyncParam,
waitForJobCompletion,
fetchJobResult,
downloadResultFile,
readJobResponseBlob,
} from '../../../services/jobService';
import type { JobStatus } from '../../../services/jobService';
import type { ProcessingProgress } from './useToolState';
import type { FileId } from '../../../types/file';
import type { FileJobStatus } from '../../../types/fileContext';
export interface ApiCallsConfig<TParams = void> {
endpoint: string | ((params: TParams) => string);
@@ -13,8 +23,233 @@ export interface ApiCallsConfig<TParams = void> {
preserveBackendFilename?: boolean;
}
export interface BatchApiCallsConfig<TParams = void> extends Omit<ApiCallsConfig<TParams>, 'buildFormData'> {
buildFormData: (params: TParams, files: File[]) => FormData;
}
export interface JobUpdate {
jobId: string;
status: FileJobStatus;
progressPercent: number;
message?: string;
queuePosition?: number | null;
error?: string;
}
type JobUpdateCallback = (fileIds: FileId[], update: JobUpdate) => void;
type BuildStatus = (status: JobStatus) => JobUpdate;
interface RunToolJobOptions<TParams> {
params: TParams;
endpoint: string;
formData: FormData;
originalFiles: File[];
filePrefix?: string;
responseHandler?: ResponseHandler;
preserveBackendFilename?: boolean;
onStatus: (status: string) => void;
onJobUpdate?: JobUpdateCallback;
buildStatus: BuildStatus;
isCancelled: () => boolean;
cancelToken?: CancelTokenSource | null;
}
const DEFAULT_PROGRESS_FALLBACK = 10;
function getFileIds(files: File[]): FileId[] {
return files
.map(file => (file as any)?.fileId)
.filter((id): id is FileId => typeof id === 'string' && id.length > 0);
}
export const useToolApiCalls = <TParams = void>() => {
const cancelTokenRef = useRef<CancelTokenSource | null>(null);
const isCancelledRef = useRef(false);
const jobStatusToUpdate = useCallback<BuildStatus>((status) => {
const hasError = Boolean(status.error);
const isComplete = status.complete && !hasError;
let derivedStatus: FileJobStatus = 'processing';
if (hasError) {
derivedStatus = 'failed';
} else if (status.inQueue && !status.complete) {
derivedStatus = 'queued';
} else if (isComplete) {
derivedStatus = 'completed';
}
const queueMessage = (() => {
if (!status.inQueue) return undefined;
if (typeof status.queuePosition === 'number' && status.queuePosition >= 0) {
return `Queued (#${status.queuePosition + 1})`;
}
return 'Queued';
})();
const message =
status.error ??
status.progressMessage ??
(isComplete ? 'Completed' : queueMessage);
const progress = typeof status.progressPercent === 'number'
? status.progressPercent
: derivedStatus === 'completed'
? 100
: derivedStatus === 'queued'
? 0
: DEFAULT_PROGRESS_FALLBACK;
return {
jobId: status.jobId,
status: derivedStatus,
progressPercent: Math.max(0, Math.min(progress, 100)),
message,
queuePosition: typeof status.queuePosition === 'number' ? status.queuePosition : null,
error: status.error ?? undefined,
};
}, []);
const runToolJob = useCallback(async <T>(options: RunToolJobOptions<T>): Promise<File[]> => {
const {
endpoint,
formData,
originalFiles,
filePrefix,
responseHandler,
preserveBackendFilename,
onStatus,
onJobUpdate,
buildStatus,
isCancelled,
cancelToken,
} = options;
const asyncEndpoint = ensureAsyncParam(endpoint);
const token = cancelToken?.token;
const response = await apiClient.post(asyncEndpoint, formData, {
responseType: 'blob',
cancelToken: token,
});
const headers = response.headers ?? {};
const contentType = (headers['content-type'] || '') as string;
if (contentType.includes('application/json')) {
const payload = await readJobResponseBlob(response.data);
if (payload && typeof payload === 'object' && payload.async && payload.jobId) {
const fileIds = getFileIds(originalFiles);
const initialUpdate: JobUpdate = {
jobId: payload.jobId,
status: 'queued',
progressPercent: 0,
message: 'Job submitted',
queuePosition: null,
};
onJobUpdate?.(fileIds, initialUpdate);
onStatus(initialUpdate.message ?? 'Job submitted');
const finalStatus = await waitForJobCompletion(payload.jobId, {
cancelToken: token,
isCancelled,
onUpdate: (status) => {
const update = buildStatus(status);
onJobUpdate?.(fileIds, update);
if (update.message) {
onStatus(update.message);
}
},
});
const completionUpdate = buildStatus(finalStatus);
if (completionUpdate.status === 'failed') {
onJobUpdate?.(fileIds, completionUpdate);
if (completionUpdate.message) {
onStatus(completionUpdate.message);
}
throw new Error(completionUpdate.error || 'Job failed');
}
const downloadUpdate: JobUpdate = {
jobId: completionUpdate.jobId,
status: 'processing',
progressPercent: Math.max(
96,
completionUpdate.progressPercent
? Math.min(completionUpdate.progressPercent, 98)
: 96,
),
message: 'Downloading results...',
queuePosition: null,
};
onJobUpdate?.(fileIds, downloadUpdate);
onStatus(downloadUpdate.message);
const jobResult = await fetchJobResult(payload.jobId, token);
const prepareUpdate: JobUpdate = {
jobId: completionUpdate.jobId,
status: 'processing',
progressPercent: Math.max(downloadUpdate.progressPercent, 98),
message: 'Preparing files...',
queuePosition: null,
};
// Surface the preparation status before converting the job result into File objects,
// so the message is visible while the work is actually happening.
onJobUpdate?.(fileIds, prepareUpdate);
if (prepareUpdate.message) {
onStatus(prepareUpdate.message);
}
let processedFiles: File[];
if (jobResult.type === 'blob') {
processedFiles = await processResponse(
jobResult.blob,
originalFiles,
filePrefix,
responseHandler,
preserveBackendFilename ? jobResult.headers : undefined,
);
} else if (jobResult.type === 'multipleFiles') {
processedFiles = await Promise.all(
jobResult.files.map(meta => downloadResultFile(meta, token))
);
} else {
throw new Error('Unsupported async job result format');
}
const finalNormalizedUpdate = {
...completionUpdate,
progressPercent: 100,
message: completionUpdate.message ?? 'Completed',
};
onJobUpdate?.(fileIds, finalNormalizedUpdate);
if (finalNormalizedUpdate.message) {
onStatus(finalNormalizedUpdate.message);
}
return processedFiles;
}
if (payload && typeof payload === 'object' && payload.error) {
throw new Error(payload.error);
}
throw new Error('Async job response missing jobId');
}
// Fallback: backend returned immediate blob (synchronous)
return processResponse(
response.data,
originalFiles,
filePrefix,
responseHandler,
preserveBackendFilename ? headers : undefined,
);
}, []);
const processFiles = useCallback(async (
params: TParams,
@@ -23,70 +258,62 @@ export const useToolApiCalls = <TParams = void>() => {
onProgress: (progress: ProcessingProgress) => void,
onStatus: (status: string) => void,
markFileError?: (fileId: string) => void,
onJobUpdate?: JobUpdateCallback,
): Promise<{ outputFiles: File[]; successSourceIds: string[] }> => {
const processedFiles: File[] = [];
const successSourceIds: string[] = [];
const failedFiles: string[] = [];
const total = validFiles.length;
// Create cancel token for this operation
isCancelledRef.current = false;
cancelTokenRef.current = axios.CancelToken.source();
for (let i = 0; i < validFiles.length; i++) {
const file = validFiles[i];
try {
for (let i = 0; i < validFiles.length; i++) {
const file = validFiles[i];
onProgress({ current: i + 1, total, currentFileName: file.name });
onStatus(`Processing ${file.name} (${i + 1}/${total})`);
console.debug('[processFiles] Start', { index: i, total, name: file.name, fileId: (file as any).fileId });
onProgress({ current: i + 1, total, currentFileName: file.name });
onStatus(`Processing ${file.name} (${i + 1}/${total})`);
try {
const formData = config.buildFormData(params, file);
const endpoint = typeof config.endpoint === 'function' ? config.endpoint(params) : config.endpoint;
console.debug('[processFiles] POST', { endpoint, name: file.name });
const response = await apiClient.post(endpoint, formData, {
responseType: 'blob',
cancelToken: cancelTokenRef.current?.token,
});
console.debug('[processFiles] Response OK', { name: file.name, status: (response as any)?.status });
// Forward to shared response processor (uses tool-specific responseHandler if provided)
const responseFiles = await processResponse(
response.data,
[file],
config.filePrefix,
config.responseHandler,
config.preserveBackendFilename ? response.headers : undefined
);
// Guard: some endpoints may return an empty/0-byte file with 200
const empty = isEmptyOutput(responseFiles);
if (empty) {
console.warn('[processFiles] Empty output treated as failure', { name: file.name });
failedFiles.push(file.name);
try {
(markFileError as any)?.((file as any).fileId);
} catch (e) {
console.debug('markFileError', e);
}
continue;
}
processedFiles.push(...responseFiles);
// record source id as successful
successSourceIds.push((file as any).fileId);
console.debug('[processFiles] Success', { name: file.name, produced: responseFiles.length });
} catch (error) {
if (axios.isCancel(error)) {
throw new Error('Operation was cancelled');
}
console.error('[processFiles] Failed', { name: file.name, error });
failedFiles.push(file.name);
// mark errored file so UI can highlight
try {
(markFileError as any)?.((file as any).fileId);
} catch (e) {
console.debug('markFileError', e);
const formData = config.buildFormData(params, file);
const endpoint = typeof config.endpoint === 'function' ? config.endpoint(params) : config.endpoint;
const responseFiles = await runToolJob({
params,
endpoint,
formData,
originalFiles: [file],
filePrefix: config.filePrefix,
responseHandler: config.responseHandler,
preserveBackendFilename: config.preserveBackendFilename,
onStatus,
onJobUpdate,
buildStatus: jobStatusToUpdate,
isCancelled: () => isCancelledRef.current,
cancelToken: cancelTokenRef.current,
});
const empty = isEmptyOutput(responseFiles);
if (empty) {
failedFiles.push(file.name);
markFileError?.((file as any).fileId);
continue;
}
processedFiles.push(...responseFiles);
successSourceIds.push((file as any).fileId);
} catch (error) {
if (axios.isCancel(error) || (error as Error)?.message === 'Operation was cancelled') {
throw new Error('Operation was cancelled');
}
failedFiles.push(file.name);
markFileError?.((file as any).fileId);
console.error('[processFiles] Job failed', { name: file.name, error });
}
}
} finally {
cancelTokenRef.current = null;
}
if (failedFiles.length > 0 && processedFiles.length === 0) {
@@ -99,11 +326,59 @@ export const useToolApiCalls = <TParams = void>() => {
onStatus(`Successfully processed ${processedFiles.length} file${processedFiles.length === 1 ? '' : 's'}`);
}
console.debug('[processFiles] Completed batch', { total, successes: successSourceIds.length, outputs: processedFiles.length, failed: failedFiles.length });
return { outputFiles: processedFiles, successSourceIds };
}, []);
}, [jobStatusToUpdate, runToolJob]);
const processBatchJob = useCallback(async (
params: TParams,
files: File[],
config: BatchApiCallsConfig<TParams>,
onProgress: (progress: ProcessingProgress) => void,
onStatus: (status: string) => void,
onJobUpdate?: JobUpdateCallback,
): Promise<{ outputFiles: File[]; successSourceIds: string[] }> => {
isCancelledRef.current = false;
cancelTokenRef.current = axios.CancelToken.source();
try {
onStatus('Processing files...');
onProgress({ current: 0, total: files.length, currentFileName: files[0]?.name });
const endpoint = typeof config.endpoint === 'function' ? config.endpoint(params) : config.endpoint;
const formData = config.buildFormData(params, files);
const responseFiles = await runToolJob({
params,
endpoint,
formData,
originalFiles: files,
filePrefix: config.filePrefix,
responseHandler: config.responseHandler,
preserveBackendFilename: config.preserveBackendFilename,
onStatus,
onJobUpdate,
buildStatus: jobStatusToUpdate,
isCancelled: () => isCancelledRef.current,
cancelToken: cancelTokenRef.current,
});
const empty = isEmptyOutput(responseFiles);
if (empty) {
throw new Error('No files produced by operation');
}
onProgress({ current: files.length, total: files.length, currentFileName: files[files.length - 1]?.name });
onStatus(`Successfully processed ${responseFiles.length} file${responseFiles.length === 1 ? '' : 's'}`);
const successIds = getFileIds(files).map(id => id as unknown as string);
return { outputFiles: responseFiles, successSourceIds: successIds };
} finally {
cancelTokenRef.current = null;
}
}, [jobStatusToUpdate, runToolJob]);
const cancelOperation = useCallback(() => {
isCancelledRef.current = true;
if (cancelTokenRef.current) {
cancelTokenRef.current.cancel('Operation cancelled by user');
cancelTokenRef.current = null;
@@ -112,6 +387,7 @@ export const useToolApiCalls = <TParams = void>() => {
return {
processFiles,
processBatchJob,
cancelOperation,
};
};

View File

@@ -1,12 +1,11 @@
import { useCallback, useRef, useEffect } from 'react';
import apiClient from '../../../services/apiClient';
import { useTranslation } from 'react-i18next';
import { useFileContext } from '../../../contexts/FileContext';
import { useToolState, type ProcessingProgress } from './useToolState';
import { useToolApiCalls, type ApiCallsConfig } from './useToolApiCalls';
import { useToolApiCalls, type ApiCallsConfig, type BatchApiCallsConfig, type JobUpdate } from './useToolApiCalls';
import { useToolResources } from './useToolResources';
import { extractErrorMessage } from '../../../utils/toolErrorHandler';
import { StirlingFile, extractFiles, FileId, StirlingFileStub, createStirlingFile } from '../../../types/fileContext';
import { StirlingFile, extractFiles, FileId, StirlingFileStub, createStirlingFile, FileJobProgress } from '../../../types/fileContext';
import { FILE_EVENTS } from '../../../services/errorUtils';
import { ResponseHandler } from '../../../utils/toolResponseProcessor';
import { createChildStub, generateProcessedFileMetadata } from '../../../contexts/file/fileActions';
@@ -145,13 +144,13 @@ export const useToolOperation = <TParams>(
config: ToolOperationConfig<TParams>
): ToolOperationHook<TParams> => {
const { t } = useTranslation();
const { addFiles, consumeFiles, undoConsumeFiles, selectors } = useFileContext();
const { consumeFiles, undoConsumeFiles, selectors } = useFileContext();
// Composed hooks
const { state, actions } = useToolState();
const { actions: fileActions } = useFileContext();
const { processFiles, cancelOperation: cancelApiCalls } = useToolApiCalls<TParams>();
const { generateThumbnails, createDownloadInfo, cleanupBlobUrls, extractZipFiles, extractAllZipFiles } = useToolResources();
const { processFiles, processBatchJob, cancelOperation: cancelApiCalls } = useToolApiCalls<TParams>();
const { generateThumbnails, createDownloadInfo, cleanupBlobUrls } = useToolResources();
// Track last operation for undo functionality
const lastOperationRef = useRef<{
@@ -160,6 +159,45 @@ export const useToolOperation = <TParams>(
outputFileIds: FileId[];
} | null>(null);
const handleJobUpdate = useCallback((fileIds: FileId[], update: JobUpdate) => {
if (!fileIds || fileIds.length === 0) {
return;
}
fileIds.forEach(fileId => {
const record = selectors.getStirlingFileStub(fileId);
if (!record) {
return;
}
const existing = record.activeJobs ?? [];
const entry: FileJobProgress = {
jobId: update.jobId,
status: update.status,
progressPercent: Math.max(0, Math.min(update.progressPercent, 100)),
message: update.message,
queuePosition: update.queuePosition ?? null,
error: update.error,
updatedAt: Date.now(),
};
let nextJobs: FileJobProgress[];
if (update.status === 'completed') {
nextJobs = existing.filter(job => job.jobId !== update.jobId);
} else {
const idx = existing.findIndex(job => job.jobId === update.jobId);
if (idx >= 0) {
nextJobs = [...existing];
nextJobs[idx] = entry;
} else {
nextJobs = [...existing, entry];
}
}
fileActions.updateStirlingFileStub(fileId, { activeJobs: nextJobs });
});
}, [selectors, fileActions]);
const executeOperation = useCallback(async (
params: TParams,
selectedFiles: StirlingFile[]
@@ -230,7 +268,8 @@ export const useToolOperation = <TParams>(
apiCallsConfig,
actions.setProgress,
actions.setStatus,
fileActions.markFileError as any
fileActions.markFileError as any,
handleJobUpdate
);
processedFiles = result.outputFiles;
successSourceIds = result.successSourceIds as any;
@@ -238,35 +277,25 @@ export const useToolOperation = <TParams>(
break;
}
case ToolType.multiFile: {
// Multi-file processing - single API call with all files
actions.setStatus('Processing files...');
const formData = config.buildFormData(params, filesForAPI);
const endpoint = typeof config.endpoint === 'function' ? config.endpoint(params) : config.endpoint;
const batchConfig: BatchApiCallsConfig<TParams> = {
endpoint: config.endpoint,
buildFormData: config.buildFormData,
filePrefix: config.filePrefix,
responseHandler: config.responseHandler,
preserveBackendFilename: config.preserveBackendFilename,
};
const response = await apiClient.post(endpoint, formData, { responseType: 'blob' });
const result = await processBatchJob(
params,
filesForAPI,
batchConfig,
actions.setProgress,
actions.setStatus,
handleJobUpdate
);
// Multi-file responses are typically ZIP files that need extraction, but some may return single PDFs
if (config.responseHandler) {
// Use custom responseHandler for multi-file (handles ZIP extraction)
processedFiles = await config.responseHandler(response.data, filesForAPI);
} else if (response.data.type === 'application/pdf' ||
(response.headers && response.headers['content-type'] === 'application/pdf')) {
// Single PDF response (e.g. split with merge option) - add prefix to first original filename
const filename = `${config.filePrefix}${filesForAPI[0]?.name || 'document.pdf'}`;
const singleFile = new File([response.data], filename, { type: 'application/pdf' });
processedFiles = [singleFile];
} else {
// Default: assume ZIP response for multi-file endpoints
// Note: extractZipFiles will check preferences.autoUnzip setting
processedFiles = await extractZipFiles(response.data);
if (processedFiles.length === 0) {
// Try the generic extraction as fallback
processedFiles = await extractAllZipFiles(response.data);
}
}
// Assume all inputs succeeded together unless server provided an error earlier
successSourceIds = validFiles.map(f => (f as any).fileId) as any;
processedFiles = result.outputFiles;
successSourceIds = result.successSourceIds as any;
break;
}
@@ -446,7 +475,7 @@ export const useToolOperation = <TParams>(
actions.setLoading(false);
actions.setProgress(null);
}
}, [t, config, actions, addFiles, consumeFiles, processFiles, generateThumbnails, createDownloadInfo, cleanupBlobUrls, extractZipFiles, extractAllZipFiles]);
}, [t, config, actions, consumeFiles, processFiles, processBatchJob, generateThumbnails, createDownloadInfo, cleanupBlobUrls, handleJobUpdate]);
const cancelOperation = useCallback(() => {
cancelApiCalls();

View File

@@ -0,0 +1,153 @@
import apiClient from './apiClient';
import type { CancelToken } from 'axios';
import { getFilenameFromHeaders } from '../utils/fileResponseUtils';
export interface JobStatus {
jobId: string;
complete: boolean;
error?: string | null;
progressPercent?: number | null;
progressMessage?: string | null;
inQueue?: boolean;
queuePosition?: number | null;
notes?: string[];
}
export interface JobResultFileMeta {
fileId: string;
fileName: string;
contentType: string;
fileSize: number;
}
export type JobResultData =
| { type: 'blob'; blob: Blob; headers: Record<string, any> }
| { type: 'multipleFiles'; files: JobResultFileMeta[] }
| { type: 'json'; data: any };
type FetchStatusResponse = JobStatus & {
[key: string]: any;
};
interface QueueInfo {
inQueue?: boolean;
position?: number;
}
export interface JobPollOptions {
cancelToken?: CancelToken;
intervalMs?: number;
isCancelled?: () => boolean;
onUpdate?: (status: JobStatus) => void;
}
const JOB_BASE_URL = '/api/v1/general/job';
export function ensureAsyncParam(endpoint: string): string {
if (endpoint.includes('async=')) {
return endpoint;
}
const separator = endpoint.includes('?') ? '&' : '?';
return `${endpoint}${separator}async=true`;
}
function normalizeJobStatus(data: any, queueInfo?: QueueInfo): JobStatus {
if (!data) {
return {
jobId: 'unknown',
complete: false,
};
}
const base: FetchStatusResponse = {
jobId: data.jobId ?? data.jobID ?? data.id ?? 'unknown',
complete: Boolean(data.complete),
error: data.error ?? null,
progressPercent: typeof data.progressPercent === 'number' ? data.progressPercent : undefined,
progressMessage: data.progressMessage ?? undefined,
notes: Array.isArray(data.notes) ? data.notes : undefined,
inQueue: queueInfo?.inQueue,
queuePosition: queueInfo?.position ?? null,
};
return base;
}
export async function fetchJobStatus(jobId: string, cancelToken?: CancelToken): Promise<JobStatus> {
const response = await apiClient.get(`${JOB_BASE_URL}/${jobId}`, { cancelToken });
const data = response.data;
if (data && typeof data === 'object' && 'jobResult' in data) {
const queue = data.queueInfo as QueueInfo | undefined;
return normalizeJobStatus((data as any).jobResult, queue);
}
return normalizeJobStatus(data);
}
export async function waitForJobCompletion(jobId: string, options: JobPollOptions = {}): Promise<JobStatus> {
const { intervalMs = 1000, onUpdate, isCancelled } = options;
for (;;) {
if (isCancelled?.()) {
throw new Error('Operation was cancelled');
}
const status = await fetchJobStatus(jobId, options.cancelToken);
onUpdate?.(status);
if (status.complete) {
return status;
}
await new Promise(resolve => setTimeout(resolve, intervalMs));
}
}
export async function fetchJobResult(jobId: string, cancelToken?: CancelToken): Promise<JobResultData> {
const response = await apiClient.get(`${JOB_BASE_URL}/${jobId}/result`, {
responseType: 'blob',
cancelToken,
});
const contentType = (response.headers?.['content-type'] || '') as string;
if (contentType.includes('application/json')) {
const text = await response.data.text();
let parsed: any;
try {
parsed = JSON.parse(text);
} catch (_error) {
throw new Error('Failed to parse async job result JSON');
}
if (parsed?.hasMultipleFiles && Array.isArray(parsed.files)) {
return { type: 'multipleFiles', files: parsed.files as JobResultFileMeta[] };
}
return { type: 'json', data: parsed };
}
return { type: 'blob', blob: response.data, headers: response.headers ?? {} };
}
export async function downloadResultFile(meta: JobResultFileMeta, cancelToken?: CancelToken): Promise<File> {
const response = await apiClient.get(`/api/v1/general/files/${meta.fileId}`, {
responseType: 'blob',
cancelToken,
});
const blob = response.data as Blob;
const type = blob.type || response.headers?.['content-type'] || meta.contentType || 'application/octet-stream';
const filename = meta.fileName || getFilenameFromHeaders(response.headers?.['content-disposition']) || 'download';
return new File([blob], filename, {
type,
lastModified: Date.now(),
});
}
export async function readJobResponseBlob(blob: Blob): Promise<any> {
const text = await blob.text();
return JSON.parse(text);
}

View File

@@ -5,6 +5,18 @@
import { PageOperation } from './pageEditor';
import { FileId, BaseFileMetadata } from './file';
export type FileJobStatus = 'queued' | 'processing' | 'completed' | 'failed';
export interface FileJobProgress {
jobId: string;
status: FileJobStatus;
progressPercent: number;
message?: string;
queuePosition?: number | null;
error?: string;
updatedAt: number;
}
// Re-export FileId for convenience
export type { FileId };
@@ -45,6 +57,7 @@ export interface StirlingFileStub extends BaseFileMetadata {
processedFile?: ProcessedFileMetadata; // PDF page data and processing results
insertAfterPageId?: string; // Page ID after which this file should be inserted
isPinned?: boolean; // Protected from tool consumption (replace/remove)
activeJobs?: FileJobProgress[]; // In-flight async operations associated with this file
// Note: File object stored in provider ref, not in state
}
@@ -155,7 +168,8 @@ export function createNewStirlingFileStub(
isLeaf: true, // New files are leaf nodes by default
versionNumber: 1, // New files start at version 1
thumbnailUrl: thumbnail,
processedFile: processedFileMetadata
processedFile: processedFileMetadata,
activeJobs: []
};
}