Mirror of https://github.com/Frooodle/Stirling-PDF.git (synced 2025-12-18 20:04:17 +01:00)

Merge remote-tracking branch 'origin/V2' into PaymentSelfhost
Commit a17edaedea
@@ -0,0 +1,37 @@
package stirling.software.common.annotations.api;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import io.swagger.v3.oas.annotations.tags.Tag;

/**
 * Combined annotation for Invite management controllers.
 * Includes @RestController, @RequestMapping("/api/v1/invite"), and OpenAPI @Tag.
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@RestController
@RequestMapping("/api/v1/invite")
@Tag(
        name = "Invite",
        description =
                """
                Invite-link generation and acceptance endpoints for onboarding new users.

                Provides the ability to issue invitation tokens, send optional email invites,
                validate and accept invite links, and manage pending invitations for teams.

                Typical use cases include:
                • Admin workflows for issuing time-limited invitations to external users
                • Self-service invite acceptance and team assignment
                • License limit enforcement when provisioning new accounts

                Target users: administrators and automation scripts orchestrating user onboarding.
                """)
public @interface InviteApi {}
@@ -129,61 +129,53 @@ public class SecurityConfiguration {

    @Bean
    public CorsConfigurationSource corsConfigurationSource() {
        // Read CORS allowed origins from settings
        if (applicationProperties.getSystem() != null
                && applicationProperties.getSystem().getCorsAllowedOrigins() != null
                && !applicationProperties.getSystem().getCorsAllowedOrigins().isEmpty()) {
        List<String> configuredOrigins = null;
        if (applicationProperties.getSystem() != null) {
            configuredOrigins = applicationProperties.getSystem().getCorsAllowedOrigins();
        }

            List<String> allowedOrigins = applicationProperties.getSystem().getCorsAllowedOrigins();

            CorsConfiguration cfg = new CorsConfiguration();

            // Use setAllowedOriginPatterns for better wildcard and port support
            cfg.setAllowedOriginPatterns(allowedOrigins);
        CorsConfiguration cfg = new CorsConfiguration();
        if (configuredOrigins != null && !configuredOrigins.isEmpty()) {
            cfg.setAllowedOriginPatterns(configuredOrigins);
            log.debug(
                    "CORS configured with allowed origin patterns from settings.yml: {}",
                    allowedOrigins);

            // Set allowed methods explicitly (including OPTIONS for preflight)
            cfg.setAllowedMethods(List.of("GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"));

            // Set allowed headers explicitly
            cfg.setAllowedHeaders(
                    List.of(
                            "Authorization",
                            "Content-Type",
                            "X-Requested-With",
                            "Accept",
                            "Origin",
                            "X-API-KEY",
                            "X-CSRF-TOKEN"));

            // Set exposed headers (headers that the browser can access)
            cfg.setExposedHeaders(
                    List.of(
                            "WWW-Authenticate",
                            "X-Total-Count",
                            "X-Page-Number",
                            "X-Page-Size",
                            "Content-Disposition",
                            "Content-Type"));

            // Allow credentials (cookies, authorization headers)
            cfg.setAllowCredentials(true);

            // Set max age for preflight cache
            cfg.setMaxAge(3600L);

            UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
            source.registerCorsConfiguration("/**", cfg);
            return source;
                    configuredOrigins);
        } else {
            // No CORS origins configured - return null to disable CORS processing entirely
            // This avoids empty CORS policy that unexpectedly rejects preflights
            // Default to allowing all origins when nothing is configured
            cfg.setAllowedOriginPatterns(List.of("*"));
            log.info(
                    "CORS is disabled - no allowed origins configured in settings.yml (system.corsAllowedOrigins)");
            return null;
                    "No CORS allowed origins configured in settings.yml (system.corsAllowedOrigins); allowing all origins.");
        }

        // Explicitly configure supported HTTP methods (include OPTIONS for preflight)
        cfg.setAllowedMethods(List.of("GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"));

        cfg.setAllowedHeaders(
                List.of(
                        "Authorization",
                        "Content-Type",
                        "X-Requested-With",
                        "Accept",
                        "Origin",
                        "X-API-KEY",
                        "X-CSRF-TOKEN",
                        "X-XSRF-TOKEN"));

        cfg.setExposedHeaders(
                List.of(
                        "WWW-Authenticate",
                        "X-Total-Count",
                        "X-Page-Number",
                        "X-Page-Size",
                        "Content-Disposition",
                        "Content-Type"));

        cfg.setAllowCredentials(true);
        cfg.setMaxAge(3600L);

        UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
        source.registerCorsConfiguration("/**", cfg);
        return source;
    }

    @Bean
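For context on what this CORS configuration permits, here is a minimal TypeScript sketch of a credentialed cross-origin browser call; the origin, endpoint, and token value are illustrative assumptions and are not part of this commit.

// Sketch only: relies on setAllowCredentials(true), the allowed methods, and the
// newly allowed X-XSRF-TOKEN header. Host and endpoint are hypothetical.
async function pingBackend(xsrfToken: string): Promise<number> {
  const response = await fetch('https://stirling.example.com/api/v1/invite', {
    method: 'POST',
    credentials: 'include',            // cookies ride along; needs allowCredentials
    headers: {
      'Content-Type': 'application/json',
      'X-XSRF-TOKEN': xsrfToken,       // accepted because it is in setAllowedHeaders
    },
    body: JSON.stringify({ email: 'new.user@example.com' }),
  });
  return response.status;              // the preflight result is cached for 3600s via setMaxAge
}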
@@ -15,7 +15,7 @@ import jakarta.servlet.http.HttpServletRequest;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

import stirling.software.common.annotations.api.UserApi;
import stirling.software.common.annotations.api.InviteApi;
import stirling.software.common.model.ApplicationProperties;
import stirling.software.common.model.enumeration.Role;
import stirling.software.proprietary.model.Team;
@@ -26,11 +26,9 @@ import stirling.software.proprietary.security.service.EmailService;
import stirling.software.proprietary.security.service.TeamService;
import stirling.software.proprietary.security.service.UserService;

@UserApi
@InviteApi
@Slf4j
@RequiredArgsConstructor
@RestController
@RequestMapping("/api/v1/invite")
public class InviteLinkController {

    private final InviteTokenRepository inviteTokenRepository;
(8 file diffs suppressed because they are too large.)
@@ -41,6 +41,30 @@
    "cancel": "Cancel",
    "confirm": "Extract"
  },
  "defaultApp": {
    "title": "Set as Default PDF App",
    "message": "Would you like to set Stirling PDF as your default PDF editor?",
    "description": "You can change this later in your system settings.",
    "notNow": "Not Now",
    "setDefault": "Set Default",
    "dismiss": "Dismiss",
    "prompt": {
      "title": "Set as Default PDF Editor",
      "message": "Make Stirling PDF your default application for opening PDF files."
    },
    "success": {
      "title": "Default App Set",
      "message": "Stirling PDF is now your default PDF editor"
    },
    "settingsOpened": {
      "title": "Settings Opened",
      "message": "Please select Stirling PDF in your system settings"
    },
    "error": {
      "title": "Error",
      "message": "Failed to set default PDF handler"
    }
  },
  "language": {
    "direction": "ltr"
  },
@@ -332,7 +356,13 @@
    "mode": {
      "fullscreen": "Fullscreen",
      "sidebar": "Sidebar"
    }
  },
  "defaultPdfEditor": "Default PDF editor",
  "defaultPdfEditorActive": "Stirling PDF is your default PDF editor",
  "defaultPdfEditorInactive": "Another application is set as default",
  "defaultPdfEditorChecking": "Checking...",
  "defaultPdfEditorSet": "Already Default",
  "setAsDefault": "Set as Default"
  },
  "hotkeys": {
    "title": "Keyboard Shortcuts",
(32 file diffs suppressed because they are too large.)
frontend/src-tauri/Cargo.lock (generated; 11 changed lines)
@@ -643,6 +643,15 @@ dependencies = [
 "libc",
]

[[package]]
name = "core-services"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0aa845ab21b847ee46954be761815f18f16469b29ef3ba250241b1b8bab659a"
dependencies = [
 "core-foundation 0.10.1",
]

[[package]]
name = "cpufeatures"
version = "0.2.17"
@@ -3941,6 +3950,8 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
name = "stirling-pdf"
version = "0.1.0"
dependencies = [
 "core-foundation 0.10.1",
 "core-services",
 "log",
 "reqwest 0.11.27",
 "serde",
@@ -31,3 +31,7 @@ tauri-plugin-fs = "2.4.4"
tauri-plugin-single-instance = "2.0.1"
tokio = { version = "1.0", features = ["time"] }
reqwest = { version = "0.11", features = ["json"] }

[target.'cfg(target_os = "macos")'.dependencies]
core-foundation = "0.10"
core-services = "1.0"
frontend/src-tauri/src/commands/default_app.rs (new file, 217 lines)
@@ -0,0 +1,217 @@
use crate::utils::add_log;

#[cfg(any(target_os = "windows", target_os = "linux"))]
use std::process::Command;

/// Check if Stirling PDF is the default PDF handler
#[tauri::command]
pub fn is_default_pdf_handler() -> Result<bool, String> {
    add_log("🔍 Checking if app is default PDF handler".to_string());

    #[cfg(target_os = "windows")]
    {
        check_default_windows()
    }

    #[cfg(target_os = "macos")]
    {
        check_default_macos()
    }

    #[cfg(target_os = "linux")]
    {
        check_default_linux()
    }
}

/// Attempt to set/prompt for Stirling PDF as default PDF handler
#[tauri::command]
pub fn set_as_default_pdf_handler() -> Result<String, String> {
    add_log("⚙️ Attempting to set as default PDF handler".to_string());

    #[cfg(target_os = "windows")]
    {
        set_default_windows()
    }

    #[cfg(target_os = "macos")]
    {
        set_default_macos()
    }

    #[cfg(target_os = "linux")]
    {
        set_default_linux()
    }
}

// ============================================================================
// Windows Implementation
// ============================================================================

#[cfg(target_os = "windows")]
fn check_default_windows() -> Result<bool, String> {
    use std::os::windows::process::CommandExt;
    const CREATE_NO_WINDOW: u32 = 0x08000000;

    // Query the default handler for .pdf extension
    let output = Command::new("cmd")
        .args(["/C", "assoc .pdf"])
        .creation_flags(CREATE_NO_WINDOW)
        .output()
        .map_err(|e| format!("Failed to check default app: {}", e))?;

    let assoc = String::from_utf8_lossy(&output.stdout);
    add_log(format!("Windows PDF association: {}", assoc.trim()));

    // Get the ProgID for .pdf files
    if let Some(prog_id) = assoc.trim().strip_prefix(".pdf=") {
        // Query what application handles this ProgID
        let output = Command::new("cmd")
            .args(["/C", &format!("ftype {}", prog_id)])
            .creation_flags(CREATE_NO_WINDOW)
            .output()
            .map_err(|e| format!("Failed to query file type: {}", e))?;

        let ftype = String::from_utf8_lossy(&output.stdout);
        add_log(format!("Windows file type: {}", ftype.trim()));

        // Check if it contains "Stirling" or our app name
        let is_default = ftype.to_lowercase().contains("stirling");
        Ok(is_default)
    } else {
        Ok(false)
    }
}

#[cfg(target_os = "windows")]
fn set_default_windows() -> Result<String, String> {
    // On Windows 10+, we need to open the Default Apps settings
    // as programmatic setting requires a signed installer
    Command::new("cmd")
        .args(["/C", "start", "ms-settings:defaultapps"])
        .spawn()
        .map_err(|e| format!("Failed to open default apps settings: {}", e))?;

    add_log("Opened Windows Default Apps settings".to_string());
    Ok("opened_settings".to_string())
}

// ============================================================================
// macOS Implementation (using LaunchServices framework)
// ============================================================================

#[cfg(target_os = "macos")]
fn check_default_macos() -> Result<bool, String> {
    use core_foundation::base::TCFType;
    use core_foundation::string::{CFString, CFStringRef};
    use std::os::raw::c_int;

    // Define the LSCopyDefaultRoleHandlerForContentType function
    #[link(name = "CoreServices", kind = "framework")]
    extern "C" {
        fn LSCopyDefaultRoleHandlerForContentType(
            content_type: CFStringRef,
            role: c_int,
        ) -> CFStringRef;
    }

    const K_LS_ROLES_ALL: c_int = 0xFFFFFFFF_u32 as c_int;

    unsafe {
        // Query the default handler for "com.adobe.pdf" (PDF UTI - standard macOS identifier)
        let pdf_uti = CFString::new("com.adobe.pdf");
        let handler_ref = LSCopyDefaultRoleHandlerForContentType(pdf_uti.as_concrete_TypeRef(), K_LS_ROLES_ALL);

        if handler_ref.is_null() {
            add_log("No default PDF handler found".to_string());
            return Ok(false);
        }

        let handler = CFString::wrap_under_create_rule(handler_ref);
        let handler_str = handler.to_string();
        add_log(format!("macOS PDF handler: {}", handler_str));

        // Check if it's our bundle identifier
        let is_default = handler_str == "stirling.pdf.dev";
        Ok(is_default)
    }
}

#[cfg(target_os = "macos")]
fn set_default_macos() -> Result<String, String> {
    use core_foundation::base::TCFType;
    use core_foundation::string::{CFString, CFStringRef};
    use std::os::raw::c_int;

    // Define the LSSetDefaultRoleHandlerForContentType function
    #[link(name = "CoreServices", kind = "framework")]
    extern "C" {
        fn LSSetDefaultRoleHandlerForContentType(
            content_type: CFStringRef,
            role: c_int,
            handler_bundle_id: CFStringRef,
        ) -> c_int; // OSStatus
    }

    const K_LS_ROLES_ALL: c_int = 0xFFFFFFFF_u32 as c_int;

    unsafe {
        // Set our app as the default handler for PDF files
        let pdf_uti = CFString::new("com.adobe.pdf");
        let our_bundle_id = CFString::new("stirling.pdf.dev");

        let status = LSSetDefaultRoleHandlerForContentType(
            pdf_uti.as_concrete_TypeRef(),
            K_LS_ROLES_ALL,
            our_bundle_id.as_concrete_TypeRef(),
        );

        if status == 0 {
            add_log("Successfully triggered default app dialog".to_string());
            Ok("set_successfully".to_string())
        } else {
            let error_msg = format!("LaunchServices returned status: {}", status);
            add_log(error_msg.clone());
            Err(error_msg)
        }
    }
}

// ============================================================================
// Linux Implementation
// ============================================================================

#[cfg(target_os = "linux")]
fn check_default_linux() -> Result<bool, String> {
    // Use xdg-mime to check the default application for PDF files
    let output = Command::new("xdg-mime")
        .args(["query", "default", "application/pdf"])
        .output()
        .map_err(|e| format!("Failed to check default app: {}", e))?;

    let handler = String::from_utf8_lossy(&output.stdout);
    add_log(format!("Linux PDF handler: {}", handler.trim()));

    // Check if it's our .desktop file
    let is_default = handler.trim() == "stirling-pdf.desktop";
    Ok(is_default)
}

#[cfg(target_os = "linux")]
fn set_default_linux() -> Result<String, String> {
    // Use xdg-mime to set the default application for PDF files
    let result = Command::new("xdg-mime")
        .args(["default", "stirling-pdf.desktop", "application/pdf"])
        .output()
        .map_err(|e| format!("Failed to set default app: {}", e))?;

    if result.status.success() {
        add_log("Set as default PDF handler on Linux".to_string());
        Ok("set_successfully".to_string())
    } else {
        let error = String::from_utf8_lossy(&result.stderr);
        add_log(format!("Failed to set default: {}", error));
        Err(format!("Failed to set as default: {}", error))
    }
}
@@ -14,23 +14,11 @@ pub fn add_opened_file(file_path: String) {
// Command to get opened file paths (if app was launched with files)
#[tauri::command]
pub async fn get_opened_files() -> Result<Vec<String>, String> {
    let mut all_files: Vec<String> = Vec::new();

    // Get files from command line arguments (Windows/Linux 'Open With Stirling' behaviour)
    let args: Vec<String> = std::env::args().collect();
    let pdf_files: Vec<String> = args.iter()
        .skip(1)
        .filter(|arg| std::path::Path::new(arg).exists())
        .cloned()
        .collect();

    all_files.extend(pdf_files);

    // Add any files sent via events or other instances (macOS 'Open With Stirling' behaviour, also Windows/Linux extra files)
    {
        let opened_files = OPENED_FILES.lock().unwrap();
        all_files.extend(opened_files.clone());
    }
    // Get all files from the OPENED_FILES store
    // Command line args are processed in setup() callback and added to this store
    // Additional files from second instances or events are also added here
    let opened_files = OPENED_FILES.lock().unwrap();
    let all_files = opened_files.clone();

    add_log(format!("📂 Returning {} opened file(s)", all_files.len()));
    Ok(all_files)
@@ -1,7 +1,9 @@
pub mod backend;
pub mod health;
pub mod files;
pub mod default_app;

pub use backend::{start_backend, cleanup_backend};
pub use health::check_backend_health;
pub use files::{get_opened_files, clear_opened_files, add_opened_file};
pub use default_app::{is_default_pdf_handler, set_as_default_pdf_handler};
@@ -3,7 +3,16 @@ use tauri::{RunEvent, WindowEvent, Emitter, Manager};
mod utils;
mod commands;

use commands::{start_backend, check_backend_health, get_opened_files, clear_opened_files, cleanup_backend, add_opened_file};
use commands::{
    start_backend,
    check_backend_health,
    get_opened_files,
    clear_opened_files,
    cleanup_backend,
    add_opened_file,
    is_default_pdf_handler,
    set_as_default_pdf_handler,
};
use utils::{add_log, get_tauri_logs};

#[cfg_attr(mobile, tauri::mobile_entry_point)]
@@ -23,9 +32,6 @@ pub fn run() {
                // Store file for later retrieval (in case frontend isn't ready yet)
                add_opened_file(arg.clone());

                // Also emit event for immediate handling if frontend is ready
                let _ = app.emit("file-opened", arg.clone());

                // Bring the existing window to front
                if let Some(window) = app.get_webview_window("main") {
                    let _ = window.set_focus();
@@ -33,13 +39,34 @@ pub fn run() {
                }
            }
        }

        // Emit a generic notification that files were added (frontend will re-read storage)
        let _ = app.emit("files-changed", ());
    }))
    .setup(|_app| {
        add_log("🚀 Tauri app setup started".to_string());

        // Process command line arguments on first launch
        let args: Vec<String> = std::env::args().collect();
        for arg in args.iter().skip(1) {
            if std::path::Path::new(arg).exists() {
                add_log(format!("📂 Initial file from command line: {}", arg));
                add_opened_file(arg.clone());
            }
        }

        add_log("🔍 DEBUG: Setup completed".to_string());
        Ok(())
    })
    .invoke_handler(tauri::generate_handler![start_backend, check_backend_health, get_opened_files, clear_opened_files, get_tauri_logs])
    .invoke_handler(tauri::generate_handler![
        start_backend,
        check_backend_health,
        get_opened_files,
        clear_opened_files,
        get_tauri_logs,
        is_default_pdf_handler,
        set_as_default_pdf_handler,
    ])
    .build(tauri::generate_context!())
    .expect("error while building tauri application")
    .run(|app_handle, event| {
@@ -58,6 +85,7 @@ pub fn run() {
        #[cfg(target_os = "macos")]
        RunEvent::Opened { urls } => {
            add_log(format!("📂 Tauri file opened event: {:?}", urls));
            let mut added_files = false;
            for url in urls {
                let url_str = url.as_str();
                if url_str.starts_with("file://") {
@@ -65,11 +93,14 @@ pub fn run() {
                    if file_path.ends_with(".pdf") {
                        add_log(format!("📂 Processing opened PDF: {}", file_path));
                        add_opened_file(file_path.to_string());
                        // Use unified event name for consistency across platforms
                        let _ = app_handle.emit("file-opened", file_path.to_string());
                        added_files = true;
                    }
                }
            }
            // Emit a generic notification that files were added (frontend will re-read storage)
            if added_files {
                let _ = app_handle.emit("files-changed", ());
            }
        }
        _ => {
            // Only log unhandled events in debug mode to reduce noise
@@ -1,5 +1,6 @@
import { Suspense } from "react";
import { AppProviders } from "@app/components/AppProviders";
import { AppLayout } from "@app/components/AppLayout";
import { LoadingFallback } from "@app/components/shared/LoadingFallback";
import HomePage from "@app/pages/HomePage";
import OnboardingTour from "@app/components/onboarding/OnboardingTour";
@@ -16,8 +17,10 @@ export default function App() {
  return (
    <Suspense fallback={<LoadingFallback />}>
      <AppProviders>
        <HomePage />
        <OnboardingTour />
        <AppLayout>
          <HomePage />
          <OnboardingTour />
        </AppLayout>
      </AppProviders>
    </Suspense>
  );
frontend/src/core/components/AppLayout.tsx (new file, 31 lines)
@@ -0,0 +1,31 @@
import { ReactNode } from 'react';
import { useBanner } from '@app/contexts/BannerContext';

interface AppLayoutProps {
  children: ReactNode;
}

/**
 * App layout wrapper that handles banner rendering and viewport sizing
 * Automatically adjusts child components to fit remaining space after banner
 */
export function AppLayout({ children }: AppLayoutProps) {
  const { banner } = useBanner();

  return (
    <>
      <style>{`
        .h-screen,
        .right-rail {
          height: 100% !important;
        }
      `}</style>
      <div style={{ height: '100vh', display: 'flex', flexDirection: 'column' }}>
        {banner}
        <div style={{ flex: 1, minHeight: 0, height: 0 }}>
          {children}
        </div>
      </div>
    </>
  );
}
@@ -16,6 +16,7 @@ import { OnboardingProvider } from "@app/contexts/OnboardingContext";
import { TourOrchestrationProvider } from "@app/contexts/TourOrchestrationContext";
import { AdminTourOrchestrationProvider } from "@app/contexts/AdminTourOrchestrationContext";
import { PageEditorProvider } from "@app/contexts/PageEditorContext";
import { BannerProvider } from "@app/contexts/BannerContext";
import ErrorBoundary from "@app/components/shared/ErrorBoundary";
import { useScarfTracking } from "@app/hooks/useScarfTracking";
import { useAppInitialization } from "@app/hooks/useAppInitialization";
@@ -50,22 +51,23 @@ export function AppProviders({ children, appConfigRetryOptions, appConfigProvide
    <PreferencesProvider>
      <RainbowThemeProvider>
        <ErrorBoundary>
          <OnboardingProvider>
            <AppConfigProvider
              retryOptions={appConfigRetryOptions}
              {...appConfigProviderProps}
            >
              <ScarfTrackingInitializer />
              <FileContextProvider enableUrlSync={true} enablePersistence={true}>
                <AppInitializer />
                <ToolRegistryProvider>
                  <NavigationProvider>
                    <FilesModalProvider>
                      <ToolWorkflowProvider>
                        <HotkeyProvider>
                          <SidebarProvider>
                            <ViewerProvider>
                              <PageEditorProvider>
          <BannerProvider>
            <OnboardingProvider>
              <AppConfigProvider
                retryOptions={appConfigRetryOptions}
                {...appConfigProviderProps}
              >
                <ScarfTrackingInitializer />
                <FileContextProvider enableUrlSync={true} enablePersistence={true}>
                  <AppInitializer />
                  <ToolRegistryProvider>
                    <NavigationProvider>
                      <FilesModalProvider>
                        <ToolWorkflowProvider>
                          <HotkeyProvider>
                            <SidebarProvider>
                              <ViewerProvider>
                                <PageEditorProvider>
                                  <SignatureProvider>
                                    <RightRailProvider>
                                      <TourOrchestrationProvider>
@@ -76,16 +78,17 @@ export function AppProviders({ children, appConfigRetryOptions, appConfigProvide
                                    </RightRailProvider>
                                  </SignatureProvider>
                                </PageEditorProvider>
                              </ViewerProvider>
                            </SidebarProvider>
                          </HotkeyProvider>
                        </ToolWorkflowProvider>
                      </FilesModalProvider>
                    </NavigationProvider>
                  </ToolRegistryProvider>
                </FileContextProvider>
              </AppConfigProvider>
            </OnboardingProvider>
                              </ViewerProvider>
                            </SidebarProvider>
                          </HotkeyProvider>
                        </ToolWorkflowProvider>
                      </FilesModalProvider>
                    </NavigationProvider>
                  </ToolRegistryProvider>
                </FileContextProvider>
              </AppConfigProvider>
            </OnboardingProvider>
          </BannerProvider>
        </ErrorBoundary>
      </RainbowThemeProvider>
    </PreferencesProvider>
@@ -1,4 +1,4 @@
import React from "react";
import React, { useMemo } from "react";
import { TourProvider, useTour, type StepType } from '@reactour/tour';
import { useOnboarding } from '@app/contexts/OnboardingContext';
import { useTranslation } from 'react-i18next';
@@ -10,6 +10,7 @@ import ArrowForwardIcon from '@mui/icons-material/ArrowForward';
import CheckIcon from '@mui/icons-material/Check';
import TourWelcomeModal from '@app/components/onboarding/TourWelcomeModal';
import '@app/components/onboarding/OnboardingTour.css';
import i18n from "@app/i18n";

// Enum case order defines order steps will appear
enum TourStep {
@@ -120,7 +121,7 @@ export default function OnboardingTour() {
  } = useAdminTourOrchestration();

  // Define steps as object keyed by enum - TypeScript ensures all keys are present
  const stepsConfig: Record<TourStep, StepType> = {
  const stepsConfig: Record<TourStep, StepType> = useMemo(() => ({
    [TourStep.ALL_TOOLS]: {
      selector: '[data-tour="tool-panel"]',
      content: t('onboarding.allTools', 'This is the <strong>Tools</strong> panel, where you can browse and select from all available PDF tools.'),
@@ -248,10 +249,10 @@ export default function OnboardingTour() {
      position: 'right',
      padding: 10,
    },
  };
  }), [t]);

  // Define admin tour steps
  const adminStepsConfig: Record<AdminTourStep, StepType> = {
  const adminStepsConfig: Record<AdminTourStep, StepType> = useMemo(() => ({
    [AdminTourStep.WELCOME]: {
      selector: '[data-tour="config-button"]',
      content: t('adminOnboarding.welcome', "Welcome to the <strong>Admin Tour</strong>! Let's explore the powerful enterprise features and settings available to system administrators."),
@@ -363,7 +364,7 @@ export default function OnboardingTour() {
        removeAllGlows();
      },
    },
  };
  }), [t]);

  // Select steps based on tour type
  const steps = tourType === 'admin'
@@ -416,7 +417,7 @@ export default function OnboardingTour() {
        }}
      />
      <TourProvider
        key={tourType}
        key={`${tourType}-${i18n.language}`}
        steps={steps}
        maskClassName={tourType === 'admin' ? 'admin-tour-mask' : undefined}
        onClickClose={handleCloseTour}
frontend/src/core/components/shared/InfoBanner.tsx (new file, 72 lines)
@@ -0,0 +1,72 @@
import React from 'react';
import { Paper, Group, Text, Button, ActionIcon } from '@mantine/core';
import LocalIcon from '@app/components/shared/LocalIcon';

interface InfoBannerProps {
  icon: string;
  message: string;
  buttonText: string;
  buttonIcon?: string;
  onButtonClick: () => void;
  onDismiss: () => void;
  loading?: boolean;
  show?: boolean;
}

/**
 * Generic info banner component for displaying dismissible messages at the top of the app
 */
export const InfoBanner: React.FC<InfoBannerProps> = ({
  icon,
  message,
  buttonText,
  buttonIcon = 'check-circle-rounded',
  onButtonClick,
  onDismiss,
  loading = false,
  show = true,
}) => {
  if (!show) {
    return null;
  }

  return (
    <Paper
      p="sm"
      radius={0}
      style={{
        background: 'var(--mantine-color-blue-0)',
        borderBottom: '1px solid var(--mantine-color-blue-2)',
        position: 'relative',
      }}
    >
      <Group gap="sm" align="center" wrap="nowrap">
        <LocalIcon icon={icon} width="1.2rem" height="1.2rem" style={{ color: 'var(--mantine-color-blue-6)', flexShrink: 0 }} />
        <Text fw={500} size="sm" style={{ color: 'var(--mantine-color-blue-9)' }}>
          {message}
        </Text>
        <Button
          variant="light"
          color="blue"
          size="xs"
          onClick={onButtonClick}
          loading={loading}
          leftSection={<LocalIcon icon={buttonIcon} width="0.9rem" height="0.9rem" />}
          style={{ flexShrink: 0 }}
        >
          {buttonText}
        </Button>
      </Group>
      <ActionIcon
        variant="subtle"
        color="gray"
        size="sm"
        onClick={onDismiss}
        aria-label="Dismiss"
        style={{ position: 'absolute', top: '50%', right: '0.5rem', transform: 'translateY(-50%)' }}
      >
        <LocalIcon icon="close-rounded" width="1rem" height="1rem" />
      </ActionIcon>
    </Paper>
  );
};
@@ -272,8 +272,13 @@ const LanguageSelector: React.FC<LanguageSelectorProps> = ({ position = 'bottom-
  <ScrollArea h={190} type="scroll">
    <div className={styles.languageGrid}>
      {languageOptions.map((option, index) => {
        // Enable languages with >90% translation completion
        const enabledLanguages = ['en-GB', 'ar-AR', 'de-DE', 'es-ES', 'fr-FR', 'it-IT', 'pt-BR', 'ru-RU', 'zh-CN'];
        const enabledLanguages = [
          'en-GB', 'zh-CN', 'zh-TW', 'ar-AR', 'fa-IR', 'tr-TR', 'uk-UA', 'zh-BO', 'sl-SI',
          'ru-RU', 'ja-JP', 'ko-KR', 'hu-HU', 'ga-IE', 'bg-BG', 'es-ES', 'hi-IN', 'hr-HR',
          'el-GR', 'ml-ML', 'pt-BR', 'pl-PL', 'pt-PT', 'sk-SK', 'sr-LATN-RS', 'no-NB',
          'th-TH', 'vi-VN', 'az-AZ', 'eu-ES', 'de-DE', 'sv-SE', 'it-IT', 'ca-CA', 'id-ID',
          'ro-RO', 'fr-FR', 'nl-NL', 'da-DK', 'cs-CZ'
        ];
        const isDisabled = !enabledLanguages.includes(option.value);

        return (
@@ -191,7 +191,6 @@ export function LocalEmbedPDF({ file, url, enableAnnotations = false, onSignatur
        height: '100%',
        width: '100%',
        position: 'relative',
        overflow: 'hidden',
        flex: 1,
        minHeight: 0,
        minWidth: 0,
@@ -287,8 +286,6 @@ export function LocalEmbedPDF({ file, url, enableAnnotations = false, onSignatur
        minHeight: 0,
        minWidth: 0,
        contain: 'strict',
        display: 'flex',
        justifyContent: 'center',
      }}
    >
      <Scroller
@@ -7,9 +7,7 @@ import {
  determineAutoZoom,
  DEFAULT_FALLBACK_ZOOM,
  DEFAULT_VISIBILITY_THRESHOLD,
  measureRenderedPageRect,
  useFitWidthResize,
  ZoomViewport,
} from '@app/utils/viewerZoom';
import { getFirstPageAspectRatioFromStub } from '@app/utils/pageMetadata';

@@ -73,18 +71,6 @@ export function ZoomAPIBridge() {
    }
  }, [spreadMode, zoomState?.zoomLevel, scheduleAutoZoom, requestFitWidth]);

  const getViewportSnapshot = useCallback((): ZoomViewport | null => {
    if (!zoomState || typeof zoomState !== 'object') {
      return null;
    }

    if ('viewport' in zoomState) {
      const candidate = (zoomState as { viewport?: ZoomViewport | null }).viewport;
      return candidate ?? null;
    }

    return null;
  }, [zoomState]);

  const isManagedZoom =
    !!zoom &&
@@ -119,7 +105,7 @@ export function ZoomAPIBridge() {
    }

    const fitWidthZoom = zoomState.currentZoomLevel;
    if (!fitWidthZoom || fitWidthZoom <= 0) {
    if (!fitWidthZoom || fitWidthZoom <= 0 || fitWidthZoom === 1) {
      return;
    }

@@ -137,37 +123,23 @@ export function ZoomAPIBridge() {
    const pagesPerSpread = currentSpreadMode !== SpreadMode.None ? 2 : 1;
    const metadataAspectRatio = getFirstPageAspectRatioFromStub(firstFileStub);

    const viewport = getViewportSnapshot();

    if (cancelled) {
      return;
    }

    const metrics = viewport ?? {};
    const viewportWidth =
      metrics.clientWidth ?? metrics.width ?? window.innerWidth ?? 0;
    const viewportHeight =
      metrics.clientHeight ?? metrics.height ?? window.innerHeight ?? 0;
    const viewportWidth = window.innerWidth ?? 0;
    const viewportHeight = window.innerHeight ?? 0;

    if (viewportWidth <= 0 || viewportHeight <= 0) {
      return;
    }

    const pageRect = await measureRenderedPageRect({
      shouldCancel: () => cancelled,
    });
    if (cancelled) {
      return;
    }

    const decision = determineAutoZoom({
      viewportWidth,
      viewportHeight,
      fitWidthZoom,
      pagesPerSpread,
      pageRect: pageRect
        ? { width: pageRect.width, height: pageRect.height }
        : undefined,
      pageRect: undefined,
      metadataAspectRatio: metadataAspectRatio ?? null,
      visibilityThreshold: DEFAULT_VISIBILITY_THRESHOLD,
      fallbackZoom: DEFAULT_FALLBACK_ZOOM,
@@ -197,7 +169,6 @@ export function ZoomAPIBridge() {
    firstFileId,
    firstFileStub,
    requestFitWidth,
    getViewportSnapshot,
    autoZoomTick,
    spreadMode,
    triggerImmediateZoomUpdate,
frontend/src/core/contexts/BannerContext.tsx (new file, 26 lines)
@@ -0,0 +1,26 @@
import { createContext, useContext, useState, ReactNode } from 'react';

interface BannerContextType {
  banner: ReactNode;
  setBanner: (banner: ReactNode) => void;
}

const BannerContext = createContext<BannerContextType | undefined>(undefined);

export function BannerProvider({ children }: { children: ReactNode }) {
  const [banner, setBanner] = useState<ReactNode>(null);

  return (
    <BannerContext.Provider value={{ banner, setBanner }}>
      {children}
    </BannerContext.Provider>
  );
}

export function useBanner() {
  const context = useContext(BannerContext);
  if (!context) {
    throw new Error('useBanner must be used within BannerProvider');
  }
  return context;
}
@@ -7,6 +7,7 @@ import { getApiBaseUrl } from '@app/services/apiClientConfig';
const apiClient = axios.create({
  baseURL: getApiBaseUrl(),
  responseType: 'json',
  withCredentials: true,
});

// Setup interceptors (core does nothing, proprietary adds JWT auth)
@@ -1,6 +1,6 @@
import { useEffect, useRef } from 'react';

export const DEFAULT_VISIBILITY_THRESHOLD = 80; // Require at least 80% of the page height to be visible
export const DEFAULT_VISIBILITY_THRESHOLD = 70; // Require at least 70% of the page height to be visible
export const DEFAULT_FALLBACK_ZOOM = 1.44; // 144% fallback when no reliable metadata is present

export interface ZoomViewport {
@@ -36,47 +36,33 @@ export function determineAutoZoom({
  visibilityThreshold = DEFAULT_VISIBILITY_THRESHOLD,
  fallbackZoom = DEFAULT_FALLBACK_ZOOM,
}: AutoZoomParams): AutoZoomDecision {
  // Get aspect ratio from pageRect or metadata
  const rectWidth = pageRect?.width ?? 0;
  const rectHeight = pageRect?.height ?? 0;

  const aspectRatio: number | null =
    rectWidth > 0 ? rectHeight / rectWidth : metadataAspectRatio ?? null;

  let renderedHeight: number | null = rectHeight > 0 ? rectHeight : null;

  if (!renderedHeight || renderedHeight <= 0) {
    if (aspectRatio == null || aspectRatio <= 0) {
      return { type: 'fallback', zoom: Math.min(fitWidthZoom, fallbackZoom) };
    }

    const pageWidth = viewportWidth / (fitWidthZoom * pagesPerSpread);
    const pageHeight = pageWidth * aspectRatio;
    renderedHeight = pageHeight * fitWidthZoom;
  // Need aspect ratio to proceed
  if (!aspectRatio || aspectRatio <= 0) {
    return { type: 'fallback', zoom: Math.min(fitWidthZoom, fallbackZoom) };
  }

  if (!renderedHeight || renderedHeight <= 0) {
    return { type: 'fitWidth' };
  }

  const isLandscape = aspectRatio !== null && aspectRatio < 1;
  // Landscape pages need 100% visibility, portrait need the specified threshold
  const isLandscape = aspectRatio < 1;
  const targetVisibility = isLandscape ? 100 : visibilityThreshold;

  const visiblePercent = (viewportHeight / renderedHeight) * 100;
  // Calculate zoom level that shows targetVisibility% of page height
  const pageHeightAtFitWidth = (viewportWidth / pagesPerSpread) * aspectRatio;
  const heightBasedZoom = fitWidthZoom * (viewportHeight / pageHeightAtFitWidth) / (targetVisibility / 100);

  if (visiblePercent >= targetVisibility) {
  // Use whichever zoom is smaller (more zoomed out) to satisfy both width and height constraints
  if (heightBasedZoom < fitWidthZoom) {
    // Need to zoom out from fitWidth to show enough height
    return { type: 'adjust', zoom: heightBasedZoom };
  } else {
    // fitWidth already shows enough
    return { type: 'fitWidth' };
  }

  const allowableHeightRatio = targetVisibility / 100;
  const zoomScale =
    viewportHeight / (allowableHeightRatio * renderedHeight);
  const targetZoom = Math.min(fitWidthZoom, fitWidthZoom * zoomScale);

  if (Math.abs(targetZoom - fitWidthZoom) < 0.001) {
    return { type: 'fitWidth' };
  }

  return { type: 'adjust', zoom: targetZoom };
}

export interface MeasurePageRectOptions {
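To make the new height-based formula concrete, here is a short TypeScript sketch that walks through one call with assumed numbers (A4 portrait page, 1200×800 viewport, single-page spread, fit-width zoom of 1.2); the import path and argument shape mirror the call made by ZoomAPIBridge above, and everything else is illustrative.

import {
  determineAutoZoom,
  DEFAULT_VISIBILITY_THRESHOLD,
  DEFAULT_FALLBACK_ZOOM,
} from '@app/utils/viewerZoom';

// Assumed inputs: 1200x800 viewport, one A4 portrait page per spread,
// fit-width zoom of 1.2, aspect ratio = height / width = 297 / 210 ≈ 1.414.
const decision = determineAutoZoom({
  viewportWidth: 1200,
  viewportHeight: 800,
  fitWidthZoom: 1.2,
  pagesPerSpread: 1,
  pageRect: undefined,                     // ZoomAPIBridge now passes undefined too
  metadataAspectRatio: 297 / 210,
  visibilityThreshold: DEFAULT_VISIBILITY_THRESHOLD,  // 70 for portrait pages
  fallbackZoom: DEFAULT_FALLBACK_ZOOM,
});

// pageHeightAtFitWidth = (1200 / 1) * 1.414 ≈ 1697 CSS px
// heightBasedZoom      = 1.2 * (800 / 1697) / 0.70 ≈ 0.81
// 0.81 < 1.2, so the page is zoomed out until roughly 70% of its height fits.
console.log(decision);                     // expected: { type: 'adjust', zoom ≈ 0.81 }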
@@ -1,6 +1,7 @@
import { ReactNode } from "react";
import { AppProviders as ProprietaryAppProviders } from "@proprietary/components/AppProviders";
import { DesktopConfigSync } from '@app/components/DesktopConfigSync';
import { DesktopBannerInitializer } from '@app/components/DesktopBannerInitializer';
import { DESKTOP_DEFAULT_APP_CONFIG } from '@app/config/defaultAppConfig';

/**
@@ -22,6 +23,7 @@ export function AppProviders({ children }: { children: ReactNode }) {
      }}
    >
      <DesktopConfigSync />
      <DesktopBannerInitializer />
      {children}
    </ProprietaryAppProviders>
  );
frontend/src/desktop/components/DesktopBannerInitializer.tsx (new file, 13 lines)
@@ -0,0 +1,13 @@
import { useEffect } from 'react';
import { useBanner } from '@app/contexts/BannerContext';
import { DefaultAppBanner } from '@app/components/shared/DefaultAppBanner';

export function DesktopBannerInitializer() {
  const { setBanner } = useBanner();

  useEffect(() => {
    setBanner(<DefaultAppBanner />);
  }, [setBanner]);

  return null;
}
frontend/src/desktop/components/shared/DefaultAppBanner.tsx (new file, 27 lines)
@@ -0,0 +1,27 @@
import React, { useState } from 'react';
import { useTranslation } from 'react-i18next';
import { InfoBanner } from '@app/components/shared/InfoBanner';
import { useDefaultApp } from '@app/hooks/useDefaultApp';

export const DefaultAppBanner: React.FC = () => {
  const { t } = useTranslation();
  const { isDefault, isLoading, handleSetDefault } = useDefaultApp();
  const [dismissed, setDismissed] = useState(false);

  const handleDismissPrompt = () => {
    setDismissed(true);
  };

  return (
    <InfoBanner
      icon="picture-as-pdf-rounded"
      message={t('defaultApp.prompt.message', 'Make Stirling PDF your default application for opening PDF files.')}
      buttonText={t('defaultApp.setDefault', 'Set Default')}
      buttonIcon="check-circle-rounded"
      onButtonClick={handleSetDefault}
      onDismiss={handleDismissPrompt}
      loading={isLoading}
      show={!dismissed && isDefault === false}
    />
  );
};
@@ -0,0 +1,40 @@
import React from 'react';
import { Paper, Text, Button, Group } from '@mantine/core';
import { useTranslation } from 'react-i18next';
import { useDefaultApp } from '@app/hooks/useDefaultApp';

export const DefaultAppSettings: React.FC = () => {
  const { t } = useTranslation();
  const { isDefault, isLoading, handleSetDefault } = useDefaultApp();

  return (
    <Paper withBorder p="md" radius="md">
      <Group justify="space-between" align="center">
        <div>
          <Text fw={500} size="sm">
            {t('settings.general.defaultPdfEditor', 'Default PDF editor')}
          </Text>
          <Text size="xs" c="dimmed" mt={4}>
            {isDefault === true
              ? t('settings.general.defaultPdfEditorActive', 'Stirling PDF is your default PDF editor')
              : isDefault === false
                ? t('settings.general.defaultPdfEditorInactive', 'Another application is set as default')
                : t('settings.general.defaultPdfEditorChecking', 'Checking...')}
          </Text>
        </div>
        <Button
          variant={isDefault ? 'light' : 'filled'}
          color="blue"
          size="sm"
          onClick={handleSetDefault}
          loading={isLoading}
          disabled={isDefault === true}
        >
          {isDefault
            ? t('settings.general.defaultPdfEditorSet', 'Already Default')
            : t('settings.general.setAsDefault', 'Set as Default')}
        </Button>
      </Group>
    </Paper>
  );
};
@@ -0,0 +1,18 @@
import React from 'react';
import { Stack } from '@mantine/core';
import CoreGeneralSection from '@core/components/shared/config/configSections/GeneralSection';
import { DefaultAppSettings } from '@app/components/shared/config/configSections/DefaultAppSettings';

/**
 * Desktop extension of GeneralSection that adds default PDF editor settings
 */
const GeneralSection: React.FC = () => {
  return (
    <Stack gap="lg">
      <DefaultAppSettings />
      <CoreGeneralSection />
    </Stack>
  );
};

export default GeneralSection;
@@ -1,4 +1,4 @@
import { useEffect, useRef } from 'react';
import { useEffect } from 'react';
import { useBackendInitializer } from '@app/hooks/useBackendInitializer';
import { useOpenedFile } from '@app/hooks/useOpenedFile';
import { fileOpenService } from '@app/services/fileOpenService';
@@ -20,75 +20,42 @@ export function useAppInitialization(): void {
  // Handle files opened with app (Tauri mode)
  const { openedFilePaths, loading: openedFileLoading } = useOpenedFile();

  // Track if we've already loaded the initial files to prevent duplicate loads
  const initialFilesLoadedRef = useRef(false);

  // Load opened files and add directly to FileContext
  useEffect(() => {
    if (openedFilePaths.length > 0 && !openedFileLoading && !initialFilesLoadedRef.current) {
      initialFilesLoadedRef.current = true;

      const loadOpenedFiles = async () => {
        try {
          const filesArray: File[] = [];

          // Load all files in parallel
          await Promise.all(
            openedFilePaths.map(async (filePath) => {
              try {
                const fileData = await fileOpenService.readFileAsArrayBuffer(filePath);
                if (fileData) {
                  const file = new File([fileData.arrayBuffer], fileData.fileName, {
                    type: 'application/pdf'
                  });
                  filesArray.push(file);
                  console.log('[Desktop] Loaded file:', fileData.fileName);
                }
              } catch (error) {
                console.error('[Desktop] Failed to load file:', filePath, error);
              }
            })
          );

          if (filesArray.length > 0) {
            // Add all files to FileContext at once
            await addFiles(filesArray);
            console.log(`[Desktop] ${filesArray.length} opened file(s) added to FileContext`);
          }
        } catch (error) {
          console.error('[Desktop] Failed to load opened files:', error);
        }
      };

      loadOpenedFiles();
    if (openedFilePaths.length === 0 || openedFileLoading) {
      return;
    }
  }, [openedFilePaths, openedFileLoading, addFiles]);

  // Listen for runtime file-opened events (from second instances on Windows/Linux)
  useEffect(() => {
    const handleRuntimeFileOpen = async (filePath: string) => {
    const loadOpenedFiles = async () => {
      try {
        console.log('[Desktop] Runtime file-opened event received:', filePath);
        const fileData = await fileOpenService.readFileAsArrayBuffer(filePath);
        if (fileData) {
          // Create a File object from the ArrayBuffer
          const file = new File([fileData.arrayBuffer], fileData.fileName, {
            type: 'application/pdf'
          });
        const filesArray: File[] = [];

          // Add directly to FileContext
          await addFiles([file]);
          console.log('[Desktop] Runtime opened file added to FileContext:', fileData.fileName);
        await Promise.all(
          openedFilePaths.map(async (filePath) => {
            try {
              const fileData = await fileOpenService.readFileAsArrayBuffer(filePath);
              if (fileData) {
                const file = new File([fileData.arrayBuffer], fileData.fileName, {
                  type: 'application/pdf'
                });
                filesArray.push(file);
                console.log('[Desktop] Loaded file:', fileData.fileName);
              }
            } catch (error) {
              console.error('[Desktop] Failed to load file:', filePath, error);
            }
          })
        );

        if (filesArray.length > 0) {
          await addFiles(filesArray);
          console.log(`[Desktop] ${filesArray.length} opened file(s) added to FileContext`);
        }
      } catch (error) {
        console.error('[Desktop] Failed to load runtime opened file:', error);
        console.error('[Desktop] Failed to load opened files:', error);
      }
    };

    // Set up event listener and get cleanup function
    const unlisten = fileOpenService.onFileOpened(handleRuntimeFileOpen);

    // Clean up listener on unmount
    return unlisten;
  }, [addFiles]);
    loadOpenedFiles();
  }, [openedFilePaths, openedFileLoading, addFiles]);
}
frontend/src/desktop/hooks/useDefaultApp.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
import { useState, useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { defaultAppService } from '@app/services/defaultAppService';
import { alert } from '@app/components/toast';

export const useDefaultApp = () => {
  const { t } = useTranslation();
  const [isDefault, setIsDefault] = useState<boolean | null>(null);
  const [isLoading, setIsLoading] = useState(false);

  useEffect(() => {
    checkDefaultStatus();
  }, []);

  const checkDefaultStatus = async () => {
    try {
      const status = await defaultAppService.isDefaultPdfHandler();
      setIsDefault(status);
    } catch (error) {
      console.error('Failed to check default status:', error);
    }
  };

  const handleSetDefault = async () => {
    setIsLoading(true);
    try {
      const result = await defaultAppService.setAsDefaultPdfHandler();

      if (result === 'set_successfully') {
        alert({
          alertType: 'success',
          title: t('defaultApp.success.title', 'Default App Set'),
          body: t('defaultApp.success.message', 'Stirling PDF is now your default PDF editor'),
        });
        setIsDefault(true);
      } else if (result === 'opened_settings') {
        alert({
          alertType: 'neutral',
          title: t('defaultApp.settingsOpened.title', 'Settings Opened'),
          body: t('defaultApp.settingsOpened.message', 'Please select Stirling PDF in your system settings'),
        });
      }
    } catch (error) {
      console.error('Failed to set default:', error);
      alert({
        alertType: 'error',
        title: t('defaultApp.error.title', 'Error'),
        body: t('defaultApp.error.message', 'Failed to set default PDF handler'),
      });
    } finally {
      setIsLoading(false);
    }
  };

  return {
    isDefault,
    isLoading,
    checkDefaultStatus,
    handleSetDefault,
  };
};
@@ -1,45 +1,46 @@
import { useState, useEffect } from 'react';
import { fileOpenService } from '@app/services/fileOpenService';
import { listen } from '@tauri-apps/api/event';

export function useOpenedFile() {
  const [openedFilePaths, setOpenedFilePaths] = useState<string[]>([]);
  const [loading, setLoading] = useState(true);

  useEffect(() => {
    const checkForOpenedFile = async () => {
      console.log('🔍 Checking for opened file(s)...');
    // Function to read and process files from storage
    const readFilesFromStorage = async () => {
      console.log('🔍 Reading files from storage...');
      try {
        const filePaths = await fileOpenService.getOpenedFiles();
        console.log('🔍 fileOpenService.getOpenedFiles() returned:', filePaths);

        if (filePaths.length > 0) {
          console.log(`✅ App opened with ${filePaths.length} file(s):`, filePaths);
          console.log(`✅ Found ${filePaths.length} file(s) in storage:`, filePaths);
          setOpenedFilePaths(filePaths);

          // Clear the files from service state after consuming them
          await fileOpenService.clearOpenedFiles();
        } else {
          console.log('ℹ️ No files were opened with the app');
        }

      } catch (error) {
        console.error('❌ Failed to check for opened files:', error);
        console.error('❌ Failed to read files from storage:', error);
      } finally {
        setLoading(false);
      }
    };

    checkForOpenedFile();
    // Read files on mount
    readFilesFromStorage();

    // Listen for runtime file open events (abstracted through service)
    const unlistenRuntimeEvents = fileOpenService.onFileOpened((filePath: string) => {
      console.log('📂 Runtime file open event:', filePath);
      setOpenedFilePaths(prev => [...prev, filePath]);
    // Listen for files-changed events (when new files are added to storage)
    let unlisten: (() => void) | undefined;
    listen('files-changed', async () => {
      console.log('📂 files-changed event received, re-reading storage...');
      await readFilesFromStorage();
    }).then(unlistenFn => {
      unlisten = unlistenFn;
    });

    // Cleanup function
    return () => {
      unlistenRuntimeEvents();
      if (unlisten) unlisten();
    };
  }, []);

frontend/src/desktop/services/defaultAppService.ts (new file, 70 lines)
@@ -0,0 +1,70 @@
import { invoke } from '@tauri-apps/api/core';

/**
 * Service for managing default PDF handler settings
 * Note: Uses localStorage for machine-specific preferences (not synced to server)
 */
export const defaultAppService = {
  /**
   * Check if Stirling PDF is the default PDF handler
   */
  async isDefaultPdfHandler(): Promise<boolean> {
    try {
      const result = await invoke<boolean>('is_default_pdf_handler');
      return result;
    } catch (error) {
      console.error('[DefaultApp] Failed to check default handler:', error);
      return false;
    }
  },

  /**
   * Set or prompt to set Stirling PDF as default PDF handler
   * Returns a status string indicating what happened
   */
  async setAsDefaultPdfHandler(): Promise<'set_successfully' | 'opened_settings' | 'error'> {
    try {
      const result = await invoke<string>('set_as_default_pdf_handler');
      return result as 'set_successfully' | 'opened_settings';
    } catch (error) {
      console.error('[DefaultApp] Failed to set default handler:', error);
      return 'error';
    }
  },

  /**
   * Check if user has dismissed the default app prompt (machine-specific)
   */
  hasUserDismissedPrompt(): boolean {
    try {
      const dismissed = localStorage.getItem('stirlingpdf_default_app_prompt_dismissed');
      return dismissed === 'true';
    } catch {
      return false;
    }
  },

  /**
   * Mark that user has dismissed the default app prompt (machine-specific)
   */
  setPromptDismissed(dismissed: boolean): void {
    try {
      localStorage.setItem('stirlingpdf_default_app_prompt_dismissed', dismissed ? 'true' : 'false');
    } catch (error) {
      console.error('[DefaultApp] Failed to save prompt preference:', error);
    }
  },

  /**
   * Check if we should show the default app prompt
   * Returns true if: user hasn't dismissed it AND app is not default handler
   */
  async shouldShowPrompt(): Promise<boolean> {
    if (this.hasUserDismissedPrompt()) {
      return false;
    }

    const isDefault = await this.isDefaultPdfHandler();
    return !isDefault;
  },
};
@ -1,6 +1,7 @@
import { Suspense } from "react";
import { Routes, Route } from "react-router-dom";
import { AppProviders } from "@app/components/AppProviders";
import { AppLayout } from "@app/components/AppLayout";
import { LoadingFallback } from "@app/components/shared/LoadingFallback";
import Landing from "@app/routes/Landing";
import Login from "@app/routes/Login";
@ -22,17 +23,19 @@ export default function App() {
  return (
    <Suspense fallback={<LoadingFallback />}>
      <AppProviders>
        <Routes>
          {/* Auth routes - no nested providers needed */}
          <Route path="/login" element={<Login />} />
          <Route path="/signup" element={<Signup />} />
          <Route path="/auth/callback" element={<AuthCallback />} />
          <Route path="/invite/:token" element={<InviteAccept />} />
        <AppLayout>
          <Routes>
            {/* Auth routes - no nested providers needed */}
            <Route path="/login" element={<Login />} />
            <Route path="/signup" element={<Signup />} />
            <Route path="/auth/callback" element={<AuthCallback />} />
            <Route path="/invite/:token" element={<InviteAccept />} />

          {/* Main app routes - Landing handles auth logic */}
          <Route path="/*" element={<Landing />} />
        </Routes>
        <OnboardingTour />
            {/* Main app routes - Landing handles auth logic */}
            <Route path="/*" element={<Landing />} />
          </Routes>
          <OnboardingTour />
        </AppLayout>
      </AppProviders>
    </Suspense>
  );

@ -107,7 +107,7 @@ class SpringAuthClient {
    for (const cookie of cookies) {
      const [name, value] = cookie.trim().split('=');
      if (name === 'XSRF-TOKEN') {
        return value;
        return decodeURIComponent(value);
      }
    }
    return null;
@ -278,7 +278,7 @@ class SpringAuthClient {
    try {
      const response = await apiClient.post('/api/v1/auth/logout', null, {
        headers: {
          'X-CSRF-TOKEN': this.getCsrfToken() || '',
          'X-XSRF-TOKEN': this.getCsrfToken() || '',
        },
        withCredentials: true,
      });
@ -311,7 +311,7 @@ class SpringAuthClient {
    try {
      const response = await apiClient.post('/api/v1/auth/refresh', null, {
        headers: {
          'X-CSRF-TOKEN': this.getCsrfToken() || '',
          'X-XSRF-TOKEN': this.getCsrfToken() || '',
        },
        withCredentials: true,
      });

@ -9,17 +9,38 @@ function getJwtTokenFromStorage(): string | null {
  }
}

function getXsrfToken(): string | null {
  try {
    const cookies = document.cookie.split(';');
    for (const cookie of cookies) {
      const [name, value] = cookie.trim().split('=');
      if (name === 'XSRF-TOKEN') {
        return decodeURIComponent(value);
      }
    }
    return null;
  } catch (error) {
    console.error('[API Client] Failed to read XSRF token from cookies:', error);
    return null;
  }
}

export function setupApiInterceptors(client: AxiosInstance): void {
  // Install request interceptor to add JWT token
  client.interceptors.request.use(
    (config) => {
      const jwtToken = getJwtTokenFromStorage();
      const xsrfToken = getXsrfToken();

      if (jwtToken && !config.headers.Authorization) {
        config.headers.Authorization = `Bearer ${jwtToken}`;
        console.debug('[API Client] Added JWT token from localStorage to Authorization header');
      }

      if (xsrfToken && !config.headers['X-XSRF-TOKEN']) {
        config.headers['X-XSRF-TOKEN'] = xsrfToken;
      }

      return config;
    },
    (error) => {

@ -2,6 +2,43 @@

This directory contains Python scripts for managing frontend translations in Stirling PDF. These tools help analyze, merge, validate, and manage translations against the en-GB golden truth file.

## Quick Start - Automated Translation (RECOMMENDED)

The **fastest and easiest way** to translate a language is to use the automated pipeline:

```bash
# Set your OpenAI API key
export OPENAI_API_KEY=your_openai_api_key_here

# Translate a language automatically (extract → translate → merge → beautify → verify)
python3 scripts/translations/auto_translate.py es-ES

# With custom batch size (default: 500 entries per batch)
python3 scripts/translations/auto_translate.py es-ES --batch-size 600

# Keep temporary files for inspection
python3 scripts/translations/auto_translate.py es-ES --no-cleanup
```

**What it does:**
1. Extracts untranslated entries from the language file
2. Splits into batches (default 500 entries each)
3. Translates each batch using GPT-5 with specialized prompts
4. Validates placeholders are preserved
5. Merges translated batches
6. Applies translations to language file
7. Beautifies structure to match en-GB
8. Cleans up temporary files
9. Reports final completion percentage

**Time:** ~8-10 minutes per language with 1200+ untranslated entries

**Cost:** ~$2-4 per language using GPT-5 (or use `gpt-5-mini` for lower cost)

See [`auto_translate.py`](#auto_translatepy-automated-translation-pipeline) for full details.

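Under the hood, step 1 treats an entry as untranslated when it is missing, still identical to the English text, or marked `[UNTRANSLATED]`. A minimal sketch of that check, mirroring the logic in `auto_translate.py` (es-ES is used here purely as an example):

```python
# Minimal sketch of how step 1 decides an entry still needs translation
# (mirrors extract_untranslated() in auto_translate.py; paths follow the real locale layout).
import json
from pathlib import Path

def flatten(d, prefix=""):
    """Flatten nested translation JSON into dot-separated keys."""
    out = {}
    for k, v in d.items():
        key = f"{prefix}.{k}" if prefix else k
        out.update(flatten(v, key) if isinstance(v, dict) else {key: str(v)})
    return out

golden = flatten(json.loads(Path("frontend/public/locales/en-GB/translation.json").read_text(encoding="utf-8")))
lang = flatten(json.loads(Path("frontend/public/locales/es-ES/translation.json").read_text(encoding="utf-8")))

# An entry is "untranslated" if it is missing, still identical to the English text,
# or explicitly marked with the [UNTRANSLATED] prefix.
untranslated = {
    k: v for k, v in golden.items()
    if k not in lang or lang[k] == v or lang[k].startswith("[UNTRANSLATED]")
}
print(f"{len(untranslated)} entries left to translate")
```
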
---

## Scripts Overview

### 0. Validation Scripts (Run First!)
@ -191,7 +228,97 @@ python scripts/translations/compact_translator.py it-IT --output to_translate.js
- Batch size control for manageable chunks
- 50-80% fewer characters than other extraction methods

### 5. `json_beautifier.py`
### 5. `auto_translate.py` - Automated Translation Pipeline

**NEW: Fully automated translation workflow using GPT-5.**

Combines all translation steps into a single command that handles everything from extraction to verification.

**Usage:**
```bash
# Basic usage (requires OPENAI_API_KEY environment variable)
export OPENAI_API_KEY=your_api_key
python3 scripts/translations/auto_translate.py es-ES

# With inline API key
python3 scripts/translations/auto_translate.py es-ES --api-key YOUR_KEY

# Custom batch size (default: 500 entries)
python3 scripts/translations/auto_translate.py es-ES --batch-size 600

# Custom timeout per batch (default: 600 seconds / 10 minutes)
python3 scripts/translations/auto_translate.py es-ES --timeout 900

# Keep temporary files for debugging
python3 scripts/translations/auto_translate.py es-ES --no-cleanup

# Skip final verification
python3 scripts/translations/auto_translate.py es-ES --skip-verification
```

**Features:**
- Fully automated end-to-end translation pipeline
- Uses GPT-5 with specialized prompts for Stirling PDF
- Preserves all placeholders ({n}, {{variable}}, etc.)
- Maintains consistent terminology
- Validates translations automatically
- Creates backups before modifying files
- Reports detailed progress and final completion %

**Pipeline Steps:**
1. **Extract**: Finds all untranslated entries
2. **Split**: Divides into manageable batches (default: 500 entries)
3. **Translate**: Uses GPT-5 to translate each batch with specialized prompts
4. **Validate**: Ensures placeholders are preserved
5. **Merge**: Combines all translated batches
6. **Apply**: Updates the language file
7. **Beautify**: Restructures to match en-GB format
8. **Cleanup**: Removes temporary files
9. **Verify**: Reports final completion percentage

**Translation Quality:**
- Preserves ALL placeholders exactly as-is
- Keeps HTML tags intact (<strong>, <br>, etc.)
- Doesn't translate technical terms (PDF, API, OAuth2, etc.)
- Maintains consistent terminology throughout
- Uses appropriate formal/informal tone per language

**Supported Languages:**
All language codes from `frontend/public/locales/` (e.g., es-ES, de-DE, fr-FR, zh-CN, ar-AR, etc.)

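The batch files that the **Split** and **Merge** steps pass around are flat key→value JSON. A minimal sketch of that round-trip, following the same `<lang>_batch_<i>_of_<n>.json` naming scheme the script uses (the `untranslated` dict is assumed to come from the extract step):

```python
# Rough sketch of the split/merge round-trip used by the Split and Merge steps
# (same naming scheme as auto_translate.py; `untranslated` is the flat dict from extraction).
import json

def split_into_batches(untranslated: dict, lang: str, batch_size: int = 500) -> list[str]:
    entries = list(untranslated.items())
    num_batches = (len(entries) + batch_size - 1) // batch_size
    files = []
    for i in range(num_batches):
        name = f"{lang.replace('-', '_')}_batch_{i + 1}_of_{num_batches}.json"
        with open(name, "w", encoding="utf-8") as f:
            json.dump(dict(entries[i * batch_size:(i + 1) * batch_size]), f, ensure_ascii=False)
        files.append(name)
    return files

def merge_batches(translated_files: list[str]) -> dict:
    merged = {}
    for name in translated_files:
        with open(name, encoding="utf-8") as f:
            merged.update(json.load(f))  # batches cover disjoint keys, so updates never collide
    return merged
```
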
### 6. `batch_translator.py` - GPT-5 Translation Engine

Low-level translation script used by `auto_translate.py`. Can be used standalone for manual batch translation.

**Usage:**
```bash
# Translate single batch file
python3 scripts/translations/batch_translator.py my_batch.json --language es-ES --api-key YOUR_KEY

# Translate multiple batches
python3 scripts/translations/batch_translator.py batch_*.json --language de-DE --api-key YOUR_KEY

# Use different GPT model
python3 scripts/translations/batch_translator.py batch.json --language fr-FR --model gpt-5-mini

# Skip validation
python3 scripts/translations/batch_translator.py batch.json --language it-IT --skip-validation
```

**Features:**
- Translates JSON batch files using OpenAI GPT-5
- Specialized system prompts for Stirling PDF translations
- Automatic placeholder validation
- Supports pattern matching for multiple files
- Configurable model selection (gpt-5, gpt-5-mini, gpt-5-nano)
- Rate limiting with configurable delays

**Models:**
- `gpt-5` (default): Best quality, $1.25/1M input, $10/1M output
- `gpt-5-mini`: Balanced quality/cost
- `gpt-5-nano`: Fastest, most economical

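The automatic placeholder validation is essentially a set comparison of the placeholder tokens found before and after translation. A minimal sketch using the same regex pattern as `validate_translation()` in the script (the example strings below are illustrative only):

```python
# Condensed sketch of the placeholder check batch_translator.py runs on every value:
# the set of {placeholder} / {{placeholder}} tokens must be identical before and after translation.
import re

# Same pattern as validate_translation() in batch_translator.py
PLACEHOLDER = re.compile(r'\{[^}]+\}|\{\{[^}]+\}\}')

def placeholders_preserved(original: str, translated: str) -> bool:
    return set(PLACEHOLDER.findall(original)) == set(PLACEHOLDER.findall(translated))

# Illustrative checks (hypothetical strings, not taken from the locale files)
assert placeholders_preserved("Page {n} of {total}", "Página {n} de {total}")
assert not placeholders_preserved("Hello {name}", "Hola {nombre}")
```
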
### 7. `json_beautifier.py`
Restructures and beautifies translation JSON files to match en-GB structure exactly.

**Usage:**

324 scripts/translations/auto_translate.py Normal file
@ -0,0 +1,324 @@
#!/usr/bin/env python3
"""
Automated Translation Pipeline
Extracts, translates, merges, and beautifies translations for a language.
"""

import json
import sys
import argparse
import os
import subprocess
from pathlib import Path
import time


def run_command(cmd, description=""):
    """Run a shell command and return success status."""
    if description:
        print(f"\n{'='*60}")
        print(f"Step: {description}")
        print(f"{'='*60}")

    result = subprocess.run(cmd, shell=True, capture_output=True, text=True)

    if result.stdout:
        print(result.stdout)
    if result.stderr:
        print(result.stderr, file=sys.stderr)

    return result.returncode == 0


def extract_untranslated(language_code, batch_size=500):
    """Extract untranslated entries and split into batches."""
    print(f"\n🔍 Extracting untranslated entries for {language_code}...")

    # Load files
    golden_path = Path(f'frontend/public/locales/en-GB/translation.json')
    lang_path = Path(f'frontend/public/locales/{language_code}/translation.json')

    if not golden_path.exists():
        print(f"Error: Golden truth file not found: {golden_path}")
        return None

    if not lang_path.exists():
        print(f"Error: Language file not found: {lang_path}")
        return None

    def load_json(path):
        with open(path, 'r', encoding='utf-8') as f:
            return json.load(f)

    def flatten_dict(d, parent_key='', separator='.'):
        items = []
        for k, v in d.items():
            new_key = f"{parent_key}{separator}{k}" if parent_key else k
            if isinstance(v, dict):
                items.extend(flatten_dict(v, new_key, separator).items())
            else:
                items.append((new_key, str(v)))
        return dict(items)

    golden = load_json(golden_path)
    lang_data = load_json(lang_path)

    golden_flat = flatten_dict(golden)
    lang_flat = flatten_dict(lang_data)

    # Find untranslated
    untranslated = {}
    for key, value in golden_flat.items():
        if (key not in lang_flat or
                lang_flat.get(key) == value or
                (isinstance(lang_flat.get(key), str) and lang_flat.get(key).startswith("[UNTRANSLATED]"))):
            untranslated[key] = value

    total = len(untranslated)
    print(f"Found {total} untranslated entries")

    if total == 0:
        print("✓ Language is already complete!")
        return []

    # Split into batches
    entries = list(untranslated.items())
    num_batches = (total + batch_size - 1) // batch_size

    batch_files = []
    lang_code_safe = language_code.replace('-', '_')

    for i in range(num_batches):
        start = i * batch_size
        end = min((i + 1) * batch_size, total)
        batch = dict(entries[start:end])

        filename = f'{lang_code_safe}_batch_{i+1}_of_{num_batches}.json'
        with open(filename, 'w', encoding='utf-8') as f:
            json.dump(batch, f, ensure_ascii=False, separators=(',', ':'))

        batch_files.append(filename)
        print(f"  Created {filename} with {len(batch)} entries")

    return batch_files


def translate_batches(batch_files, language_code, api_key, timeout=600):
    """Translate all batch files using GPT-5."""
    if not batch_files:
        return []

    print(f"\n🤖 Translating {len(batch_files)} batches using GPT-5...")
    print(f"Timeout: {timeout}s ({timeout//60} minutes) per batch")

    translated_files = []

    for i, batch_file in enumerate(batch_files, 1):
        print(f"\n[{i}/{len(batch_files)}] Translating {batch_file}...")

        # Always pass API key since it's required
        cmd = f'python3 scripts/translations/batch_translator.py "{batch_file}" --language {language_code} --api-key "{api_key}"'

        # Run with timeout
        result = subprocess.run(cmd, shell=True, capture_output=True, text=True, timeout=timeout)

        if result.stdout:
            print(result.stdout)
        if result.stderr:
            print(result.stderr, file=sys.stderr)

        if result.returncode != 0:
            print(f"✗ Failed to translate {batch_file}")
            return None

        translated_file = batch_file.replace('.json', '_translated.json')
        translated_files.append(translated_file)

        # Small delay between batches
        if i < len(batch_files):
            time.sleep(1)

    print(f"\n✓ All {len(batch_files)} batches translated successfully")
    return translated_files


def merge_translations(translated_files, language_code):
    """Merge all translated batch files."""
    if not translated_files:
        return None

    print(f"\n🔗 Merging {len(translated_files)} translated batches...")

    merged = {}
    for filename in translated_files:
        if not Path(filename).exists():
            print(f"Error: Translated file not found: {filename}")
            return None

        with open(filename, 'r', encoding='utf-8') as f:
            merged.update(json.load(f))

    lang_code_safe = language_code.replace('-', '_')
    merged_file = f'{lang_code_safe}_merged.json'

    with open(merged_file, 'w', encoding='utf-8') as f:
        json.dump(merged, f, ensure_ascii=False, separators=(',', ':'))

    print(f"✓ Merged {len(merged)} translations into {merged_file}")
    return merged_file


def apply_translations(merged_file, language_code):
    """Apply merged translations to the language file."""
    print(f"\n📝 Applying translations to {language_code}...")

    cmd = f'python3 scripts/translations/translation_merger.py {language_code} apply-translations --translations-file {merged_file}'

    if not run_command(cmd):
        print(f"✗ Failed to apply translations")
        return False

    print(f"✓ Translations applied successfully")
    return True


def beautify_translations(language_code):
    """Beautify translation file to match en-GB structure."""
    print(f"\n✨ Beautifying {language_code} translation file...")

    cmd = f'python3 scripts/translations/json_beautifier.py --language {language_code}'

    if not run_command(cmd):
        print(f"✗ Failed to beautify translations")
        return False

    print(f"✓ Translation file beautified")
    return True


def cleanup_temp_files(language_code):
    """Remove temporary batch files."""
    print(f"\n🧹 Cleaning up temporary files...")

    lang_code_safe = language_code.replace('-', '_')
    patterns = [
        f'{lang_code_safe}_batch_*.json',
        f'{lang_code_safe}_merged.json'
    ]

    import glob
    removed = 0
    for pattern in patterns:
        for file in glob.glob(pattern):
            Path(file).unlink()
            removed += 1

    print(f"✓ Removed {removed} temporary files")


def verify_completion(language_code):
    """Check final completion percentage."""
    print(f"\n📊 Verifying completion...")

    cmd = f'python3 scripts/translations/translation_analyzer.py --language {language_code} --summary'
    run_command(cmd)


def main():
    parser = argparse.ArgumentParser(
        description='Automated translation pipeline for Stirling PDF',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
# Translate Spanish with API key in environment
export OPENAI_API_KEY=your_key_here
python3 scripts/translations/auto_translate.py es-ES

# Translate German with inline API key
python3 scripts/translations/auto_translate.py de-DE --api-key YOUR_KEY

# Translate Italian with custom batch size
python3 scripts/translations/auto_translate.py it-IT --batch-size 600

# Skip cleanup (keep temporary files for inspection)
python3 scripts/translations/auto_translate.py fr-FR --no-cleanup
"""
    )

    parser.add_argument('language', help='Language code (e.g., es-ES, de-DE, zh-CN)')
    parser.add_argument('--api-key', help='OpenAI API key (or set OPENAI_API_KEY env var)')
    parser.add_argument('--batch-size', type=int, default=500, help='Entries per batch (default: 500)')
    parser.add_argument('--no-cleanup', action='store_true', help='Keep temporary batch files')
    parser.add_argument('--skip-verification', action='store_true', help='Skip final completion check')
    parser.add_argument('--timeout', type=int, default=600, help='Timeout per batch in seconds (default: 600 = 10 minutes)')

    args = parser.parse_args()

    # Verify API key
    api_key = args.api_key or os.environ.get('OPENAI_API_KEY')
    if not api_key:
        print("Error: OpenAI API key required. Provide via --api-key or OPENAI_API_KEY environment variable")
        sys.exit(1)

    print("="*60)
    print(f"Automated Translation Pipeline")
    print(f"Language: {args.language}")
    print(f"Batch Size: {args.batch_size} entries")
    print("="*60)

    start_time = time.time()

    try:
        # Step 1: Extract and split
        batch_files = extract_untranslated(args.language, args.batch_size)
        if batch_files is None:
            sys.exit(1)

        if len(batch_files) == 0:
            print("\n✓ Nothing to translate!")
            sys.exit(0)

        # Step 2: Translate all batches
        translated_files = translate_batches(batch_files, args.language, api_key, args.timeout)
        if translated_files is None:
            sys.exit(1)

        # Step 3: Merge translations
        merged_file = merge_translations(translated_files, args.language)
        if merged_file is None:
            sys.exit(1)

        # Step 4: Apply translations
        if not apply_translations(merged_file, args.language):
            sys.exit(1)

        # Step 5: Beautify
        if not beautify_translations(args.language):
            sys.exit(1)

        # Step 6: Cleanup
        if not args.no_cleanup:
            cleanup_temp_files(args.language)

        # Step 7: Verify
        if not args.skip_verification:
            verify_completion(args.language)

        elapsed = time.time() - start_time
        print("\n" + "="*60)
        print(f"✅ Translation pipeline completed successfully!")
        print(f"Time elapsed: {elapsed:.1f} seconds")
        print("="*60)

    except KeyboardInterrupt:
        print("\n\n⚠ Translation interrupted by user")
        sys.exit(1)
    except Exception as e:
        print(f"\n\n✗ Error: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    main()
321 scripts/translations/batch_translator.py Normal file
@ -0,0 +1,321 @@
#!/usr/bin/env python3
"""
Batch Translation Script using OpenAI API
Automatically translates JSON batch files to target language while preserving:
- Placeholders: {n}, {total}, {filename}, {{variable}}
- HTML tags: <strong>, </strong>, etc.
- Technical terms: PDF, API, OAuth2, SAML2, JWT, etc.
"""

import json
import sys
import argparse
from pathlib import Path
import time

try:
    from openai import OpenAI
except ImportError:
    print("Error: openai package not installed. Install with: pip install openai")
    sys.exit(1)


class BatchTranslator:
    def __init__(self, api_key: str, model: str = "gpt-5"):
        """Initialize translator with OpenAI API key."""
        self.client = OpenAI(api_key=api_key)
        self.model = model

    def get_translation_prompt(self, language_name: str, language_code: str) -> str:
        """Generate the system prompt for translation."""
        return f"""You are a professional translator for Stirling PDF, an open-source PDF manipulation tool.

Translate the following JSON from English to {language_name} ({language_code}) for the Stirling PDF user interface.

CRITICAL RULES - MUST FOLLOW EXACTLY:

1. PRESERVE ALL PLACEHOLDERS EXACTLY AS-IS:
- Single braces: {{{{n}}}}, {{{{total}}}}, {{{{filename}}}}, {{{{count}}}}, {{{{date}}}}, {{{{planName}}}}, {{{{toolName}}}}, {{{{variable}}}}
- Double braces: {{{{{{{{variable}}}}}}}}
- Never translate, modify, or remove these - they are template variables

2. KEEP ALL HTML TAGS INTACT:
- <strong>, </strong>, <br>, <code>, </code>, etc.
- Do not translate tag names, only text between tags

3. DO NOT TRANSLATE TECHNICAL TERMS:
- File formats: PDF, JSON, CSV, XML, HTML, ZIP, DOCX, XLSX, PNG, JPG
- Protocols: API, OAuth2, SAML2, JWT, SMTP, HTTP, HTTPS, SSL, TLS
- Technologies: Git, GitHub, Google, PostHog, Scarf, LibreOffice, Ghostscript, Tesseract, OCR
- Technical keywords: URL, URI, DPI, RGB, CMYK, QR
- "Stirling PDF" - always keep as-is

4. MAINTAIN CONSISTENT TERMINOLOGY:
- Use the SAME translation for repeated terms throughout
- Do not introduce new terminology or synonyms
- Keep UI action words consistent (e.g., "upload", "download", "compress")

5. PRESERVE SPECIAL KEYWORDS IN CONTEXT:
- Mathematical expressions: "2n", "2n-1", "3n" (in page selection)
- Special keywords: "all", "odd", "even" (in page contexts)
- Code examples and technical patterns

6. JSON STRUCTURE:
- Translate ONLY the values (text after :), NEVER the keys
- Return ONLY valid JSON with exact same structure
- Maintain all quotes, commas, and braces

7. TONE & STYLE:
- Use appropriate formal/informal tone for {language_name} UI
- Keep translations concise and user-friendly
- Maintain the professional but accessible tone of the original

8. DO NOT ADD OR REMOVE TEXT:
- Do not add explanations, comments, or extra text
- Do not remove any part of the original meaning
- Keep the same level of detail

Return ONLY the translated JSON. No markdown, no explanations, just the JSON object."""

    def translate_batch(self, batch_data: dict, target_language: str, language_code: str) -> dict:
        """Translate a batch file using OpenAI API."""
        # Convert batch to compact JSON for API
        input_json = json.dumps(batch_data, ensure_ascii=False, separators=(',', ':'))

        print(f"Translating {len(batch_data)} entries to {target_language}...")
        print(f"Input size: {len(input_json)} characters")

        try:
            # GPT-5 only supports temperature=1, so we don't include it
            response = self.client.chat.completions.create(
                model=self.model,
                messages=[
                    {
                        "role": "system",
                        "content": self.get_translation_prompt(target_language, language_code)
                    },
                    {
                        "role": "user",
                        "content": f"Translate this JSON:\n\n{input_json}"
                    }
                ],
            )

            translated_text = response.choices[0].message.content.strip()

            # Remove markdown code blocks if present
            if translated_text.startswith("```"):
                lines = translated_text.split('\n')
                translated_text = '\n'.join(lines[1:-1])

            # Parse the translated JSON
            translated_data = json.loads(translated_text)

            print(f"✓ Translation complete")
            return translated_data

        except json.JSONDecodeError as e:
            print(f"Error: AI returned invalid JSON: {e}")
            print(f"Response: {translated_text[:500]}...")
            raise
        except Exception as e:
            print(f"Error during translation: {e}")
            raise

    def validate_translation(self, original: dict, translated: dict) -> bool:
        """Validate that translation preserved all placeholders and structure."""
        issues = []

        # Check that all keys are present
        if set(original.keys()) != set(translated.keys()):
            missing = set(original.keys()) - set(translated.keys())
            extra = set(translated.keys()) - set(original.keys())
            if missing:
                issues.append(f"Missing keys: {missing}")
            if extra:
                issues.append(f"Extra keys: {extra}")

        # Check placeholders in each value
        import re
        placeholder_pattern = r'\{[^}]+\}|\{\{[^}]+\}\}'

        for key in original.keys():
            if key not in translated:
                continue

            orig_value = str(original[key])
            trans_value = str(translated[key])

            # Find all placeholders in original
            orig_placeholders = set(re.findall(placeholder_pattern, orig_value))
            trans_placeholders = set(re.findall(placeholder_pattern, trans_value))

            if orig_placeholders != trans_placeholders:
                issues.append(f"Placeholder mismatch in '{key}': {orig_placeholders} vs {trans_placeholders}")

        if issues:
            print("\n⚠ Validation warnings:")
            for issue in issues[:10]:  # Show first 10 issues
                print(f"  - {issue}")
            if len(issues) > 10:
                print(f"  ... and {len(issues) - 10} more issues")
            return False

        print("✓ Validation passed")
        return True


def get_language_info(language_code: str) -> tuple:
    """Get full language name from code."""
    languages = {
        'zh-CN': ('Simplified Chinese', 'zh-CN'),
        'es-ES': ('Spanish', 'es-ES'),
        'it-IT': ('Italian', 'it-IT'),
        'de-DE': ('German', 'de-DE'),
        'ar-AR': ('Arabic', 'ar-AR'),
        'pt-BR': ('Brazilian Portuguese', 'pt-BR'),
        'ru-RU': ('Russian', 'ru-RU'),
        'fr-FR': ('French', 'fr-FR'),
        'ja-JP': ('Japanese', 'ja-JP'),
        'ko-KR': ('Korean', 'ko-KR'),
        'nl-NL': ('Dutch', 'nl-NL'),
        'pl-PL': ('Polish', 'pl-PL'),
        'sv-SE': ('Swedish', 'sv-SE'),
        'da-DK': ('Danish', 'da-DK'),
        'no-NB': ('Norwegian', 'no-NB'),
        'fi-FI': ('Finnish', 'fi-FI'),
        'tr-TR': ('Turkish', 'tr-TR'),
        'vi-VN': ('Vietnamese', 'vi-VN'),
        'th-TH': ('Thai', 'th-TH'),
        'id-ID': ('Indonesian', 'id-ID'),
        'hi-IN': ('Hindi', 'hi-IN'),
        'cs-CZ': ('Czech', 'cs-CZ'),
        'hu-HU': ('Hungarian', 'hu-HU'),
        'ro-RO': ('Romanian', 'ro-RO'),
        'uk-UA': ('Ukrainian', 'uk-UA'),
        'el-GR': ('Greek', 'el-GR'),
        'bg-BG': ('Bulgarian', 'bg-BG'),
        'hr-HR': ('Croatian', 'hr-HR'),
        'sk-SK': ('Slovak', 'sk-SK'),
        'sl-SI': ('Slovenian', 'sl-SI'),
        'ca-CA': ('Catalan', 'ca-CA'),
    }

    return languages.get(language_code, (language_code, language_code))


def main():
    parser = argparse.ArgumentParser(
        description='Translate JSON batch files using OpenAI API',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
# Translate single batch file
python batch_translator.py zh_CN_batch_1_of_4.json --api-key YOUR_KEY --language zh-CN

# Translate all batches for a language (with pattern)
python batch_translator.py "zh_CN_batch_*_of_*.json" --api-key YOUR_KEY --language zh-CN

# Use environment variable for API key
export OPENAI_API_KEY=your_key_here
python batch_translator.py zh_CN_batch_1_of_4.json --language zh-CN

# Use different model
python batch_translator.py file.json --api-key KEY --language es-ES --model gpt-4-turbo
"""
    )

    parser.add_argument('input_files', nargs='+', help='Input batch JSON file(s) or pattern')
    parser.add_argument('--api-key', help='OpenAI API key (or set OPENAI_API_KEY env var)')
    parser.add_argument('--language', '-l', required=True, help='Target language code (e.g., zh-CN, es-ES)')
    parser.add_argument('--model', default='gpt-5', help='OpenAI model to use (default: gpt-5, options: gpt-5-mini, gpt-5-nano)')
    parser.add_argument('--output-suffix', default='_translated', help='Suffix for output files (default: _translated)')
    parser.add_argument('--skip-validation', action='store_true', help='Skip validation checks')
    parser.add_argument('--delay', type=float, default=1.0, help='Delay between API calls in seconds (default: 1.0)')

    args = parser.parse_args()

    # Get API key from args or environment
    import os
    api_key = args.api_key or os.environ.get('OPENAI_API_KEY')
    if not api_key:
        print("Error: OpenAI API key required. Provide via --api-key or OPENAI_API_KEY environment variable")
        sys.exit(1)

    # Get language info
    language_name, language_code = get_language_info(args.language)

    # Expand file patterns
    import glob
    input_files = []
    for pattern in args.input_files:
        matched = glob.glob(pattern)
        if matched:
            input_files.extend(matched)
        else:
            input_files.append(pattern)  # Use as literal filename

    if not input_files:
        print("Error: No input files found")
        sys.exit(1)

    print(f"Batch Translator")
    print(f"Target Language: {language_name} ({language_code})")
    print(f"Model: {args.model}")
    print(f"Files to translate: {len(input_files)}")
    print("=" * 60)

    # Initialize translator
    translator = BatchTranslator(api_key, args.model)

    # Process each file
    successful = 0
    failed = 0

    for i, input_file in enumerate(input_files, 1):
        print(f"\n[{i}/{len(input_files)}] Processing: {input_file}")

        try:
            # Load input file
            with open(input_file, 'r', encoding='utf-8') as f:
                batch_data = json.load(f)

            # Translate
            translated_data = translator.translate_batch(batch_data, language_name, language_code)

            # Validate
            if not args.skip_validation:
                translator.validate_translation(batch_data, translated_data)

            # Save output
            input_path = Path(input_file)
            output_file = input_path.stem + args.output_suffix + input_path.suffix

            with open(output_file, 'w', encoding='utf-8') as f:
                json.dump(translated_data, f, ensure_ascii=False, separators=(',', ':'))

            print(f"✓ Saved to: {output_file}")
            successful += 1

            # Delay between API calls to avoid rate limits
            if i < len(input_files):
                time.sleep(args.delay)

        except Exception as e:
            print(f"✗ Failed: {e}")
            failed += 1
            continue

    # Summary
    print("\n" + "=" * 60)
    print(f"Translation complete!")
    print(f"Successful: {successful}/{len(input_files)}")
    if failed > 0:
        print(f"Failed: {failed}/{len(input_files)}")

    sys.exit(0 if failed == 0 else 1)


if __name__ == "__main__":
    import os
    main()