Merge branch 'main' of https://github.com/Stirling-Tools/Stirling-PDF into feature/annotations

This commit is contained in:
Reece 2025-12-16 16:38:42 +00:00
commit 68f8bb749f
347 changed files with 18340 additions and 4931 deletions

View File

@ -1,403 +0,0 @@
"""
Author: Ludy87
Description: This script processes .properties files for localization checks. It compares translation files in a branch with
a reference file to ensure consistency. The script performs two main checks:
1. Verifies that the number of lines (including comments and empty lines) in the translation files matches the reference file.
2. Ensures that all keys in the translation files are present in the reference file and vice versa.
The script also provides functionality to update the translation files to match the reference file by adding missing keys and
adjusting the format.
Usage:
python check_language_properties.py --reference-file <path_to_reference_file> --branch <branch_name> [--actor <actor_name>] [--files <list_of_changed_files>]
"""
# Sample for Windows:
# python .github/scripts/check_language_properties.py --reference-file src\main\resources\messages_en_GB.properties --branch "" --files src\main\resources\messages_de_DE.properties src\main\resources\messages_uk_UA.properties
import copy
import glob
import os
import argparse
import re
def find_duplicate_keys(file_path):
    """
    Scan a .properties file and report keys defined more than once.

    :param file_path: Path to the .properties file.
    :return: List of tuples (key, first_occurrence_line, duplicate_line).
    """
    seen = {}
    found = []
    with open(file_path, "r", encoding="utf-8") as handle:
        for lineno, raw in enumerate(handle, start=1):
            text = raw.strip()
            # Blank lines and comments cannot define keys.
            if not text or text.startswith("#"):
                continue
            # Only key=value lines are considered.
            if "=" not in text:
                continue
            name = text.split("=", 1)[0].strip()
            if name in seen:
                found.append((name, seen[name], lineno))
            else:
                seen[name] = lineno
    return found
# Upper bound on the size of a .properties file accepted for processing
# (200 KB); larger files are rejected as a safety guard.
MAX_FILE_SIZE = 200 * 1024
def parse_properties_file(file_path):
    """
    Read a .properties file into a structured list of line records.

    Each record is a dict with ``line_number`` and ``type`` (one of
    ``empty``, ``comment`` or ``entry``); comment/empty records carry
    ``content`` while entries carry ``key`` and ``value``.

    :param file_path: Path to the .properties file.
    :return: List of dictionaries representing each line in the file.
    """
    records = []
    with open(file_path, "r", encoding="utf-8") as handle:
        for lineno, raw in enumerate(handle, start=1):
            text = raw.strip()
            if not text:
                # Preserve blank lines so the layout can be reproduced.
                records.append({"line_number": lineno, "type": "empty", "content": ""})
            elif text.startswith("#"):
                records.append(
                    {"line_number": lineno, "type": "comment", "content": text}
                )
            else:
                # Lines without a key=value shape are silently dropped.
                match = re.match(r"^([^=]+)=(.*)$", raw)
                if match:
                    records.append(
                        {
                            "line_number": lineno,
                            "type": "entry",
                            "key": match.group(1).strip(),
                            "value": match.group(2).strip(),
                        }
                    )
    return records
def write_json_file(file_path, updated_properties):
    """
    Write updated properties back to the file in their original format.

    Records are emitted in ascending line-number order; comments and empty
    lines are written verbatim and entries are written as ``key=value``.

    (Name kept for backwards compatibility even though the output format is
    .properties, not JSON.)

    :param file_path: Path to the .properties file.
    :param updated_properties: List of line records (as produced by
        parse_properties_file) to write.
    """
    # Index records by their original line number so the output preserves
    # the reference file's ordering. Note: the previous implementation had
    # an unreachable `entry is None` branch (the iterated line numbers come
    # from this very dict), which has been removed.
    by_line = {entry["line_number"]: entry for entry in updated_properties}
    with open(file_path, "w", encoding="utf-8", newline="\n") as file:
        for line_number in sorted(by_line):
            entry = by_line[line_number]
            if entry["type"] in ("comment", "empty"):
                file.write(f"{entry['content']}\n")
            elif entry["type"] == "entry":
                file.write(f"{entry['key']}={entry['value']}\n")
def update_missing_keys(reference_file, file_list, branch=""):
    """
    Update missing keys in the translation files based on the reference file.

    Each translation file is rewritten to mirror the reference file
    line-for-line: comments and blank lines come from the reference, and each
    entry keeps the existing translation value when one exists (matched
    case-insensitively; last duplicate wins) or falls back to the reference
    value.

    :param reference_file: Path to the reference .properties file.
    :param file_list: List of translation files to update.
    :param branch: Branch directory where the files are located.
    """
    reference_properties = parse_properties_file(reference_file)
    for file_path in file_list:
        basename_current_file = os.path.basename(os.path.join(branch, file_path))
        # Skip the reference file itself and anything that is not a
        # messages_*.properties translation file.
        if (
            basename_current_file == os.path.basename(reference_file)
            or not file_path.endswith(".properties")
            or not basename_current_file.startswith("messages_")
        ):
            continue
        current_properties = parse_properties_file(os.path.join(branch, file_path))
        # Map existing translation values by lower-cased key for O(1) lookup
        # instead of rescanning every current entry per reference entry
        # (previously O(n*m)). A dict comprehension keeps the original
        # "last duplicate wins" behavior.
        current_values = {
            entry["key"].lower(): entry["value"]
            for entry in current_properties
            if entry["type"] == "entry"
        }
        updated_properties = []
        for ref_entry in reference_properties:
            ref_entry_copy = copy.deepcopy(ref_entry)
            if ref_entry_copy["type"] == "entry":
                key = ref_entry_copy["key"].lower()
                if key in current_values:
                    ref_entry_copy["value"] = current_values[key]
            updated_properties.append(ref_entry_copy)
        write_json_file(os.path.join(branch, file_path), updated_properties)
def check_for_missing_keys(reference_file, file_list, branch):
    """Synchronize translation files with the reference file.

    Thin delegator to update_missing_keys; the "check" is implemented by
    rewriting each translation file to match the reference layout.
    """
    update_missing_keys(reference_file, file_list, branch)
def read_properties(file_path):
    """Return the file's lines, or [""] when the path is not a regular file."""
    if not os.path.isfile(file_path):
        # A single empty line keeps downstream line-count comparisons sane.
        return [""]
    with open(file_path, "r", encoding="utf-8") as handle:
        return handle.read().splitlines()
def check_for_differences(reference_file, file_list, branch, actor):
    """
    Compare each translation file against the reference file and print a
    Markdown report.

    Three checks are run per file: (1) line counts match, (2) key sets
    match, (3) no duplicate keys. The report is only printed when at least
    one translation file (other than the reference) was actually checked.

    :param reference_file: Path to the reference .properties file.
    :param file_list: Files to check; a single space-separated string is
        also accepted.
    :param branch: Directory prefix where the files live.
    :param actor: GitHub user mentioned in the report.
    :raises ValueError: If a file is outside the expected resources
        directory or exceeds MAX_FILE_SIZE.
    """
    reference_branch = reference_file.split("/")[0]
    basename_reference_file = os.path.basename(reference_file)
    report = []
    report.append(f"#### 🔄 Reference Branch: `{reference_branch}`")
    reference_lines = read_properties(reference_file)
    has_differences = False
    only_reference_file = True
    file_arr = file_list
    if len(file_list) == 1:
        # A single argument may hold several space-separated paths.
        file_arr = file_list[0].split()
    base_dir = os.path.abspath(
        os.path.join(os.getcwd(), "app", "core", "src", "main", "resources")
    )
    for file_path in file_arr:
        file_normpath = os.path.normpath(file_path)
        absolute_path = os.path.abspath(file_normpath)
        # Verify that file is within the expected directory
        if not absolute_path.startswith(base_dir):
            raise ValueError(f"Unsafe file found: {file_normpath}")
        # Verify file size before processing
        if os.path.getsize(os.path.join(branch, file_normpath)) > MAX_FILE_SIZE:
            raise ValueError(
                f"The file {file_normpath} is too large and could pose a security risk."
            )
        basename_current_file = os.path.basename(os.path.join(branch, file_normpath))
        # Skip the reference file itself and anything that is not a
        # messages_* translation file under the resources directory.
        if (
            basename_current_file == basename_reference_file
            or (
                # only local windows command
                not file_normpath.startswith(
                    os.path.join(
                        "", "app", "core", "src", "main", "resources", "messages_"
                    )
                )
                and not file_normpath.startswith(
                    os.path.join(
                        os.getcwd(),
                        "app",
                        "core",
                        "src",
                        "main",
                        "resources",
                        "messages_",
                    )
                )
            )
            or not file_normpath.endswith(".properties")
            or not basename_current_file.startswith("messages_")
        ):
            continue
        only_reference_file = False
        report.append(f"#### 📃 **File Check:** `{basename_current_file}`")
        # NOTE(review): uses file_path rather than file_normpath here —
        # presumably equivalent on the CI runner; confirm.
        current_lines = read_properties(os.path.join(branch, file_path))
        reference_line_count = len(reference_lines)
        current_line_count = len(current_lines)
        # --- Check 1: line counts (comments and blanks included) ---
        if reference_line_count != current_line_count:
            report.append("")
            report.append("1. **Test Status:** ❌ **_Failed_**")
            report.append(" - **Issue:**")
            has_differences = True
            if reference_line_count > current_line_count:
                report.append(
                    f" - **_Mismatched line count_**: {reference_line_count} (reference) vs {current_line_count} (current). Comments, empty lines, or translation strings are missing."
                )
            elif reference_line_count < current_line_count:
                report.append(
                    f" - **_Too many lines_**: {reference_line_count} (reference) vs {current_line_count} (current). Please verify if there is an additional line that needs to be removed."
                )
        else:
            report.append("1. **Test Status:** ✅ **_Passed_**")
        # --- Check 2: key sets must match in both directions ---
        current_keys = []
        reference_keys = []
        for line in current_lines:
            if not line.startswith("#") and line != "" and "=" in line:
                key, _ = line.split("=", 1)
                current_keys.append(key)
        for line in reference_lines:
            if not line.startswith("#") and line != "" and "=" in line:
                key, _ = line.split("=", 1)
                reference_keys.append(key)
        current_keys_set = set(current_keys)
        reference_keys_set = set(reference_keys)
        # NOTE: the variable names are swapped relative to the report text —
        # "missing_keys" holds keys present only in the current file
        # (reported below as extra keys) and "extra_keys" holds keys present
        # only in the reference (reported as missing). The report wording is
        # correct; names are kept for minimal churn.
        missing_keys = current_keys_set.difference(reference_keys_set)
        extra_keys = reference_keys_set.difference(current_keys_set)
        missing_keys_list = list(missing_keys)
        extra_keys_list = list(extra_keys)
        if missing_keys_list or extra_keys_list:
            has_differences = True
            missing_keys_str = "`, `".join(missing_keys_list)
            extra_keys_str = "`, `".join(extra_keys_list)
            report.append("2. **Test Status:** ❌ **_Failed_**")
            report.append(" - **Issue:**")
            if missing_keys_list:
                spaces_keys_list = [key for key in missing_keys_list if " " in key]
                if spaces_keys_list:
                    spaces_keys_str = "`, `".join(spaces_keys_list)
                    report.append(
                        f" - **_Keys containing unnecessary spaces_**: `{spaces_keys_str}`!"
                    )
                report.append(
                    f" - **_Extra keys in `{basename_current_file}`_**: `{missing_keys_str}` that are not present in **_`{basename_reference_file}`_**."
                )
            if extra_keys_list:
                report.append(
                    f" - **_Missing keys in `{basename_reference_file}`_**: `{extra_keys_str}` that are not present in **_`{basename_current_file}`_**."
                )
        else:
            report.append("2. **Test Status:** ✅ **_Passed_**")
        # --- Check 3: duplicate keys ---
        # Fix: compute the duplicate list once; the previous code called
        # find_duplicate_keys twice, reading the file from disk twice.
        duplicates = find_duplicate_keys(os.path.join(branch, file_normpath))
        if duplicates:
            has_differences = True
            output = "\n".join(
                [
                    f" - `{key}`: first at line {first}, duplicate at `line {duplicate}`"
                    for key, first, duplicate in duplicates
                ]
            )
            report.append("3. **Test Status:** ❌ **_Failed_**")
            report.append(" - **Issue:**")
            report.append(" - duplicate entries were found:")
            report.append(output)
        else:
            report.append("3. **Test Status:** ✅ **_Passed_**")
        report.append("")
        report.append("---")
        report.append("")
    if has_differences:
        report.append("## ❌ Overall Check Status: **_Failed_**")
        report.append("")
        report.append(
            f"@{actor} please check your translation if it conforms to the standard. Follow the format of [messages_en_GB.properties](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/app/core/src/main/resources/messages_en_GB.properties)"
        )
    else:
        report.append("## ✅ Overall Check Status: **_Success_**")
        report.append("")
        report.append(
            f"Thanks @{actor} for your help in keeping the translations up to date."
        )
    if not only_reference_file:
        print("\n".join(report))
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Find missing keys")
    parser.add_argument(
        "--actor",
        required=False,
        help="Actor from PR.",
    )
    parser.add_argument(
        "--reference-file",
        required=True,
        help="Path to the reference file.",
    )
    parser.add_argument(
        "--branch",
        type=str,
        required=True,
        help="Branch name.",
    )
    parser.add_argument(
        "--check-file",
        type=str,
        required=False,
        help="List of changed files, separated by spaces.",
    )
    parser.add_argument(
        "--files",
        nargs="+",
        required=False,
        help="List of changed files, separated by spaces.",
    )
    args = parser.parse_args()
    # Sanitize --actor input to avoid injection attacks.
    # Fix: the previous pattern r"[^a-zA-Z0-9_\\-]" used a double backslash
    # inside a raw string, which put a literal backslash in the ALLOWED set
    # and let backslashes through the sanitizer. The intent was an escaped
    # hyphen; backslashes are now stripped as well.
    if args.actor:
        args.actor = re.sub(r"[^a-zA-Z0-9_-]", "", args.actor)
    # Sanitize --branch input to avoid injection attacks (same fix as above).
    if args.branch:
        args.branch = re.sub(r"[^a-zA-Z0-9-]", "", args.branch)
    file_list = args.files
    if file_list is None:
        # No explicit file list: sync either the single --check-file or every
        # messages_*.properties under the resources directory.
        if args.check_file:
            file_list = [args.check_file]
        else:
            file_list = glob.glob(
                os.path.join(
                    os.getcwd(),
                    "app",
                    "core",
                    "src",
                    "main",
                    "resources",
                    "messages_*.properties",
                )
            )
        update_missing_keys(args.reference_file, file_list)
    else:
        check_for_differences(args.reference_file, file_list, args.branch, args.actor)

View File

@ -1,6 +1,6 @@
"""
Author: Ludy87
Description: This script processes JSON translation files for localization checks. It compares translation files in a branch with
Description: This script processes TOML translation files for localization checks. It compares translation files in a branch with
a reference file to ensure consistency. The script performs two main checks:
1. Verifies that the number of translation keys in the translation files matches the reference file.
2. Ensures that all keys in the translation files are present in the reference file and vice versa.
@ -9,10 +9,10 @@ The script also provides functionality to update the translation files to match
adjusting the format.
Usage:
python check_language_json.py --reference-file <path_to_reference_file> --branch <branch_name> [--actor <actor_name>] [--files <list_of_changed_files>]
python check_language_toml.py --reference-file <path_to_reference_file> --branch <branch_name> [--actor <actor_name>] [--files <list_of_changed_files>]
"""
# Sample for Windows:
# python .github/scripts/check_language_json.py --reference-file frontend/public/locales/en-GB/translation.json --branch "" --files frontend/public/locales/de-DE/translation.json frontend/public/locales/fr-FR/translation.json
# python .github/scripts/check_language_toml.py --reference-file frontend/public/locales/en-GB/translation.toml --branch "" --files frontend/public/locales/de-DE/translation.toml frontend/public/locales/fr-FR/translation.toml
import copy
import glob
@ -20,12 +20,14 @@ import os
import argparse
import re
import json
import tomllib # Python 3.11+ (stdlib)
import tomli_w # For writing TOML files
def find_duplicate_keys(file_path, keys=None, prefix=""):
"""
Identifies duplicate keys in a JSON file (including nested keys).
:param file_path: Path to the JSON file.
Identifies duplicate keys in a TOML file (including nested keys).
:param file_path: Path to the TOML file.
:param keys: Dictionary to track keys (used for recursion).
:param prefix: Prefix for nested keys.
:return: List of tuples (key, first_occurrence_path, duplicate_path).
@ -35,8 +37,9 @@ def find_duplicate_keys(file_path, keys=None, prefix=""):
duplicates = []
with open(file_path, "r", encoding="utf-8") as file:
data = json.load(file)
# Load TOML file
with open(file_path, 'rb') as file:
data = tomllib.load(file)
def process_dict(obj, current_prefix=""):
for key, value in obj.items():
@ -54,18 +57,18 @@ def find_duplicate_keys(file_path, keys=None, prefix=""):
return duplicates
# Maximum size for JSON files (e.g., 500 KB)
# Maximum size for TOML files (e.g., 500 KB)
MAX_FILE_SIZE = 500 * 1024
def parse_json_file(file_path):
def parse_toml_file(file_path):
"""
Parses a JSON translation file and returns a flat dictionary of all keys.
:param file_path: Path to the JSON file.
Parses a TOML translation file and returns a flat dictionary of all keys.
:param file_path: Path to the TOML file.
:return: Dictionary with flattened keys.
"""
with open(file_path, "r", encoding="utf-8") as file:
data = json.load(file)
with open(file_path, 'rb') as file:
data = tomllib.load(file)
def flatten_dict(d, parent_key="", sep="."):
items = {}
@ -99,38 +102,37 @@ def unflatten_dict(d, sep="."):
return result
def write_json_file(file_path, updated_properties):
def write_toml_file(file_path, updated_properties):
"""
Writes updated properties back to the JSON file.
:param file_path: Path to the JSON file.
Writes updated properties back to the TOML file.
:param file_path: Path to the TOML file.
:param updated_properties: Dictionary of updated properties to write.
"""
nested_data = unflatten_dict(updated_properties)
with open(file_path, "w", encoding="utf-8", newline="\n") as file:
json.dump(nested_data, file, ensure_ascii=False, indent=2)
file.write("\n") # Add trailing newline
with open(file_path, "wb") as file:
tomli_w.dump(nested_data, file)
def update_missing_keys(reference_file, file_list, branch=""):
"""
Updates missing keys in the translation files based on the reference file.
:param reference_file: Path to the reference JSON file.
:param reference_file: Path to the reference TOML file.
:param file_list: List of translation files to update.
:param branch: Branch where the files are located.
"""
reference_properties = parse_json_file(reference_file)
reference_properties = parse_toml_file(reference_file)
for file_path in file_list:
basename_current_file = os.path.basename(os.path.join(branch, file_path))
if (
basename_current_file == os.path.basename(reference_file)
or not file_path.endswith(".json")
or not file_path.endswith(".toml")
or not os.path.dirname(file_path).endswith("locales")
):
continue
current_properties = parse_json_file(os.path.join(branch, file_path))
current_properties = parse_toml_file(os.path.join(branch, file_path))
updated_properties = {}
for ref_key, ref_value in reference_properties.items():
@ -141,16 +143,16 @@ def update_missing_keys(reference_file, file_list, branch=""):
# Add missing key with reference value
updated_properties[ref_key] = ref_value
write_json_file(os.path.join(branch, file_path), updated_properties)
write_toml_file(os.path.join(branch, file_path), updated_properties)
def check_for_missing_keys(reference_file, file_list, branch):
update_missing_keys(reference_file, file_list, branch)
def read_json_keys(file_path):
def read_toml_keys(file_path):
if os.path.isfile(file_path) and os.path.exists(file_path):
return parse_json_file(file_path)
return parse_toml_file(file_path)
return {}
@ -160,7 +162,7 @@ def check_for_differences(reference_file, file_list, branch, actor):
report = []
report.append(f"#### 🔄 Reference Branch: `{reference_branch}`")
reference_keys = read_json_keys(reference_file)
reference_keys = read_toml_keys(reference_file)
has_differences = False
only_reference_file = True
@ -197,12 +199,12 @@ def check_for_differences(reference_file, file_list, branch, actor):
):
continue
if not file_normpath.endswith(".json") or basename_current_file != "translation.json":
if not file_normpath.endswith(".toml") or basename_current_file != "translation.toml":
continue
only_reference_file = False
report.append(f"#### 📃 **File Check:** `{locale_dir}/{basename_current_file}`")
current_keys = read_json_keys(os.path.join(branch, file_path))
current_keys = read_toml_keys(os.path.join(branch, file_path))
reference_key_count = len(reference_keys)
current_key_count = len(current_keys)
@ -272,7 +274,7 @@ def check_for_differences(reference_file, file_list, branch, actor):
report.append("## ❌ Overall Check Status: **_Failed_**")
report.append("")
report.append(
f"@{actor} please check your translation if it conforms to the standard. Follow the format of [en-GB/translation.json](https://github.com/Stirling-Tools/Stirling-PDF/blob/V2/frontend/public/locales/en-GB/translation.json)"
f"@{actor} please check your translation if it conforms to the standard. Follow the format of [en-GB/translation.toml](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/frontend/public/locales/en-GB/translation.toml)"
)
else:
report.append("## ✅ Overall Check Status: **_Success_**")
@ -286,7 +288,7 @@ def check_for_differences(reference_file, file_list, branch, actor):
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Find missing keys")
parser = argparse.ArgumentParser(description="Find missing keys in TOML translation files")
parser.add_argument(
"--actor",
required=False,
@ -337,9 +339,9 @@ if __name__ == "__main__":
"public",
"locales",
"*",
"translation.json",
"translation.toml",
)
)
update_missing_keys(args.reference_file, file_list)
else:
check_for_differences(args.reference_file, file_list, args.branch, args.actor)
check_for_differences(args.reference_file, file_list, args.branch, args.actor)

View File

@ -52,7 +52,6 @@ jobs:
core.setOutput('repository', pr.head.repo.full_name);
core.setOutput('ref', pr.head.ref);
core.setOutput('is_fork', String(pr.head.repo.fork));
core.setOutput('base_ref', pr.base.ref);
core.setOutput('author', pr.user.login);
core.setOutput('state', pr.state);
@ -65,10 +64,6 @@ jobs:
IS_FORK: ${{ steps.resolve.outputs.is_fork }}
# nur bei workflow_dispatch gesetzt:
ALLOW_FORK_INPUT: ${{ inputs.allow_fork }}
# für Auto-PR-Logik:
PR_TITLE: ${{ github.event.pull_request.title }}
PR_BRANCH: ${{ github.event.pull_request.head.ref }}
PR_BASE: ${{ steps.resolve.outputs.base_ref }}
PR_AUTHOR: ${{ steps.resolve.outputs.author }}
run: |
set -e
@ -89,14 +84,8 @@ jobs:
else
auth_users=("Frooodle" "sf298" "Ludy87" "LaserKaspar" "sbplat" "reecebrowne" "DarioGii" "ConnorYoh" "EthanHealy01" "jbrunton96" "balazs-szucs")
is_auth=false; for u in "${auth_users[@]}"; do [ "$u" = "$PR_AUTHOR" ] && is_auth=true && break; done
if [ "$PR_BASE" = "V2" ] && [ "$is_auth" = true ]; then
if [ "$is_auth" = true ]; then
should=true
else
title_has_v2=false; echo "$PR_TITLE" | grep -qiE 'v2|version.?2|version.?two' && title_has_v2=true
branch_has_kw=false; echo "$PR_BRANCH" | grep -qiE 'v2|react' && branch_has_kw=true
if [ "$is_auth" = true ] && { [ "$title_has_v2" = true ] || [ "$branch_has_kw" = true ]; }; then
should=true
fi
fi
fi
@ -174,7 +163,7 @@ jobs:
owner,
repo,
issue_number: prNumber,
body: `🚀 **Auto-deploying V2 version** for PR #${prNumber}...\n\n_This is an automated deployment triggered by V2/version2 keywords in the PR title or V2/React keywords in the branch name._\n\n⚠ **Note:** If new commits are pushed during deployment, this build will be cancelled and replaced with the latest version.`
body: `🚀 **Auto-deploying V2 version** for PR #${prNumber}...\n\n_This is an automated deployment for approved V2 contributors._\n\n⚠ **Note:** If new commits are pushed during deployment, this build will be cancelled and replaced with the latest version.`
});
return newComment.id;
@ -394,7 +383,7 @@ jobs:
`🔗 **Direct Test URL (non-SSL)** [${deploymentUrl}](${deploymentUrl})\n\n` +
`🔐 **Secure HTTPS URL**: [${httpsUrl}](${httpsUrl})\n\n` +
`_This deployment will be automatically cleaned up when the PR is closed._\n\n` +
`🔄 **Auto-deployed** because PR title or branch name contains V2/version2/React keywords.`;
`🔄 **Auto-deployed** for approved V2 contributors.`;
await github.rest.issues.createComment({
owner,

View File

@ -14,6 +14,7 @@ jobs:
permissions:
issues: write
if: |
vars.CI_PROFILE != 'lite' &&
github.event.issue.pull_request &&
(
contains(github.event.comment.body, 'prdeploy') ||

View File

@ -262,7 +262,13 @@ jobs:
strategy:
fail-fast: false
matrix:
docker-rev: ["docker/embedded/Dockerfile", "docker/embedded/Dockerfile.ultra-lite", "docker/embedded/Dockerfile.fat"]
include:
- docker-rev: docker/embedded/Dockerfile
artifact-suffix: Dockerfile
- docker-rev: docker/embedded/Dockerfile.ultra-lite
artifact-suffix: Dockerfile.ultra-lite
- docker-rev: docker/embedded/Dockerfile.fat
artifact-suffix: Dockerfile.fat
steps:
- name: Harden Runner
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
@ -272,6 +278,13 @@ jobs:
- name: Checkout Repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Free disk space on runner
run: |
echo "Disk space before cleanup:" && df -h
sudo rm -rf /usr/share/dotnet /opt/ghc /usr/local/lib/android /usr/local/share/boost
docker system prune -af || true
echo "Disk space after cleanup:" && df -h
- name: Set up JDK 17
uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
with:
@ -313,7 +326,7 @@ jobs:
if: always()
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: reports-docker-${{ matrix.docker-rev }}
name: reports-docker-${{ matrix.artifact-suffix }}
path: |
build/reports/tests/
build/test-results/

View File

@ -1,19 +1,14 @@
name: Check Properties Files on PR
name: Check TOML Translation Files on PR
# This workflow validates TOML translation files
on:
pull_request_target:
types: [opened, synchronize, reopened]
paths:
- "app/core/src/main/resources/messages_*.properties"
- "frontend/public/locales/*/translation.toml"
# cancel in-progress jobs if a new job is triggered
# This is useful to avoid running multiple builds for the same branch if a new commit is pushed
# or a pull request is updated.
# It helps to save resources and time by ensuring that only the latest commit is built and tested
# This is particularly useful for long-running jobs that may take a while to complete.
# The `group` is set to a combination of the workflow name, event name, and branch name.
# This ensures that jobs are grouped by the workflow and branch, allowing for cancellation of
# in-progress jobs when a new commit is pushed to the same branch or a new pull request is opened.
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.ref_name || github.ref }}
cancel-in-progress: true
@ -73,22 +68,22 @@ jobs:
run: |
echo "Fetching PR changed files..."
echo "Getting list of changed files from PR..."
# Check if PR number exists
if [ -z "${{ steps.get-pr-data.outputs.pr_number }}" ]; then
echo "Error: PR number is empty"
exit 1
fi
# Get changed files and filter for properties files, handle case where no matches are found
gh pr view ${{ steps.get-pr-data.outputs.pr_number }} --json files -q ".files[].path" | grep -E '^app/core/src/main/resources/messages_[a-zA-Z_]{2}_[a-zA-Z_]{2,7}\.properties$' > changed_files.txt || echo "No matching properties files found in PR"
# Check if any files were found
if [ ! -s changed_files.txt ]; then
echo "No properties files changed in this PR"
echo "Workflow will exit early as no relevant files to check"
exit 0
fi
echo "Found $(wc -l < changed_files.txt) matching properties files"
# Check if PR number exists
if [ -z "${{ steps.get-pr-data.outputs.pr_number }}" ]; then
echo "Error: PR number is empty"
exit 1
fi
# Get changed files and filter for TOML translation files
gh pr view ${{ steps.get-pr-data.outputs.pr_number }} --json files -q ".files[].path" | grep -E '^frontend/public/locales/[a-zA-Z-]+/translation\.toml$' > changed_files.txt || echo "No matching TOML files found in PR"
# Check if any files were found
if [ ! -s changed_files.txt ]; then
echo "No TOML translation files changed in this PR"
echo "Workflow will exit early as no relevant files to check"
exit 0
fi
echo "Found $(wc -l < changed_files.txt) matching TOML files"
- name: Determine reference file test
- name: Determine reference file
id: determine-file
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
@ -125,11 +120,11 @@ jobs:
pull_number: prNumber,
});
// Filter for relevant files based on the PR changes
// Filter for relevant TOML files based on the PR changes
const changedFiles = files
.filter(file =>
file.status !== "removed" &&
/^app\/core\/src\/main\/resources\/messages_[a-zA-Z_]{2}_[a-zA-Z_]{2,7}\.properties$/.test(file.filename)
/^frontend\/public\/locales\/[a-zA-Z-]+\/translation\.toml$/.test(file.filename)
)
.map(file => file.filename);
@ -169,16 +164,16 @@ jobs:
// Determine reference file
let referenceFilePath;
if (changedFiles.includes("app/core/src/main/resources/messages_en_GB.properties")) {
if (changedFiles.includes("frontend/public/locales/en-GB/translation.toml")) {
console.log("Using PR branch reference file.");
const { data: fileContent } = await github.rest.repos.getContent({
owner: prRepoOwner,
repo: prRepoName,
path: "app/core/src/main/resources/messages_en_GB.properties",
path: "frontend/public/locales/en-GB/translation.toml",
ref: branch,
});
referenceFilePath = "pr-branch-messages_en_GB.properties";
referenceFilePath = "pr-branch-translation-en-GB.toml";
const content = Buffer.from(fileContent.content, "base64").toString("utf-8");
fs.writeFileSync(referenceFilePath, content);
} else {
@ -186,11 +181,11 @@ jobs:
const { data: fileContent } = await github.rest.repos.getContent({
owner: repoOwner,
repo: repoName,
path: "app/core/src/main/resources/messages_en_GB.properties",
path: "frontend/public/locales/en-GB/translation.toml",
ref: "main",
});
referenceFilePath = "main-branch-messages_en_GB.properties";
referenceFilePath = "main-branch-translation-en-GB.toml";
const content = Buffer.from(fileContent.content, "base64").toString("utf-8");
fs.writeFileSync(referenceFilePath, content);
}
@ -198,11 +193,20 @@ jobs:
console.log(`Reference file path: ${referenceFilePath}`);
core.exportVariable("REFERENCE_FILE", referenceFilePath);
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.12"
- name: Install Python dependencies
run: |
pip install tomli-w
- name: Run Python script to check files
id: run-check
run: |
echo "Running Python script to check files..."
python .github/scripts/check_language_properties.py \
echo "Running Python script to check TOML files..."
python .github/scripts/check_language_toml.py \
--actor ${{ github.event.pull_request.user.login }} \
--reference-file "${REFERENCE_FILE}" \
--branch "pr-branch" \
@ -213,7 +217,7 @@ jobs:
id: capture-output
run: |
if [ -f result.txt ] && [ -s result.txt ]; then
echo "Test, capturing output..."
echo "Capturing output..."
SCRIPT_OUTPUT=$(cat result.txt)
echo "SCRIPT_OUTPUT<<EOF" >> $GITHUB_ENV
echo "$SCRIPT_OUTPUT" >> $GITHUB_ENV
@ -227,7 +231,7 @@ jobs:
echo "FAIL_JOB=false" >> $GITHUB_ENV
fi
else
echo "No update found."
echo "No output found."
echo "SCRIPT_OUTPUT=" >> $GITHUB_ENV
echo "FAIL_JOB=false" >> $GITHUB_ENV
fi
@ -249,7 +253,7 @@ jobs:
issue_number: issueNumber
});
const comment = comments.data.find(c => c.body.includes("## 🚀 Translation Verification Summary"));
const comment = comments.data.find(c => c.body.includes("## 🌐 TOML Translation Verification Summary"));
// Only update or create comments by the action user
const expectedActor = "${{ steps.setup-bot.outputs.app-slug }}[bot]";
@ -260,7 +264,7 @@ jobs:
owner: repoOwner,
repo: repoName,
comment_id: comment.id,
body: `## 🚀 Translation Verification Summary\n\n\n${SCRIPT_OUTPUT}\n`
body: `## 🌐 TOML Translation Verification Summary\n\n\n${SCRIPT_OUTPUT}\n`
});
console.log("Updated existing comment.");
} else if (!comment) {
@ -269,7 +273,7 @@ jobs:
owner: repoOwner,
repo: repoName,
issue_number: issueNumber,
body: `## 🚀 Translation Verification Summary\n\n\n${SCRIPT_OUTPUT}\n`
body: `## 🌐 TOML Translation Verification Summary\n\n\n${SCRIPT_OUTPUT}\n`
});
console.log("Created new comment.");
} else {
@ -287,6 +291,6 @@ jobs:
run: |
echo "Cleaning up temporary files..."
rm -rf pr-branch
rm -f pr-branch-messages_en_GB.properties main-branch-messages_en_GB.properties changed_files.txt result.txt
rm -f pr-branch-translation-en-GB.toml main-branch-translation-en-GB.toml changed_files.txt result.txt
echo "Cleanup complete."
continue-on-error: true # Ensure cleanup runs even if previous steps fail

View File

@ -31,6 +31,7 @@ permissions:
jobs:
determine-matrix:
if: ${{ vars.CI_PROFILE != 'lite' }}
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}

View File

@ -24,6 +24,7 @@ permissions:
jobs:
push:
if: ${{ vars.CI_PROFILE != 'lite' }}
runs-on: ubuntu-24.04-8core
permissions:
packages: write

View File

@ -24,6 +24,7 @@ permissions:
jobs:
push:
if: ${{ vars.CI_PROFILE != 'lite' }}
runs-on: ubuntu-latest
permissions:
packages: write

View File

@ -17,6 +17,7 @@ permissions: read-all
jobs:
analysis:
if: ${{ vars.CI_PROFILE != 'lite' }}
name: Scorecard analysis
runs-on: ubuntu-latest
permissions:

View File

@ -27,6 +27,7 @@ permissions:
jobs:
sonarqube:
if: ${{ vars.CI_PROFILE != 'lite' }}
runs-on: ubuntu-latest
steps:
- name: Harden Runner

View File

@ -10,6 +10,7 @@ permissions:
jobs:
stale:
if: ${{ vars.CI_PROFILE != 'lite' }}
runs-on: ubuntu-latest
permissions:
issues: write

View File

@ -23,6 +23,7 @@ permissions:
jobs:
push:
if: ${{ vars.CI_PROFILE != 'lite' }}
runs-on: ubuntu-latest
steps:
- name: Harden Runner

View File

@ -1,122 +0,0 @@
name: Sync Files
on:
workflow_dispatch:
push:
branches:
- main
paths:
- "build.gradle"
- "README.md"
- "app/core/src/main/resources/messages_*.properties"
- "app/core/src/main/resources/static/3rdPartyLicenses.json"
- "scripts/ignore_translation.toml"
# cancel in-progress jobs if a new job is triggered
# This is useful to avoid running multiple builds for the same branch if a new commit is pushed
# or a pull request is updated.
# It helps to save resources and time by ensuring that only the latest commit is built and tested
# This is particularly useful for long-running jobs that may take a while to complete.
# The `group` is set to a combination of the workflow name, event name, and branch name.
# This ensures that jobs are grouped by the workflow and branch, allowing for cancellation of
# in-progress jobs when a new commit is pushed to the same branch or a new pull request is opened.
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref_name || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
sync-files:
runs-on: ubuntu-latest
env:
# Prevents sdist builds → no tar extraction
PIP_ONLY_BINARY: ":all:"
PIP_DISABLE_PIP_VERSION_CHECK: "1"
steps:
- name: Harden Runner
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
with:
egress-policy: audit
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Setup GitHub App Bot
id: setup-bot
uses: ./.github/actions/setup-bot
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.12"
cache: "pip" # caching pip dependencies
- name: Sync translation property files
run: |
python .github/scripts/check_language_properties.py --reference-file "app/core/src/main/resources/messages_en_GB.properties" --branch main
- name: Commit translation files
run: |
git add app/core/src/main/resources/messages_*.properties
git diff --staged --quiet || git commit -m ":memo: Sync translation files" || echo "No changes detected"
- name: Install dependencies
# Wheels-only + Hash-Pinning
run: |
pip install --require-hashes --only-binary=:all: -r ./.github/scripts/requirements_sync_readme.txt
- name: Sync README.md
run: |
python scripts/counter_translation.py
- name: Run git add
run: |
git add README.md scripts/ignore_translation.toml
git diff --staged --quiet || git commit -m ":memo: Sync README.md & scripts/ignore_translation.toml" || echo "No changes detected"
- name: Create Pull Request
if: always()
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
token: ${{ steps.setup-bot.outputs.token }}
commit-message: Update files
committer: ${{ steps.setup-bot.outputs.committer }}
author: ${{ steps.setup-bot.outputs.committer }}
signoff: true
branch: sync_readme
title: ":globe_with_meridians: Sync Translations + Update README Progress Table"
body: |
### Description of Changes
This Pull Request was automatically generated to synchronize updates to translation files and documentation. Below are the details of the changes made:
#### **1. Synchronization of Translation Files**
- Updated translation files (`messages_*.properties`) to reflect changes in the reference file `messages_en_GB.properties`.
- Ensured consistency and synchronization across all supported language files.
- Highlighted any missing or incomplete translations.
#### **2. Update README.md**
- Generated the translation progress table in `README.md`.
- Added a summary of the current translation status for all supported languages.
- Included up-to-date statistics on translation coverage.
#### **Why these changes are necessary**
- Keeps translation files aligned with the latest reference updates.
- Ensures the documentation reflects the current translation progress.
---
Auto-generated by [create-pull-request][1].
[1]: https://github.com/peter-evans/create-pull-request
draft: false
delete-branch: true
labels: github-actions
sign-commits: true
add-paths: |
README.md
app/core/src/main/resources/messages_*.properties

View File

@ -1,15 +1,15 @@
name: Sync Files V2
name: Sync Files (TOML)
on:
workflow_dispatch:
push:
branches:
- V2
- main
- syncLangTest
paths:
- "build.gradle"
- "README.md"
- "frontend/public/locales/*/translation.json"
- "frontend/public/locales/*/translation.toml"
- "app/core/src/main/resources/static/3rdPartyLicenses.json"
- "scripts/ignore_translation.toml"
@ -52,21 +52,25 @@ jobs:
python-version: "3.12"
cache: "pip" # caching pip dependencies
- name: Sync translation JSON files
- name: Install Python dependencies
run: |
python .github/scripts/check_language_json.py --reference-file "frontend/public/locales/en-GB/translation.json" --branch V2
pip install tomli-w
- name: Sync translation TOML files
run: |
python .github/scripts/check_language_toml.py --reference-file "frontend/public/locales/en-GB/translation.toml" --branch main
- name: Commit translation files
run: |
git add frontend/public/locales/*/translation.json
git diff --staged --quiet || git commit -m ":memo: Sync translation files" || echo "No changes detected"
git add frontend/public/locales/*/translation.toml
git diff --staged --quiet || git commit -m ":memo: Sync translation files (TOML)" || echo "No changes detected"
- name: Install dependencies
- name: Install README dependencies
run: pip install --require-hashes -r ./.github/scripts/requirements_sync_readme.txt
- name: Sync README.md
run: |
python scripts/counter_translation_v2.py
python scripts/counter_translation_v3.py
- name: Run git add
run: |
@ -82,21 +86,22 @@ jobs:
committer: ${{ steps.setup-bot.outputs.committer }}
author: ${{ steps.setup-bot.outputs.committer }}
signoff: true
branch: sync_readme_v2
base: V2
title: ":globe_with_meridians: [V2] Sync Translations + Update README Progress Table"
branch: sync_readme_v3
base: main
title: ":globe_with_meridians: Sync Translations + Update README Progress Table"
body: |
### Description of Changes
This Pull Request was automatically generated to synchronize updates to translation files and documentation for the **V2 branch**. Below are the details of the changes made:
This Pull Request was automatically generated to synchronize updates to translation files and documentation. Below are the details of the changes made:
#### **1. Synchronization of Translation Files**
- Updated translation files (`frontend/public/locales/*/translation.json`) to reflect changes in the reference file `en-GB/translation.json`.
- Updated translation files (`frontend/public/locales/*/translation.toml`) to reflect changes in the reference file `en-GB/translation.toml`.
- Ensured consistency and synchronization across all supported language files.
- Highlighted any missing or incomplete translations.
- **Format**: TOML
#### **2. Update README.md**
- Generated the translation progress table in `README.md`.
- Generated the translation progress table in `README.md` using `counter_translation_v3.py`.
- Added a summary of the current translation status for all supported languages.
- Included up-to-date statistics on translation coverage.
@ -115,4 +120,5 @@ jobs:
sign-commits: true
add-paths: |
README.md
frontend/public/locales/*/translation.json
frontend/public/locales/*/translation.toml
scripts/ignore_translation.toml

View File

@ -28,6 +28,7 @@ permissions:
jobs:
determine-matrix:
if: ${{ vars.CI_PROFILE != 'lite' }}
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
@ -636,6 +637,8 @@ jobs:
if [ "${{ needs.build.result }}" = "success" ]; then
echo "✅ All Tauri builds completed successfully!"
echo "Artifacts are ready for distribution."
elif [ "${{ needs.build.result }}" = "skipped" ]; then
echo "⏭️ Tauri builds skipped (CI lite mode enabled)"
else
echo "❌ Some Tauri builds failed."
echo "Please check the logs and fix any issues."

View File

@ -21,6 +21,7 @@ permissions:
jobs:
deploy:
if: ${{ vars.CI_PROFILE != 'lite' }}
runs-on: ubuntu-latest
steps:
- name: Harden Runner

View File

@ -202,10 +202,10 @@ const [ToolName] = (props: BaseToolProps) => {
## 5. Add Translations
Update translation files. **Important: Only update `en-GB` files** - other languages are handled separately.
**File to update:** `frontend/public/locales/en-GB/translation.json`
**File to update:** `frontend/public/locales/en-GB/translation.toml`
**Required Translation Keys**:
```json
```toml
{
"home": {
"[toolName]": {
@ -251,7 +251,7 @@ Update translation files. **Important: Only update `en-GB` files** - other langu
```
**Translation Notes:**
- **Only update `en-GB/translation.json`** - other locale files are managed separately
- **Only update `en-GB/translation.toml`** - other locale files are managed separately
- Use descriptive keys that match your component's `t()` calls
- Include tooltip translations if you created tooltip hooks
- Add `options.*` keys if your tool has settings with descriptions

View File

@ -491,6 +491,9 @@ public class EndpointConfiguration {
addEndpointToGroup("Ghostscript", "repair");
addEndpointToGroup("Ghostscript", "compress-pdf");
/* ImageMagick */
addEndpointToGroup("ImageMagick", "compress-pdf");
/* tesseract */
addEndpointToGroup("tesseract", "ocr-pdf");
@ -574,6 +577,7 @@ public class EndpointConfiguration {
|| "Javascript".equals(group)
|| "Weasyprint".equals(group)
|| "Pdftohtml".equals(group)
|| "ImageMagick".equals(group)
|| "rar".equals(group);
}

View File

@ -68,6 +68,7 @@ public class ApplicationProperties {
private AutoPipeline autoPipeline = new AutoPipeline();
private ProcessExecutor processExecutor = new ProcessExecutor();
private PdfEditor pdfEditor = new PdfEditor();
@Bean
public PropertySource<?> dynamicYamlPropertySource(ConfigurableEnvironment environment)
@ -100,6 +101,46 @@ public class ApplicationProperties {
private String outputFolder;
}
@Data
public static class PdfEditor {
private Cache cache = new Cache();
private FontNormalization fontNormalization = new FontNormalization();
private CffConverter cffConverter = new CffConverter();
private Type3 type3 = new Type3();
private String fallbackFont = "classpath:/static/fonts/NotoSans-Regular.ttf";
@Data
public static class Cache {
private long maxBytes = -1;
private int maxPercent = 20;
}
@Data
public static class FontNormalization {
private boolean enabled = false;
}
@Data
public static class CffConverter {
private boolean enabled = true;
private String method = "python";
private String pythonCommand = "/opt/venv/bin/python3";
private String pythonScript = "/scripts/convert_cff_to_ttf.py";
private String fontforgeCommand = "fontforge";
}
@Data
public static class Type3 {
private Library library = new Library();
@Data
public static class Library {
private boolean enabled = true;
private String index = "classpath:/type3/library/index.json";
}
}
}
@Data
public static class Legal {
private String termsAndConditions;
@ -112,7 +153,6 @@ public class ApplicationProperties {
@Data
public static class Security {
private Boolean enableLogin;
private Boolean csrfDisabled;
private InitialLogin initialLogin = new InitialLogin();
private OAUTH2 oauth2 = new OAUTH2();
private SAML2 saml2 = new SAML2();
@ -358,6 +398,7 @@ public class ApplicationProperties {
private Boolean enableAnalytics;
private Boolean enablePosthog;
private Boolean enableScarf;
private Boolean enableDesktopInstallSlide;
private Datasource datasource;
private Boolean disableSanitize;
private int maxDPI;
@ -368,10 +409,12 @@ public class ApplicationProperties {
private TempFileManagement tempFileManagement = new TempFileManagement();
private DatabaseBackup databaseBackup = new DatabaseBackup();
private List<String> corsAllowedOrigins = new ArrayList<>();
private String
frontendUrl; // Base URL for frontend (used for invite links, etc.). If not set,
private String backendUrl; // Backend base URL for SAML/OAuth/API callbacks (e.g.
// 'http://localhost:8080', 'https://api.example.com'). Required for
// SSO.
private String frontendUrl; // Frontend URL for invite email links (e.g.
// falls back to backend URL.
// 'https://app.example.com'). If not set, falls back to backendUrl.
public boolean isAnalyticsEnabled() {
return this.getEnableAnalytics() != null && this.getEnableAnalytics();
@ -536,6 +579,7 @@ public class ApplicationProperties {
@ToString.Exclude private String key;
private String UUID;
private String appVersion;
private Boolean isNewServer;
}
// TODO: Remove post migration
@ -575,6 +619,16 @@ public class ApplicationProperties {
private String username;
@ToString.Exclude private String password;
private String from;
// STARTTLS upgrades a plain SMTP connection to TLS after connecting (RFC 3207)
private Boolean startTlsEnable = true;
private Boolean startTlsRequired;
// SSL/TLS wrapper for implicit TLS (typically port 465)
private Boolean sslEnable;
// Hostnames or patterns (e.g., "smtp.example.com" or "*") to trust for TLS certificates;
// defaults to "*" (trust all) when not set
private String sslTrust;
// Enables hostname verification for TLS connections
private Boolean sslCheckServerIdentity;
}
@Data
@ -643,6 +697,7 @@ public class ApplicationProperties {
private int weasyPrintSessionLimit;
private int installAppSessionLimit;
private int calibreSessionLimit;
private int imageMagickSessionLimit;
private int qpdfSessionLimit;
private int tesseractSessionLimit;
private int ghostscriptSessionLimit;
@ -680,6 +735,10 @@ public class ApplicationProperties {
return calibreSessionLimit > 0 ? calibreSessionLimit : 1;
}
public int getImageMagickSessionLimit() {
return imageMagickSessionLimit > 0 ? imageMagickSessionLimit : 4;
}
public int getGhostscriptSessionLimit() {
return ghostscriptSessionLimit > 0 ? ghostscriptSessionLimit : 8;
}
@ -709,6 +768,8 @@ public class ApplicationProperties {
@JsonProperty("calibretimeoutMinutes")
private long calibreTimeoutMinutes;
private long imageMagickTimeoutMinutes;
private long tesseractTimeoutMinutes;
private long qpdfTimeoutMinutes;
private long ghostscriptTimeoutMinutes;
@ -746,6 +807,10 @@ public class ApplicationProperties {
return calibreTimeoutMinutes > 0 ? calibreTimeoutMinutes : 30;
}
public long getImageMagickTimeoutMinutes() {
return imageMagickTimeoutMinutes > 0 ? imageMagickTimeoutMinutes : 30;
}
public long getGhostscriptTimeoutMinutes() {
return ghostscriptTimeoutMinutes > 0 ? ghostscriptTimeoutMinutes : 30;
}

View File

@ -0,0 +1,12 @@
package stirling.software.common.service;
import java.io.IOException;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.graphics.image.PDImageXObject;
public interface LineArtConversionService {
PDImageXObject convertImageToLineArt(
PDDocument doc, PDImageXObject originalImage, double threshold, int edgeLevel)
throws IOException;
}

View File

@ -254,10 +254,7 @@ public class PostHogService {
properties,
"security_enableLogin",
applicationProperties.getSecurity().getEnableLogin());
addIfNotEmpty(
properties,
"security_csrfDisabled",
applicationProperties.getSecurity().getCsrfDisabled());
addIfNotEmpty(properties, "security_csrfDisabled", true);
addIfNotEmpty(
properties,
"security_loginAttemptCount",

View File

@ -86,6 +86,11 @@ public class ProcessExecutor {
.getProcessExecutor()
.getSessionLimit()
.getCalibreSessionLimit();
case IMAGEMAGICK ->
applicationProperties
.getProcessExecutor()
.getSessionLimit()
.getImageMagickSessionLimit();
case GHOSTSCRIPT ->
applicationProperties
.getProcessExecutor()
@ -141,6 +146,11 @@ public class ProcessExecutor {
.getProcessExecutor()
.getTimeoutMinutes()
.getCalibreTimeoutMinutes();
case IMAGEMAGICK ->
applicationProperties
.getProcessExecutor()
.getTimeoutMinutes()
.getImageMagickTimeoutMinutes();
case GHOSTSCRIPT ->
applicationProperties
.getProcessExecutor()
@ -301,6 +311,7 @@ public class ProcessExecutor {
WEASYPRINT,
INSTALL_APP,
CALIBRE,
IMAGEMAGICK,
TESSERACT,
QPDF,
GHOSTSCRIPT,

View File

@ -26,6 +26,7 @@ public class RequestUriUtils {
|| normalizedUri.startsWith("/public/")
|| normalizedUri.startsWith("/pdfjs/")
|| normalizedUri.startsWith("/pdfjs-legacy/")
|| normalizedUri.startsWith("/pdfium/")
|| normalizedUri.startsWith("/assets/")
|| normalizedUri.startsWith("/locales/")
|| normalizedUri.startsWith("/Login/")
@ -61,7 +62,8 @@ public class RequestUriUtils {
|| normalizedUri.endsWith(".css")
|| normalizedUri.endsWith(".mjs")
|| normalizedUri.endsWith(".html")
|| normalizedUri.endsWith(".toml");
|| normalizedUri.endsWith(".toml")
|| normalizedUri.endsWith(".wasm");
}
public static boolean isFrontendRoute(String contextPath, String requestURI) {
@ -125,11 +127,13 @@ public class RequestUriUtils {
|| requestURI.endsWith("popularity.txt")
|| requestURI.endsWith(".js")
|| requestURI.endsWith(".toml")
|| requestURI.endsWith(".wasm")
|| requestURI.contains("swagger")
|| requestURI.startsWith("/api/v1/info")
|| requestURI.startsWith("/site.webmanifest")
|| requestURI.startsWith("/fonts")
|| requestURI.startsWith("/pdfjs"));
|| requestURI.startsWith("/pdfjs")
|| requestURI.startsWith("/pdfium"));
}
/**
@ -162,10 +166,9 @@ public class RequestUriUtils {
// enableLogin)
|| trimmedUri.startsWith(
"/api/v1/ui-data/footer-info") // Public footer configuration
|| trimmedUri.startsWith("/v1/api-docs")
|| trimmedUri.startsWith("/api/v1/invite/validate")
|| trimmedUri.startsWith("/api/v1/invite/accept")
|| trimmedUri.contains("/v1/api-docs");
|| trimmedUri.startsWith("/v1/api-docs");
}
private static String stripContextPath(String contextPath, String requestURI) {

View File

@ -24,6 +24,9 @@ public class RequestUriUtilsTest {
assertTrue(
RequestUriUtils.isStaticResource("/pdfjs/pdf.worker.js"),
"PDF.js files should be static");
assertTrue(
RequestUriUtils.isStaticResource("/pdfium/pdfium.wasm"),
"PDFium wasm should be static");
assertTrue(
RequestUriUtils.isStaticResource("/api/v1/info/status"),
"API status should be static");
@ -110,7 +113,8 @@ public class RequestUriUtilsTest {
"/downloads/document.png",
"/assets/brand.ico",
"/any/path/with/image.svg",
"/deep/nested/folder/icon.png"
"/deep/nested/folder/icon.png",
"/pdfium/pdfium.wasm"
})
void testIsStaticResourceWithFileExtensions(String path) {
assertTrue(
@ -148,6 +152,9 @@ public class RequestUriUtilsTest {
assertFalse(
RequestUriUtils.isTrackableResource("/script.js"),
"JS files should not be trackable");
assertFalse(
RequestUriUtils.isTrackableResource("/pdfium/pdfium.wasm"),
"PDFium wasm should not be trackable");
assertFalse(
RequestUriUtils.isTrackableResource("/swagger/index.html"),
"Swagger files should not be trackable");
@ -224,7 +231,8 @@ public class RequestUriUtilsTest {
"/api/v1/info/health",
"/site.webmanifest",
"/fonts/roboto.woff",
"/pdfjs/viewer.js"
"/pdfjs/viewer.js",
"/pdfium/pdfium.wasm"
})
void testNonTrackableResources(String path) {
assertFalse(

View File

@ -46,6 +46,7 @@ public class ExternalAppDepConfig {
put("qpdf", List.of("qpdf"));
put("tesseract", List.of("tesseract"));
put("rar", List.of("rar")); // Required for real CBR output
put("magick", List.of("ImageMagick"));
}
};
}
@ -128,6 +129,7 @@ public class ExternalAppDepConfig {
checkDependencyAndDisableGroup("pdftohtml");
checkDependencyAndDisableGroup(unoconvPath);
checkDependencyAndDisableGroup("rar");
checkDependencyAndDisableGroup("magick");
// Special handling for Python/OpenCV dependencies
boolean pythonAvailable = isCommandAvailable("python3") || isCommandAvailable("python");
if (!pythonAvailable) {

View File

@ -34,7 +34,6 @@ public class InitialSetup {
public void init() throws IOException {
initUUIDKey();
initSecretKey();
initEnableCSRFSecurity();
initLegalUrls();
initSetAppVersion();
GeneralUtils.extractPipeline();
@ -60,18 +59,6 @@ public class InitialSetup {
}
}
public void initEnableCSRFSecurity() throws IOException {
if (GeneralUtils.isVersionHigher(
"0.46.0", applicationProperties.getAutomaticallyGenerated().getAppVersion())) {
Boolean csrf = applicationProperties.getSecurity().getCsrfDisabled();
if (!csrf) {
GeneralUtils.saveKeyToSettings("security.csrfDisabled", false);
GeneralUtils.saveKeyToSettings("system.enableAnalytics", true);
applicationProperties.getSecurity().setCsrfDisabled(false);
}
}
}
public void initLegalUrls() throws IOException {
// Initialize Terms and Conditions
String termsUrl = applicationProperties.getLegal().getTermsAndConditions();
@ -95,7 +82,7 @@ public class InitialSetup {
isNewServer =
existingVersion == null
|| existingVersion.isEmpty()
|| existingVersion.equals("0.0.0");
|| "0.0.0".equals(existingVersion);
String appVersion = "0.0.0";
Resource resource = new ClassPathResource("version.properties");
@ -107,6 +94,7 @@ public class InitialSetup {
}
GeneralUtils.saveKeyToSettings("AutomaticallyGenerated.appVersion", appVersion);
applicationProperties.getAutomaticallyGenerated().setAppVersion(appVersion);
applicationProperties.getAutomaticallyGenerated().setIsNewServer(isNewServer);
}
public static boolean isNewServer() {

View File

@ -62,10 +62,15 @@ public class OpenApiConfig {
// Add server configuration from environment variable
String swaggerServerUrl = System.getenv("SWAGGER_SERVER_URL");
Server server;
if (swaggerServerUrl != null && !swaggerServerUrl.trim().isEmpty()) {
Server server = new Server().url(swaggerServerUrl).description("API Server");
openAPI.addServersItem(server);
server = new Server().url(swaggerServerUrl).description("API Server");
} else {
// Use relative path so Swagger uses the current browser origin to avoid CORS issues
// when accessing via different ports
server = new Server().url("/").description("Current Server");
}
openAPI.addServersItem(server);
// Add ErrorResponse schema to components
Schema<?> errorResponseSchema =

View File

@ -1,10 +1,14 @@
package stirling.software.SPDF.config;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.CacheControl;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import lombok.RequiredArgsConstructor;
@ -25,6 +29,20 @@ public class WebMvcConfig implements WebMvcConfigurer {
registry.addInterceptor(endpointInterceptor);
}
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
// Cache hashed assets (JS/CSS with content hashes) for 1 year
// These files have names like index-ChAS4tCC.js that change when content changes
registry.addResourceHandler("/assets/**")
.addResourceLocations("classpath:/static/assets/")
.setCacheControl(CacheControl.maxAge(365, TimeUnit.DAYS).cachePublic());
// Don't cache index.html - it needs to be fresh to reference latest hashed assets
registry.addResourceHandler("/index.html")
.addResourceLocations("classpath:/static/")
.setCacheControl(CacheControl.noCache().mustRevalidate());
}
@Override
public void addCorsMappings(CorsRegistry registry) {
// Check if running in Tauri mode

View File

@ -124,7 +124,6 @@ public class SettingsController {
ApplicationProperties.Security security = applicationProperties.getSecurity();
settings.put("enableLogin", security.getEnableLogin());
settings.put("csrfDisabled", security.getCsrfDisabled());
settings.put("loginMethod", security.getLoginMethod());
settings.put("loginAttemptCount", security.getLoginAttemptCount());
settings.put("loginResetTimeMinutes", security.getLoginResetTimeMinutes());
@ -159,12 +158,6 @@ public class SettingsController {
.getSecurity()
.setEnableLogin((Boolean) settings.get("enableLogin"));
}
if (settings.containsKey("csrfDisabled")) {
GeneralUtils.saveKeyToSettings("security.csrfDisabled", settings.get("csrfDisabled"));
applicationProperties
.getSecurity()
.setCsrfDisabled((Boolean) settings.get("csrfDisabled"));
}
if (settings.containsKey("loginMethod")) {
GeneralUtils.saveKeyToSettings("security.loginMethod", settings.get("loginMethod"));
applicationProperties

View File

@ -31,12 +31,10 @@ import stirling.software.common.model.api.PDFFile;
import stirling.software.common.service.JobOwnershipService;
import stirling.software.common.util.ExceptionUtils;
import stirling.software.common.util.WebResponseUtils;
import stirling.software.proprietary.security.config.PremiumEndpoint;
@Slf4j
@ConvertApi
@RequiredArgsConstructor
@PremiumEndpoint
public class ConvertPdfJsonController {
private final PdfJsonConversionService pdfJsonConversionService;

View File

@ -0,0 +1,60 @@
package stirling.software.SPDF.controller.api.converters;
import java.nio.charset.StandardCharsets;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import stirling.software.SPDF.exception.CacheUnavailableException;
@ControllerAdvice(assignableTypes = ConvertPdfJsonController.class)
@Slf4j
@RequiredArgsConstructor
public class ConvertPdfJsonExceptionHandler {
private final ObjectMapper objectMapper;
@ExceptionHandler(CacheUnavailableException.class)
@ResponseBody
public ResponseEntity<byte[]> handleCacheUnavailable(CacheUnavailableException ex) {
try {
byte[] body =
objectMapper.writeValueAsBytes(
java.util.Map.of(
"error", "cache_unavailable",
"action", "reupload",
"message", ex.getMessage()));
return ResponseEntity.status(HttpStatus.GONE)
.contentType(MediaType.APPLICATION_JSON)
.body(body);
} catch (Exception e) {
log.warn("Failed to serialize cache_unavailable response", e);
var fallbackBody =
java.util.Map.of(
"error", "cache_unavailable",
"action", "reupload",
"message", String.valueOf(ex.getMessage()));
try {
return ResponseEntity.status(HttpStatus.GONE)
.contentType(MediaType.APPLICATION_JSON)
.body(objectMapper.writeValueAsBytes(fallbackBody));
} catch (Exception ignored) {
// Truly last-ditch fallback
return ResponseEntity.status(HttpStatus.GONE)
.contentType(MediaType.APPLICATION_JSON)
.body(
"{\"error\":\"cache_unavailable\",\"action\":\"reupload\",\"message\":\"Cache unavailable\"}"
.getBytes(StandardCharsets.UTF_8));
}
}
}
}

View File

@ -28,10 +28,13 @@ import org.apache.pdfbox.pdmodel.PDResources;
import org.apache.pdfbox.pdmodel.graphics.PDXObject;
import org.apache.pdfbox.pdmodel.graphics.form.PDFormXObject;
import org.apache.pdfbox.pdmodel.graphics.image.PDImageXObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.server.ResponseStatusException;
import io.swagger.v3.oas.annotations.Operation;
@ -44,6 +47,7 @@ import stirling.software.SPDF.model.api.misc.OptimizePdfRequest;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.annotations.api.MiscApi;
import stirling.software.common.service.CustomPDFDocumentFactory;
import stirling.software.common.service.LineArtConversionService;
import stirling.software.common.util.ExceptionUtils;
import stirling.software.common.util.GeneralUtils;
import stirling.software.common.util.ProcessExecutor;
@ -58,6 +62,9 @@ public class CompressController {
private final CustomPDFDocumentFactory pdfDocumentFactory;
private final EndpointConfiguration endpointConfiguration;
@Autowired(required = false)
private LineArtConversionService lineArtConversionService;
private boolean isQpdfEnabled() {
return endpointConfiguration.isGroupEnabled("qpdf");
}
@ -66,6 +73,10 @@ public class CompressController {
return endpointConfiguration.isGroupEnabled("Ghostscript");
}
private boolean isImageMagickEnabled() {
return endpointConfiguration.isGroupEnabled("ImageMagick");
}
@Data
@AllArgsConstructor
@NoArgsConstructor
@ -660,6 +671,9 @@ public class CompressController {
Integer optimizeLevel = request.getOptimizeLevel();
String expectedOutputSizeString = request.getExpectedOutputSize();
Boolean convertToGrayscale = request.getGrayscale();
Boolean convertToLineArt = request.getLineArt();
Double lineArtThreshold = request.getLineArtThreshold();
Integer lineArtEdgeLevel = request.getLineArtEdgeLevel();
if (expectedOutputSizeString == null && optimizeLevel == null) {
throw new Exception("Both expected output size and optimize level are not specified");
}
@ -689,6 +703,26 @@ public class CompressController {
optimizeLevel = determineOptimizeLevel(sizeReductionRatio);
}
if (Boolean.TRUE.equals(convertToLineArt)) {
if (lineArtConversionService == null) {
throw new ResponseStatusException(
HttpStatus.FORBIDDEN,
"Line art conversion is unavailable - ImageMagick service not found");
}
if (!isImageMagickEnabled()) {
throw new IOException(
"ImageMagick is not enabled but line art conversion was requested");
}
double thresholdValue =
lineArtThreshold == null
? 55d
: Math.min(100d, Math.max(0d, lineArtThreshold));
int edgeLevel =
lineArtEdgeLevel == null ? 1 : Math.min(3, Math.max(1, lineArtEdgeLevel));
currentFile =
applyLineArtConversion(currentFile, tempFiles, thresholdValue, edgeLevel);
}
boolean sizeMet = false;
boolean imageCompressionApplied = false;
boolean externalCompressionApplied = false;
@ -810,6 +844,75 @@ public class CompressController {
}
}
private Path applyLineArtConversion(
Path currentFile, List<Path> tempFiles, double threshold, int edgeLevel)
throws IOException {
Path lineArtFile = Files.createTempFile("lineart_output_", ".pdf");
tempFiles.add(lineArtFile);
try (PDDocument doc = pdfDocumentFactory.load(currentFile.toFile())) {
Map<String, List<ImageReference>> uniqueImages = findImages(doc);
CompressionStats stats = new CompressionStats();
stats.uniqueImagesCount = uniqueImages.size();
calculateImageStats(uniqueImages, stats);
Map<String, PDImageXObject> convertedImages =
createLineArtImages(doc, uniqueImages, stats, threshold, edgeLevel);
replaceImages(doc, uniqueImages, convertedImages, stats);
log.info(
"Applied line art conversion to {} unique images ({} total references)",
stats.uniqueImagesCount,
stats.totalImages);
doc.save(lineArtFile.toString());
return lineArtFile;
}
}
private Map<String, PDImageXObject> createLineArtImages(
PDDocument doc,
Map<String, List<ImageReference>> uniqueImages,
CompressionStats stats,
double threshold,
int edgeLevel)
throws IOException {
Map<String, PDImageXObject> convertedImages = new HashMap<>();
for (Entry<String, List<ImageReference>> entry : uniqueImages.entrySet()) {
String imageHash = entry.getKey();
List<ImageReference> references = entry.getValue();
if (references.isEmpty()) continue;
PDImageXObject originalImage = getOriginalImage(doc, references.get(0));
int originalSize = (int) originalImage.getCOSObject().getLength();
stats.totalOriginalBytes += originalSize;
PDImageXObject converted =
lineArtConversionService.convertImageToLineArt(
doc, originalImage, threshold, edgeLevel);
convertedImages.put(imageHash, converted);
stats.compressedImages++;
int convertedSize = (int) converted.getCOSObject().getLength();
stats.totalCompressedBytes += convertedSize * references.size();
double reductionPercentage = 100.0 - ((convertedSize * 100.0) / originalSize);
log.info(
"Image hash {}: Line art conversion {} → {} (reduced by {}%)",
imageHash,
GeneralUtils.formatBytes(originalSize),
GeneralUtils.formatBytes(convertedSize),
String.format("%.1f", reductionPercentage));
}
return convertedImages;
}
// Run Ghostscript compression
private void applyGhostscriptCompression(
OptimizePdfRequest request, int optimizeLevel, Path currentFile, List<Path> tempFiles)

View File

@ -74,6 +74,7 @@ public class ConfigController {
configData.put("appNameNavbar", applicationProperties.getUi().getAppNameNavbar());
configData.put("languages", applicationProperties.getUi().getLanguages());
configData.put("logoStyle", applicationProperties.getUi().getLogoStyle());
configData.put("defaultLocale", applicationProperties.getSystem().getDefaultLocale());
// Security settings
// enableLogin requires both the config flag AND proprietary features to be loaded
@ -123,6 +124,9 @@ public class ConfigController {
"enableAnalytics", applicationProperties.getSystem().getEnableAnalytics());
configData.put("enablePosthog", applicationProperties.getSystem().getEnablePosthog());
configData.put("enableScarf", applicationProperties.getSystem().getEnableScarf());
configData.put(
"enableDesktopInstallSlide",
applicationProperties.getSystem().getEnableDesktopInstallSlide());
// Premium/Enterprise settings
configData.put("premiumEnabled", applicationProperties.getPremium().isEnabled());
@ -226,4 +230,10 @@ public class ConfigController {
}
return ResponseEntity.ok(result);
}
/**
 * Reports whether the named endpoint group is enabled.
 *
 * @param group the endpoint group name to check
 * @return 200 OK with {@code true} when the group is enabled, {@code false} otherwise
 */
@GetMapping("/group-enabled")
public ResponseEntity<Boolean> isGroupEnabled(@RequestParam(name = "group") String group) {
    // Straight delegation to the endpoint configuration lookup.
    return ResponseEntity.ok(endpointConfiguration.isGroupEnabled(group));
}
}

View File

@ -191,6 +191,12 @@ public class CertSignController {
switch (certType) {
case "PEM":
privateKeyFile =
validateFilePresent(
privateKeyFile, "PEM private key", "private key file is required");
certFile =
validateFilePresent(
certFile, "PEM certificate", "certificate file is required");
ks = KeyStore.getInstance("JKS");
ks.load(null);
PrivateKey privateKey = getPrivateKeyFromPEM(privateKeyFile.getBytes(), password);
@ -200,10 +206,16 @@ public class CertSignController {
break;
case "PKCS12":
case "PFX":
p12File =
validateFilePresent(
p12File, "PKCS12 keystore", "PKCS12/PFX keystore file is required");
ks = KeyStore.getInstance("PKCS12");
ks.load(p12File.getInputStream(), password.toCharArray());
break;
case "JKS":
jksfile =
validateFilePresent(
jksfile, "JKS keystore", "JKS keystore file is required");
ks = KeyStore.getInstance("JKS");
ks.load(jksfile.getInputStream(), password.toCharArray());
break;
@ -251,6 +263,17 @@ public class CertSignController {
GeneralUtils.generateFilename(pdf.getOriginalFilename(), "_signed.pdf"));
}
/**
 * Ensures a multipart upload was actually supplied.
 *
 * @param file the uploaded part to validate
 * @param argumentName human-readable name of the argument (used in the error message)
 * @param errorDescription explanation appended to the error message
 * @return the same {@code file} when it is present and non-empty
 */
private MultipartFile validateFilePresent(
        MultipartFile file, String argumentName, String errorDescription) {
    // A missing part and a zero-byte upload are both treated as "not provided".
    boolean missing = (file == null) || file.isEmpty();
    if (missing) {
        throw ExceptionUtils.createIllegalArgumentException(
                "error.invalidArgument",
                "Invalid argument: {0}",
                argumentName + " - " + errorDescription);
    }
    return file;
}
private PrivateKey getPrivateKeyFromPEM(byte[] pemBytes, String password)
throws IOException, OperatorCreationException, PKCSException {
try (PEMParser pemParser =

View File

@ -11,6 +11,7 @@ import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import jakarta.annotation.PostConstruct;
import jakarta.servlet.http.HttpServletRequest;
@Controller
@ -19,10 +20,25 @@ public class ReactRoutingController {
@Value("${server.servlet.context-path:/}")
private String contextPath;
@GetMapping(
value = {"/", "/index.html"},
produces = MediaType.TEXT_HTML_VALUE)
public ResponseEntity<String> serveIndexHtml(HttpServletRequest request) throws IOException {
private String cachedIndexHtml;
private boolean indexHtmlExists = false;
@PostConstruct
public void init() {
    // Only production builds ship static/index.html; skip caching when it is absent.
    ClassPathResource indexResource = new ClassPathResource("static/index.html");
    if (!indexResource.exists()) {
        return;
    }
    try {
        this.cachedIndexHtml = processIndexHtml();
        this.indexHtmlExists = true;
    } catch (IOException e) {
        // Caching failed; fall back to processing the page on every request.
        this.indexHtmlExists = false;
    }
}
private String processIndexHtml() throws IOException {
ClassPathResource resource = new ClassPathResource("static/index.html");
try (InputStream inputStream = resource.getInputStream()) {
@ -42,18 +58,29 @@ public class ReactRoutingController {
"<script>window.STIRLING_PDF_API_BASE_URL = '" + baseUrl + "';</script>";
html = html.replace("</head>", contextPathScript + "</head>");
return ResponseEntity.ok().contentType(MediaType.TEXT_HTML).body(html);
return html;
}
}
@GetMapping(
"/{path:^(?!api|static|robots\\.txt|favicon\\.ico|manifest.*\\.json|pipeline|pdfjs|pdfjs-legacy|fonts|images|files|css|js|assets|locales|modern-logo|classic-logo|Login|og_images|samples)[^\\.]*$}")
value = {"/", "/index.html"},
produces = MediaType.TEXT_HTML_VALUE)
public ResponseEntity<String> serveIndexHtml(HttpServletRequest request) throws IOException {
if (indexHtmlExists && cachedIndexHtml != null) {
return ResponseEntity.ok().contentType(MediaType.TEXT_HTML).body(cachedIndexHtml);
}
// Fallback: process on each request (dev mode or cache failed)
return ResponseEntity.ok().contentType(MediaType.TEXT_HTML).body(processIndexHtml());
}
@GetMapping(
"/{path:^(?!api|static|robots\\.txt|favicon\\.ico|manifest.*\\.json|pipeline|pdfjs|pdfjs-legacy|pdfium|fonts|images|files|css|js|assets|locales|modern-logo|classic-logo|Login|og_images|samples)[^\\.]*$}")
public ResponseEntity<String> forwardRootPaths(HttpServletRequest request) throws IOException {
return serveIndexHtml(request);
}
@GetMapping(
"/{path:^(?!api|static|pipeline|pdfjs|pdfjs-legacy|fonts|images|files|css|js|assets|locales|modern-logo|classic-logo|Login|og_images|samples)[^\\.]*}/{subpath:^(?!.*\\.).*$}")
"/{path:^(?!api|static|pipeline|pdfjs|pdfjs-legacy|pdfium|fonts|images|files|css|js|assets|locales|modern-logo|classic-logo|Login|og_images|samples)[^\\.]*}/{subpath:^(?!.*\\.).*$}")
public ResponseEntity<String> forwardNestedPaths(HttpServletRequest request)
throws IOException {
return serveIndexHtml(request);

View File

@ -0,0 +1,8 @@
package stirling.software.SPDF.exception;
/**
 * Thrown when a previously cached PDF document is requested but no longer
 * available (evicted, expired, or never created for the given job id).
 *
 * <p>Unchecked so cache-touching call sites can surface it at the API boundary
 * without declaring it on every method.
 */
public class CacheUnavailableException extends RuntimeException {

    /**
     * @param message description of which cache entry was unavailable
     */
    public CacheUnavailableException(String message) {
        super(message);
    }

    /**
     * @param message description of which cache entry was unavailable
     * @param cause underlying failure (e.g. an I/O error reading a disk-backed entry)
     */
    public CacheUnavailableException(String message, Throwable cause) {
        super(message, cause);
    }
}

View File

@ -45,4 +45,26 @@ public class OptimizePdfRequest extends PDFFile {
requiredMode = Schema.RequiredMode.REQUIRED,
defaultValue = "false")
private Boolean grayscale = false;
@Schema(
description =
"Whether to convert images to high-contrast line art using ImageMagick. Default is false.",
requiredMode = Schema.RequiredMode.NOT_REQUIRED,
defaultValue = "false")
private Boolean lineArt = false;
@Schema(
description = "Threshold to use for line art conversion (0-100).",
requiredMode = Schema.RequiredMode.NOT_REQUIRED,
defaultValue = "55")
private Double lineArtThreshold = 55d;
@Schema(
description =
"Edge detection strength to use for line art conversion (1-3). This maps to"
+ " ImageMagick's -edge radius.",
requiredMode = Schema.RequiredMode.NOT_REQUIRED,
defaultValue = "1",
allowableValues = {"1", "2", "3"})
private Integer lineArtEdgeLevel = 1;
}

View File

@ -86,7 +86,6 @@ import org.apache.pdfbox.text.PDFTextStripper;
import org.apache.pdfbox.text.TextPosition;
import org.apache.pdfbox.util.DateConverter;
import org.apache.pdfbox.util.Matrix;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
@ -144,15 +143,23 @@ public class PdfJsonConversionService {
private final PdfJsonFontService fontService;
private final Type3FontConversionService type3FontConversionService;
private final Type3GlyphExtractor type3GlyphExtractor;
private final stirling.software.common.model.ApplicationProperties applicationProperties;
private final Map<String, PDFont> type3NormalizedFontCache = new ConcurrentHashMap<>();
private final Map<String, Set<Integer>> type3GlyphCoverageCache = new ConcurrentHashMap<>();
@Value("${stirling.pdf.json.font-normalization.enabled:true}")
private boolean fontNormalizationEnabled;
private long cacheMaxBytes;
private int cacheMaxPercent;
/** Cache for storing PDDocuments for lazy page loading. Key is jobId. */
private final Map<String, CachedPdfDocument> documentCache = new ConcurrentHashMap<>();
private final java.util.LinkedHashMap<String, CachedPdfDocument> lruCache =
new java.util.LinkedHashMap<>(16, 0.75f, true);
private final Object cacheLock = new Object();
private volatile long currentCacheBytes = 0L;
private volatile long cacheBudgetBytes = -1L;
private volatile boolean ghostscriptAvailable;
private static final float FLOAT_EPSILON = 0.0001f;
@ -161,7 +168,23 @@ public class PdfJsonConversionService {
@PostConstruct
private void initializeToolAvailability() {
loadConfigurationFromProperties();
initializeGhostscriptAvailability();
initializeCacheBudget();
}
/**
 * Copies PdfEditor settings (font normalization, cache limits) from the
 * application properties into this service's fields.
 *
 * <p>When no PdfEditor configuration exists, conservative defaults are used:
 * normalization off, no absolute byte limit (-1), and a 20% heap cache budget.
 */
private void loadConfigurationFromProperties() {
    stirling.software.common.model.ApplicationProperties.PdfEditor cfg =
            applicationProperties.getPdfEditor();
    if (cfg != null) {
        fontNormalizationEnabled = cfg.getFontNormalization().isEnabled();
        cacheMaxBytes = cfg.getCache().getMaxBytes();
        cacheMaxPercent = cfg.getCache().getMaxPercent();
    } else {
        // Fallback defaults when the PdfEditor section is absent.
        fontNormalizationEnabled = false;
        cacheMaxBytes = -1;
        cacheMaxPercent = 20;
    }
}
private void initializeGhostscriptAvailability() {
@ -202,6 +225,25 @@ public class PdfJsonConversionService {
}
}
/**
 * Derives the effective cache budget from configuration: an absolute byte
 * limit takes precedence; otherwise a percentage of the JVM max heap is used;
 * when neither is positive the cache is treated as unlimited.
 */
private void initializeCacheBudget() {
    long budget = -1L;
    if (cacheMaxBytes > 0) {
        budget = cacheMaxBytes;
    } else if (cacheMaxPercent > 0) {
        long heapLimit = Runtime.getRuntime().maxMemory();
        budget = Math.max(0L, (heapLimit * cacheMaxPercent) / 100);
    }
    cacheBudgetBytes = budget;
    if (cacheBudgetBytes <= 0) {
        log.info("PDF JSON cache budget: unlimited");
        return;
    }
    log.info(
            "PDF JSON cache budget configured: {} bytes (source: {})",
            cacheBudgetBytes,
            cacheMaxBytes > 0 ? "max-bytes" : "max-percent");
}
public byte[] convertPdfToJson(MultipartFile file) throws IOException {
return convertPdfToJson(file, null, false);
}
@ -236,7 +278,10 @@ public class PdfJsonConversionService {
log.debug("Generated synthetic jobId for synchronous conversion: {}", jobId);
} else {
jobId = contextJobId;
log.debug("Starting PDF to JSON conversion, jobId from context: {}", jobId);
log.info(
"Starting PDF to JSON conversion, jobId from context: {} (lightweight={})",
jobId,
lightweight);
}
Consumer<PdfJsonConversionProgress> progress =
@ -318,9 +363,9 @@ public class PdfJsonConversionService {
try (PDDocument document = pdfDocumentFactory.load(workingPath, true)) {
int totalPages = document.getNumberOfPages();
// Only use lazy images for real async jobs where client can access the cache
// Synchronous calls with synthetic jobId should do full extraction
boolean useLazyImages = totalPages > 5 && isRealJobId;
// Always enable lazy mode for real async jobs so cache is available regardless of
// page count. Synchronous calls with synthetic jobId still do full extraction.
boolean useLazyImages = isRealJobId;
Map<COSBase, FontModelCacheEntry> fontCache = new IdentityHashMap<>();
Map<COSBase, EncodedImage> imageCache = new IdentityHashMap<>();
log.debug(
@ -403,6 +448,11 @@ public class PdfJsonConversionService {
// Only cache for real async jobIds, not synthetic synchronous ones
if (useLazyImages && isRealJobId) {
log.info(
"Creating cache for jobId: {} (useLazyImages={}, isRealJobId={})",
jobId,
useLazyImages,
isRealJobId);
PdfJsonDocumentMetadata docMetadata = new PdfJsonDocumentMetadata();
docMetadata.setMetadata(pdfJson.getMetadata());
docMetadata.setXmpMetadata(pdfJson.getXmpMetadata());
@ -435,16 +485,23 @@ public class PdfJsonConversionService {
cachedPdfBytes = Files.readAllBytes(workingPath);
}
CachedPdfDocument cached =
new CachedPdfDocument(
cachedPdfBytes, docMetadata, fonts, pageFontResources);
documentCache.put(jobId, cached);
log.debug(
"Cached PDF bytes ({} bytes, {} pages, {} fonts) for lazy images, jobId: {}",
cachedPdfBytes.length,
buildCachedDocument(
jobId, cachedPdfBytes, docMetadata, fonts, pageFontResources);
putCachedDocument(jobId, cached);
log.info(
"Successfully cached PDF ({} bytes, {} pages, {} fonts) for jobId: {} (diskBacked={})",
cached.getPdfSize(),
totalPages,
fonts.size(),
jobId);
jobId,
cached.isDiskBacked());
scheduleDocumentCleanup(jobId);
} else {
log.warn(
"Skipping cache creation: useLazyImages={}, isRealJobId={}, jobId={}",
useLazyImages,
isRealJobId,
jobId);
}
if (lightweight) {
@ -2973,6 +3030,139 @@ public class PdfJsonConversionService {
}
}
// Cache helpers
/**
 * Builds a cache entry for the given PDF bytes.
 *
 * <p>If the document alone exceeds the configured budget, the bytes are written
 * straight to a temp file and the entry is disk-backed (no in-memory copy);
 * otherwise the bytes are held in memory.
 *
 * @param jobId job identifier (used only for logging here)
 * @param pdfBytes rendered PDF bytes; must not be null
 * @param metadata document-level metadata cached alongside the bytes
 * @param fonts font models keyed by UID
 * @param pageFontResources per-page font resource name mappings
 * @return a new memory- or disk-backed {@code CachedPdfDocument}
 * @throws IOException if writing the disk spill file fails
 */
private CachedPdfDocument buildCachedDocument(
        String jobId,
        byte[] pdfBytes,
        PdfJsonDocumentMetadata metadata,
        Map<String, PdfJsonFont> fonts,
        Map<Integer, Map<PDFont, String>> pageFontResources)
        throws IOException {
    if (pdfBytes == null) {
        throw new IllegalArgumentException("pdfBytes must not be null");
    }
    // Snapshot the volatile budget once so the comparison and the log line agree.
    long budget = cacheBudgetBytes;
    // If single document is larger than budget, spill straight to disk
    if (budget > 0 && pdfBytes.length > budget) {
        TempFile tempFile = new TempFile(tempFileManager, ".pdfjsoncache");
        Files.write(tempFile.getPath(), pdfBytes);
        log.debug(
                "Cached PDF spilled to disk ({} bytes exceeds budget {}) for jobId {}",
                pdfBytes.length,
                budget,
                jobId);
        return new CachedPdfDocument(
                null, tempFile, pdfBytes.length, metadata, fonts, pageFontResources);
    }
    return new CachedPdfDocument(
            pdfBytes, null, pdfBytes.length, metadata, fonts, pageFontResources);
}
/**
 * Inserts (or replaces) the cache entry for {@code jobId}, refreshing the LRU
 * order and the in-memory byte accounting, then enforces the cache budget.
 */
private void putCachedDocument(String jobId, CachedPdfDocument cached) {
    synchronized (cacheLock) {
        CachedPdfDocument existing = documentCache.put(jobId, cached);
        if (existing != null) {
            // Replacing an entry: undo its accounting and release its backing storage.
            lruCache.remove(jobId);
            currentCacheBytes = Math.max(0L, currentCacheBytes - existing.getInMemorySize());
            existing.close();
        }
        lruCache.put(jobId, cached);
        currentCacheBytes += cached.getInMemorySize();
        // May evict or spill other entries; must run while holding cacheLock.
        enforceCacheBudget();
    }
}
/**
 * Looks up a cache entry and, on a hit, moves it to the most-recently-used
 * position so budget enforcement evicts it last.
 *
 * @param jobId job identifier to look up
 * @return the cached document, or {@code null} when absent
 */
private CachedPdfDocument getCachedDocument(String jobId) {
    synchronized (cacheLock) {
        CachedPdfDocument hit = documentCache.get(jobId);
        if (hit == null) {
            return null;
        }
        // Re-insert to refresh this entry's LRU position.
        lruCache.remove(jobId);
        lruCache.put(jobId, hit);
        return hit;
    }
}
/**
 * Shrinks the cache until the in-memory total fits within
 * {@code cacheBudgetBytes}.
 *
 * <p>First evicts entries in least-recently-used order; if still over budget
 * with entries remaining, attempts to spill the most recently used in-memory
 * entry to disk rather than dropping it. Must be called while holding
 * {@code cacheLock}. A non-positive budget disables enforcement.
 */
private void enforceCacheBudget() {
    if (cacheBudgetBytes <= 0) {
        return;
    }
    // Must be called under cacheLock
    java.util.Iterator<java.util.Map.Entry<String, CachedPdfDocument>> it =
            lruCache.entrySet().iterator();
    // lruCache is access-ordered, so iteration visits the coldest entries first.
    while (currentCacheBytes > cacheBudgetBytes && it.hasNext()) {
        java.util.Map.Entry<String, CachedPdfDocument> entry = it.next();
        it.remove();
        CachedPdfDocument removed = entry.getValue();
        // Conditional remove: only drop the mapping if it still points at this instance.
        documentCache.remove(entry.getKey(), removed);
        currentCacheBytes = Math.max(0L, currentCacheBytes - removed.getInMemorySize());
        removed.close();
        log.warn(
                "Evicted cached PDF for jobId {} to enforce cache budget (budget={} bytes, current={} bytes)",
                entry.getKey(),
                cacheBudgetBytes,
                currentCacheBytes);
    }
    if (currentCacheBytes > cacheBudgetBytes && !lruCache.isEmpty()) {
        // Spill the most recently used large entry to disk
        String key =
                lruCache.entrySet().stream()
                        .reduce((first, second) -> second)
                        .map(java.util.Map.Entry::getKey)
                        .orElse(null);
        if (key != null) {
            CachedPdfDocument doc = lruCache.get(key);
            if (doc != null && doc.getInMemorySize() > 0) {
                try {
                    // Rebuild the entry; buildCachedDocument decides memory vs disk.
                    // NOTE(review): buildCachedDocument only disk-spills when the single
                    // entry exceeds the whole budget, so a smaller entry is rebuilt
                    // in memory and this branch frees nothing — confirm intended.
                    CachedPdfDocument diskDoc =
                            buildCachedDocument(
                                    key,
                                    doc.getPdfBytes(),
                                    doc.getMetadata(),
                                    doc.getFonts(),
                                    doc.getPageFontResources());
                    lruCache.put(key, diskDoc);
                    documentCache.put(key, diskDoc);
                    currentCacheBytes =
                            Math.max(0L, currentCacheBytes - doc.getInMemorySize())
                                    + diskDoc.getInMemorySize();
                    doc.close();
                    log.debug("Spilled cached PDF for jobId {} to disk to satisfy budget", key);
                } catch (IOException ex) {
                    log.warn(
                            "Failed to spill cached PDF for jobId {} to disk: {}",
                            key,
                            ex.getMessage());
                }
            }
        }
    }
}
/**
 * Removes and closes the cache entry for {@code jobId}, updating the LRU map
 * and the in-memory byte accounting. Closing happens outside the lock so a
 * slow temp-file cleanup cannot block other cache operations.
 */
private void removeCachedDocument(String jobId) {
    // Fix: routine removals were logged at WARN with an unconditional stack-trace
    // capture — leftover debugging. Stack-trace capture is expensive, so guard it
    // and log at DEBUG instead.
    if (log.isDebugEnabled()) {
        log.debug(
                "removeCachedDocument called for jobId: {} [CALLER: {}]",
                jobId,
                Thread.currentThread().getStackTrace()[2].toString());
    }
    CachedPdfDocument removed = null;
    synchronized (cacheLock) {
        removed = documentCache.remove(jobId);
        if (removed != null) {
            lruCache.remove(jobId);
            currentCacheBytes = Math.max(0L, currentCacheBytes - removed.getInMemorySize());
            log.debug(
                    "Removed cached document for jobId: {} (size={} bytes)",
                    jobId,
                    removed.getInMemorySize());
        } else {
            log.debug("Attempted to remove jobId: {} but it was not in cache", jobId);
        }
    }
    // Release the backing (e.g. disk spill file) outside the lock.
    if (removed != null) {
        removed.close();
    }
}
private void applyTextState(PDPageContentStream contentStream, PdfJsonTextElement element)
throws IOException {
if (element.getCharacterSpacing() != null) {
@ -5311,6 +5501,8 @@ public class PdfJsonConversionService {
*/
private static class CachedPdfDocument {
private final byte[] pdfBytes;
private final TempFile pdfTempFile;
private final long pdfSize;
private final PdfJsonDocumentMetadata metadata;
private final Map<String, PdfJsonFont> fonts; // Font map with UIDs for consistency
private final Map<Integer, Map<PDFont, String>> pageFontResources; // Page font resources
@ -5318,10 +5510,14 @@ public class PdfJsonConversionService {
public CachedPdfDocument(
byte[] pdfBytes,
TempFile pdfTempFile,
long pdfSize,
PdfJsonDocumentMetadata metadata,
Map<String, PdfJsonFont> fonts,
Map<Integer, Map<PDFont, String>> pageFontResources) {
this.pdfBytes = pdfBytes;
this.pdfTempFile = pdfTempFile;
this.pdfSize = pdfSize;
this.metadata = metadata;
// Create defensive copies to prevent mutation of shared maps
this.fonts =
@ -5336,8 +5532,14 @@ public class PdfJsonConversionService {
}
// Getters return defensive copies to prevent external mutation
public byte[] getPdfBytes() {
return pdfBytes;
/**
 * Returns the cached PDF bytes, serving from memory when available and
 * otherwise re-reading the disk spill file.
 *
 * @throws IOException if neither backing exists or the spill file cannot be read
 */
public byte[] getPdfBytes() throws IOException {
    if (pdfBytes == null) {
        if (pdfTempFile == null) {
            throw new IOException("Cached PDF backing missing");
        }
        // Disk-backed entry: read the spilled bytes back on each request.
        return Files.readAllBytes(pdfTempFile.getPath());
    }
    return pdfBytes;
}
public PdfJsonDocumentMetadata getMetadata() {
@ -5352,6 +5554,18 @@ public class PdfJsonConversionService {
return new java.util.concurrent.ConcurrentHashMap<>(pageFontResources);
}
public long getPdfSize() {
return pdfSize;
}
public long getInMemorySize() {
return pdfBytes != null ? pdfBytes.length : 0L;
}
public boolean isDiskBacked() {
return pdfBytes == null && pdfTempFile != null;
}
public long getTimestamp() {
return timestamp;
}
@ -5363,7 +5577,19 @@ public class PdfJsonConversionService {
public CachedPdfDocument withUpdatedFonts(
byte[] nextBytes, Map<String, PdfJsonFont> nextFonts) {
Map<String, PdfJsonFont> fontsToUse = nextFonts != null ? nextFonts : this.fonts;
return new CachedPdfDocument(nextBytes, metadata, fontsToUse, pageFontResources);
return new CachedPdfDocument(
nextBytes,
null,
nextBytes != null ? nextBytes.length : 0,
metadata,
fontsToUse,
pageFontResources);
}
public void close() {
if (pdfTempFile != null) {
pdfTempFile.close();
}
}
}
@ -5444,14 +5670,15 @@ public class PdfJsonConversionService {
// Cache PDF bytes, metadata, and fonts for lazy page loading
if (jobId != null) {
CachedPdfDocument cached =
new CachedPdfDocument(pdfBytes, docMetadata, fonts, pageFontResources);
documentCache.put(jobId, cached);
buildCachedDocument(jobId, pdfBytes, docMetadata, fonts, pageFontResources);
putCachedDocument(jobId, cached);
log.debug(
"Cached PDF bytes ({} bytes, {} pages, {} fonts) for lazy loading, jobId: {}",
pdfBytes.length,
"Cached PDF bytes ({} bytes, {} pages, {} fonts) for lazy loading, jobId: {} (diskBacked={})",
cached.getPdfSize(),
totalPages,
fonts.size(),
jobId);
jobId,
cached.isDiskBacked());
// Schedule cleanup after 30 minutes
scheduleDocumentCleanup(jobId);
@ -5466,9 +5693,10 @@ public class PdfJsonConversionService {
/** Extracts a single page from cached PDF bytes. Re-loads the PDF for each request. */
public byte[] extractSinglePage(String jobId, int pageNumber) throws IOException {
CachedPdfDocument cached = documentCache.get(jobId);
CachedPdfDocument cached = getCachedDocument(jobId);
if (cached == null) {
throw new IllegalArgumentException("No cached document found for jobId: " + jobId);
throw new stirling.software.SPDF.exception.CacheUnavailableException(
"No cached document found for jobId: " + jobId);
}
int pageIndex = pageNumber - 1;
@ -5480,8 +5708,8 @@ public class PdfJsonConversionService {
}
log.debug(
"Loading PDF from bytes ({} bytes) to extract page {} (jobId: {})",
cached.getPdfBytes().length,
"Loading PDF from {} to extract page {} (jobId: {})",
cached.isDiskBacked() ? "disk cache" : "memory cache",
pageNumber,
jobId);
@ -5627,10 +5855,21 @@ public class PdfJsonConversionService {
if (jobId == null || jobId.isBlank()) {
throw new IllegalArgumentException("jobId is required for incremental export");
}
CachedPdfDocument cached = documentCache.get(jobId);
log.info("Looking up cache for jobId: {}", jobId);
CachedPdfDocument cached = getCachedDocument(jobId);
if (cached == null) {
throw new IllegalArgumentException("No cached document available for jobId: " + jobId);
log.error(
"Cache not found for jobId: {}. Available cache keys: {}",
jobId,
documentCache.keySet());
throw new stirling.software.SPDF.exception.CacheUnavailableException(
"No cached document available for jobId: " + jobId);
}
log.info(
"Found cached document for jobId: {} (size={}, diskBacked={})",
jobId,
cached.getPdfSize(),
cached.isDiskBacked());
if (updates == null || updates.getPages() == null || updates.getPages().isEmpty()) {
log.debug(
"Incremental export requested with no page updates; returning cached PDF for jobId {}",
@ -5709,7 +5948,14 @@ public class PdfJsonConversionService {
document.save(baos);
byte[] updatedBytes = baos.toByteArray();
documentCache.put(jobId, cached.withUpdatedFonts(updatedBytes, mergedFonts));
CachedPdfDocument updated =
buildCachedDocument(
jobId,
updatedBytes,
cached.getMetadata(),
mergedFonts,
cached.getPageFontResources());
putCachedDocument(jobId, updated);
// Clear Type3 cache entries for this incremental update
clearType3CacheEntriesForJob(updateJobId);
@ -5724,11 +5970,13 @@ public class PdfJsonConversionService {
/** Clears a cached document. */
public void clearCachedDocument(String jobId) {
CachedPdfDocument cached = documentCache.remove(jobId);
CachedPdfDocument cached = getCachedDocument(jobId);
removeCachedDocument(jobId);
if (cached != null) {
log.debug(
"Removed cached PDF bytes ({} bytes) for jobId: {}",
cached.getPdfBytes().length,
"Removed cached PDF ({} bytes, diskBacked={}) for jobId: {}",
cached.getPdfSize(),
cached.isDiskBacked(),
jobId);
}

View File

@ -33,8 +33,12 @@ public class PdfJsonFallbackFontService {
public static final String FALLBACK_FONT_CJK_ID = "fallback-noto-cjk";
public static final String FALLBACK_FONT_JP_ID = "fallback-noto-jp";
public static final String FALLBACK_FONT_KR_ID = "fallback-noto-korean";
public static final String FALLBACK_FONT_TC_ID = "fallback-noto-tc";
public static final String FALLBACK_FONT_AR_ID = "fallback-noto-arabic";
public static final String FALLBACK_FONT_TH_ID = "fallback-noto-thai";
public static final String FALLBACK_FONT_DEVANAGARI_ID = "fallback-noto-devanagari";
public static final String FALLBACK_FONT_MALAYALAM_ID = "fallback-noto-malayalam";
public static final String FALLBACK_FONT_TIBETAN_ID = "fallback-noto-tibetan";
// Font name aliases map PDF font names to available fallback fonts
// This provides better visual consistency when editing PDFs
@ -59,6 +63,22 @@ public class PdfJsonFallbackFontService {
Map.entry("dejavuserif", "fallback-dejavu-serif"),
Map.entry("dejavumono", "fallback-dejavu-mono"),
Map.entry("dejavusansmono", "fallback-dejavu-mono"),
// Traditional Chinese fonts (Taiwan, Hong Kong, Macau)
Map.entry("mingliu", "fallback-noto-tc"),
Map.entry("pmingliu", "fallback-noto-tc"),
Map.entry("microsoftjhenghei", "fallback-noto-tc"),
Map.entry("jhenghei", "fallback-noto-tc"),
Map.entry("kaiti", "fallback-noto-tc"),
Map.entry("kaiu", "fallback-noto-tc"),
Map.entry("dfkaib5", "fallback-noto-tc"),
Map.entry("dfkai", "fallback-noto-tc"),
// Simplified Chinese fonts (Mainland China) - more common
Map.entry("simsun", "fallback-noto-cjk"),
Map.entry("simhei", "fallback-noto-cjk"),
Map.entry("microsoftyahei", "fallback-noto-cjk"),
Map.entry("yahei", "fallback-noto-cjk"),
Map.entry("songti", "fallback-noto-cjk"),
Map.entry("heiti", "fallback-noto-cjk"),
// Noto Sans - Google's universal font (use as last resort generic fallback)
Map.entry("noto", "fallback-noto-sans"),
Map.entry("notosans", "fallback-noto-sans"));
@ -83,6 +103,12 @@ public class PdfJsonFallbackFontService {
"classpath:/static/fonts/NotoSansKR-Regular.ttf",
"NotoSansKR-Regular",
"ttf")),
Map.entry(
FALLBACK_FONT_TC_ID,
new FallbackFontSpec(
"classpath:/static/fonts/NotoSansTC-Regular.ttf",
"NotoSansTC-Regular",
"ttf")),
Map.entry(
FALLBACK_FONT_AR_ID,
new FallbackFontSpec(
@ -95,6 +121,24 @@ public class PdfJsonFallbackFontService {
"classpath:/static/fonts/NotoSansThai-Regular.ttf",
"NotoSansThai-Regular",
"ttf")),
Map.entry(
FALLBACK_FONT_DEVANAGARI_ID,
new FallbackFontSpec(
"classpath:/static/fonts/NotoSansDevanagari-Regular.ttf",
"NotoSansDevanagari-Regular",
"ttf")),
Map.entry(
FALLBACK_FONT_MALAYALAM_ID,
new FallbackFontSpec(
"classpath:/static/fonts/NotoSansMalayalam-Regular.ttf",
"NotoSansMalayalam-Regular",
"ttf")),
Map.entry(
FALLBACK_FONT_TIBETAN_ID,
new FallbackFontSpec(
"classpath:/static/fonts/NotoSerifTibetan-Regular.ttf",
"NotoSerifTibetan-Regular",
"ttf")),
// Liberation Sans family
Map.entry(
"fallback-liberation-sans",
@ -268,12 +312,29 @@ public class PdfJsonFallbackFontService {
"ttf")));
private final ResourceLoader resourceLoader;
private final stirling.software.common.model.ApplicationProperties applicationProperties;
@Value("${stirling.pdf.fallback-font:" + DEFAULT_FALLBACK_FONT_LOCATION + "}")
private String legacyFallbackFontLocation;
private String fallbackFontLocation;
private final Map<String, byte[]> fallbackFontCache = new ConcurrentHashMap<>();
@jakarta.annotation.PostConstruct
private void loadConfig() {
    // Prefer the PdfEditor setting; fall back to the legacy property when it is
    // missing or blank.
    var editorConfig = applicationProperties.getPdfEditor();
    String preferred = editorConfig != null ? editorConfig.getFallbackFont() : null;
    boolean usable = preferred != null && !preferred.isBlank();
    fallbackFontLocation = usable ? preferred : legacyFallbackFontLocation;
    log.info("Using fallback font location: {}", fallbackFontLocation);
}
public PdfJsonFont buildFallbackFontModel() throws IOException {
return buildFallbackFontModel(FALLBACK_FONT_ID);
}
@ -484,6 +545,20 @@ public class PdfJsonFallbackFontService {
*/
public String resolveFallbackFontId(int codePoint) {
Character.UnicodeBlock block = Character.UnicodeBlock.of(codePoint);
// Bopomofo is primarily used in Taiwan for Traditional Chinese phonetic annotation
if (block == Character.UnicodeBlock.BOPOMOFO
|| block == Character.UnicodeBlock.BOPOMOFO_EXTENDED) {
return FALLBACK_FONT_TC_ID;
}
// Compatibility ideographs are primarily used by Traditional Chinese encodings (e.g., Big5,
// HKSCS) so prefer the Traditional Chinese fallback here.
if (block == Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS
|| block == Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS_SUPPLEMENT) {
return FALLBACK_FONT_TC_ID;
}
if (block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS
|| block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A
|| block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B
@ -492,19 +567,23 @@ public class PdfJsonFallbackFontService {
|| block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_E
|| block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_F
|| block == Character.UnicodeBlock.CJK_SYMBOLS_AND_PUNCTUATION
|| block == Character.UnicodeBlock.BOPOMOFO
|| block == Character.UnicodeBlock.BOPOMOFO_EXTENDED
|| block == Character.UnicodeBlock.HALFWIDTH_AND_FULLWIDTH_FORMS) {
return FALLBACK_FONT_CJK_ID;
}
Character.UnicodeScript script = Character.UnicodeScript.of(codePoint);
return switch (script) {
// HAN script is used by both Simplified and Traditional Chinese
// Default to Simplified (mainland China, 1.4B speakers) as it's more common
// Traditional Chinese PDFs are detected via font name aliases (MingLiU, PMingLiU, etc.)
case HAN -> FALLBACK_FONT_CJK_ID;
case HIRAGANA, KATAKANA -> FALLBACK_FONT_JP_ID;
case HANGUL -> FALLBACK_FONT_KR_ID;
case ARABIC -> FALLBACK_FONT_AR_ID;
case THAI -> FALLBACK_FONT_TH_ID;
case DEVANAGARI -> FALLBACK_FONT_DEVANAGARI_ID;
case MALAYALAM -> FALLBACK_FONT_MALAYALAM_ID;
case TIBETAN -> FALLBACK_FONT_TIBETAN_ID;
default -> FALLBACK_FONT_ID;
};
}

View File

@ -179,7 +179,7 @@ public class SharedSignatureService {
StandardOpenOption.TRUNCATE_EXISTING);
// Store reference to image file
response.setDataUrl("/api/v1/general/sign/" + imageFileName);
response.setDataUrl("/api/v1/general/signatures/" + imageFileName);
}
log.info("Saved signature {} for user {}", request.getId(), username);
@ -207,7 +207,7 @@ public class SharedSignatureService {
sig.setLabel(id); // Use ID as label
sig.setType("image"); // Default type
sig.setScope("personal");
sig.setDataUrl("/api/v1/general/sign/" + fileName);
sig.setDataUrl("/api/v1/general/signatures/" + fileName);
sig.setCreatedAt(
Files.getLastModifiedTime(path).toMillis());
sig.setUpdatedAt(
@ -238,7 +238,7 @@ public class SharedSignatureService {
sig.setLabel(id); // Use ID as label
sig.setType("image"); // Default type
sig.setScope("shared");
sig.setDataUrl("/api/v1/general/sign/" + fileName);
sig.setDataUrl("/api/v1/general/signatures/" + fileName);
sig.setCreatedAt(
Files.getLastModifiedTime(path).toMillis());
sig.setUpdatedAt(

View File

@ -5,7 +5,6 @@ import java.nio.file.Files;
import java.util.Base64;
import java.util.Locale;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import jakarta.annotation.PostConstruct;
@ -25,22 +24,16 @@ import stirling.software.common.util.TempFileManager;
public class PdfJsonFontService {
private final TempFileManager tempFileManager;
private final stirling.software.common.model.ApplicationProperties applicationProperties;
@Getter
@Value("${stirling.pdf.json.cff-converter.enabled:true}")
private boolean cffConversionEnabled;
@Getter private boolean cffConversionEnabled;
@Getter
@Value("${stirling.pdf.json.cff-converter.method:python}")
private String cffConverterMethod;
@Getter private String cffConverterMethod;
@Value("${stirling.pdf.json.cff-converter.python-command:/opt/venv/bin/python3}")
private String pythonCommand;
@Value("${stirling.pdf.json.cff-converter.python-script:/scripts/convert_cff_to_ttf.py}")
private String pythonScript;
@Value("${stirling.pdf.json.cff-converter.fontforge-command:fontforge}")
private String fontforgeCommand;
private volatile boolean pythonCffConverterAvailable;
@ -48,6 +41,7 @@ public class PdfJsonFontService {
@PostConstruct
private void initialiseCffConverterAvailability() {
loadConfiguration();
if (!cffConversionEnabled) {
log.warn("[FONT-DEBUG] CFF conversion is DISABLED in configuration");
pythonCffConverterAvailable = false;
@ -77,6 +71,22 @@ public class PdfJsonFontService {
log.info("[FONT-DEBUG] Selected CFF converter method: {}", cffConverterMethod);
}
/**
 * Pulls CFF-converter settings (enabled flag, method, tool commands) from
 * {@code ApplicationProperties.PdfEditor} into this service's fields.
 *
 * <p>When the PdfEditor/CffConverter section is absent, conversion is
 * disabled outright.
 */
private void loadConfiguration() {
    if (applicationProperties.getPdfEditor() != null
            && applicationProperties.getPdfEditor().getCffConverter() != null) {
        var cfg = applicationProperties.getPdfEditor().getCffConverter();
        this.cffConversionEnabled = cfg.isEnabled();
        this.cffConverterMethod = cfg.getMethod();
        this.pythonCommand = cfg.getPythonCommand();
        this.pythonScript = cfg.getPythonScript();
        this.fontforgeCommand = cfg.getFontforgeCommand();
    } else {
        // Use defaults when config is not available
        // NOTE(review): only cffConversionEnabled is assigned here; the method and
        // command fields keep their field defaults — confirm they are never read
        // while conversion is disabled.
        this.cffConversionEnabled = false;
        log.warn("[FONT-DEBUG] PdfEditor configuration not available, CFF conversion disabled");
    }
}
public byte[] convertCffProgramToTrueType(byte[] fontBytes, String toUnicode) {
if (!cffConversionEnabled || fontBytes == null || fontBytes.length == 0) {
log.warn(

View File

@ -2,7 +2,6 @@ package stirling.software.SPDF.service.pdfjson.type3;
import java.io.IOException;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
@ -23,8 +22,8 @@ import stirling.software.SPDF.service.pdfjson.type3.library.Type3FontLibraryPayl
public class Type3LibraryStrategy implements Type3ConversionStrategy {
private final Type3FontLibrary fontLibrary;
private final stirling.software.common.model.ApplicationProperties applicationProperties;
@Value("${stirling.pdf.json.type3.library.enabled:true}")
private boolean enabled;
@Override
@ -42,6 +41,19 @@ public class Type3LibraryStrategy implements Type3ConversionStrategy {
return enabled && fontLibrary != null && fontLibrary.isLoaded();
}
@jakarta.annotation.PostConstruct
private void loadConfiguration() {
if (applicationProperties.getPdfEditor() != null
&& applicationProperties.getPdfEditor().getType3() != null
&& applicationProperties.getPdfEditor().getType3().getLibrary() != null) {
var cfg = applicationProperties.getPdfEditor().getType3().getLibrary();
this.enabled = cfg.isEnabled();
} else {
this.enabled = false;
log.warn("PdfEditor Type3 library configuration not available, disabled");
}
}
@Override
public PdfJsonFontConversionCandidate convert(
Type3ConversionRequest request, Type3GlyphContext context) throws IOException {

View File

@ -14,7 +14,6 @@ import java.util.stream.Collectors;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.font.PDType3Font;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.stereotype.Component;
@ -34,8 +33,8 @@ public class Type3FontLibrary {
private final ObjectMapper objectMapper;
private final ResourceLoader resourceLoader;
private final stirling.software.common.model.ApplicationProperties applicationProperties;
@Value("${stirling.pdf.json.type3.library.index:classpath:/type3/library/index.json}")
private String indexLocation;
private final Map<String, Type3FontLibraryEntry> signatureIndex = new ConcurrentHashMap<>();
@ -44,6 +43,17 @@ public class Type3FontLibrary {
@jakarta.annotation.PostConstruct
void initialise() {
if (applicationProperties.getPdfEditor() != null
&& applicationProperties.getPdfEditor().getType3() != null
&& applicationProperties.getPdfEditor().getType3().getLibrary() != null) {
this.indexLocation =
applicationProperties.getPdfEditor().getType3().getLibrary().getIndex();
} else {
log.warn(
"[TYPE3] PdfEditor Type3 library configuration not available; Type3 library disabled");
entries = List.of();
return;
}
Resource resource = resourceLoader.getResource(indexLocation);
if (!resource.exists()) {
log.info("[TYPE3] Library index {} not found; Type3 library disabled", indexLocation);

View File

@ -12,7 +12,6 @@
security:
enableLogin: true # set to 'true' to enable login
csrfDisabled: false # set to 'true' to disable CSRF protection (not recommended for production)
loginAttemptCount: 5 # lock user account after 5 tries; when using e.g. Fail2Ban you can deactivate the function with -1
loginResetTimeMinutes: 120 # lock account for 2 hours after x attempts
loginMethod: all # Accepts values like 'all' and 'normal'(only Login with Username/Password), 'oauth2'(only Login with OAuth2) or 'saml2'(only Login with SAML2)
@ -59,6 +58,8 @@ security:
idpCert: classpath:okta.cert # The certificate your Provider will use to authenticate your app's SAML authentication requests. Provided by your Provider
privateKey: classpath:saml-private-key.key # Your private key. Generated from your keypair
spCert: classpath:saml-public-cert.crt # Your signing certificate. Generated from your keypair
# IMPORTANT: For SAML setup, download your SP metadata from the BACKEND URL: http://localhost:8080/saml2/service-provider-metadata/{registrationId}
# Do NOT use the frontend dev server URL (localhost:5173) as it will generate incorrect ACS URLs. Always use the backend URL (localhost:8080) for SAML configuration.
jwt: # This feature is currently under development and not yet fully supported. Do not use in production.
persistence: true # Set to 'true' to enable JWT key store
enableKeyRotation: true # Set to 'true' to enable key pair rotation
@ -105,6 +106,11 @@ mail:
username: '' # SMTP server username
password: '' # SMTP server password
from: '' # sender email address
startTlsEnable: true # enable STARTTLS (explicit TLS upgrade after connecting) when supported by the SMTP server
startTlsRequired: false # require STARTTLS; connection fails if the upgrade command is not supported
sslEnable: false # enable SSL/TLS wrapper for implicit TLS (typically used with port 465)
sslTrust: '' # optional trusted host override, e.g. "smtp.example.com" or "*"; defaults to "*" (trust all) when empty
sslCheckServerIdentity: false # enable hostname verification when using SSL/TLS
legal:
termsAndConditions: https://www.stirling.com/legal/terms-of-service # URL to the terms and conditions of your application (e.g. https://example.com/terms). Empty string to disable or filename to load from local file in static folder
@ -122,13 +128,15 @@ system:
customHTMLFiles: false # enable to have files placed in /customFiles/templates override the existing template HTML files
tessdataDir: /usr/share/tessdata # path to the directory containing the Tessdata files. This setting is relevant for Windows systems. For Windows users, this path should be adjusted to point to the appropriate directory where the Tessdata files are stored.
enableAnalytics: null # Master toggle for analytics: set to 'true' to enable all analytics, 'false' to disable all analytics, or leave as 'null' to prompt admin on first launch
enableDesktopInstallSlide: true # Set to 'false' to hide the desktop app installation slide in the onboarding flow
enablePosthog: null # Enable PostHog analytics (open-source product analytics): set to 'true' to enable, 'false' to disable, or 'null' to enable by default when analytics is enabled
enableScarf: null # Enable Scarf tracking pixel: set to 'true' to enable, 'false' to disable, or 'null' to enable by default when analytics is enabled
enableUrlToPDF: false # Set to 'true' to enable URL to PDF, INTERNAL ONLY, known security issues, should not be used externally
disableSanitize: false # set to true to disable Sanitize HTML; (can lead to injections in HTML)
maxDPI: 500 # Maximum allowed DPI for PDF to image conversion
corsAllowedOrigins: [] # List of allowed origins for CORS (e.g. ['http://localhost:5173', 'https://app.example.com']). Leave empty to disable CORS.
frontendUrl: '' # Base URL for frontend (e.g. 'https://pdf.example.com'). Used for generating invite links in emails. If empty, falls back to backend URL.
corsAllowedOrigins: [] # List of allowed origins for CORS (e.g. ['http://localhost:5173', 'https://app.example.com']). Leave empty to disable CORS. For local development with frontend on port 5173, add 'http://localhost:5173'
backendUrl: '' # Backend base URL for SAML/OAuth/API callbacks (e.g. 'http://localhost:8080' for dev, 'https://api.example.com' for production). REQUIRED for SSO authentication to work correctly. This is where your IdP will send SAML responses and OAuth callbacks. Leave empty to default to 'http://localhost:8080' in development.
frontendUrl: '' # Frontend URL for invite email links (e.g. 'https://app.example.com'). Optional - if not set, will use backendUrl. This is the URL users click in invite emails.
serverCertificate:
enabled: true # Enable server-side certificate for "Sign with Stirling-PDF" option
organizationName: Stirling-PDF # Organization name for generated certificates
@ -174,23 +182,6 @@ system:
databaseBackup:
cron: '0 0 0 * * ?' # Cron expression for automatic database backups "0 0 0 * * ?" daily at midnight
stirling:
pdf:
fallback-font: classpath:/static/fonts/NotoSans-Regular.ttf # Override to point at a custom fallback font
json:
font-normalization:
enabled: false # IMPORTANT: Disable to preserve ToUnicode CMaps for correct font rendering. Ghostscript strips Unicode mappings from CID fonts.
cff-converter:
enabled: true # Wrap CFF/Type1C fonts as OpenType-CFF for browser compatibility
method: python # Converter method: 'python' (fontTools, recommended - wraps as OTF), 'fontforge' (legacy - converts to TTF, may hang on CID fonts)
python-command: /opt/venv/bin/python3 # Python interpreter path
python-script: /scripts/convert_cff_to_ttf.py # Path to font wrapping script
fontforge-command: fontforge # Override if FontForge is installed under a different name/path
type3:
library:
enabled: true # Match common Type3 fonts against the built-in library of converted programs
index: classpath:/type3/library/index.json # Override to point at a custom index.json (supports http:, file:, classpath:)
ui:
appNameNavbar: '' # name displayed on the navigation bar
logoStyle: classic # Options: 'classic' (default - classic S icon) or 'modern' (minimalist logo)
@ -219,6 +210,7 @@ processExecutor:
weasyPrintSessionLimit: 16
installAppSessionLimit: 1
calibreSessionLimit: 1
imageMagickSessionLimit: 4
ghostscriptSessionLimit: 8
ocrMyPdfSessionLimit: 2
timeoutMinutes: # Process executor timeout in minutes
@ -228,7 +220,26 @@ processExecutor:
weasyPrinttimeoutMinutes: 30
installApptimeoutMinutes: 60
calibretimeoutMinutes: 30
imageMagickTimeoutMinutes: 30
tesseractTimeoutMinutes: 30
qpdfTimeoutMinutes: 30
ghostscriptTimeoutMinutes: 30
ocrMyPdfTimeoutMinutes: 30
pdfEditor:
fallback-font: classpath:/static/fonts/NotoSans-Regular.ttf # Override to point at a custom fallback font
cache:
max-bytes: -1 # Max in-memory cache size in bytes; -1 disables byte cap
max-percent: 20 # Max in-memory cache as % of JVM max; used when max-bytes <= 0
font-normalization:
enabled: false # IMPORTANT: Disable to preserve ToUnicode CMaps for correct font rendering. Ghostscript strips Unicode mappings from CID fonts.
cff-converter:
enabled: true # Wrap CFF/Type1CFF fonts as OpenType-CFF for browser compatibility
method: python # Converter method: 'python' (fontTools, recommended - wraps as OTF), 'fontforge' (legacy - converts to TTF, may hang on CID fonts)
python-command: /opt/venv/bin/python3 # Python interpreter path
python-script: /scripts/convert_cff_to_ttf.py # Path to font wrapping script
fontforge-command: fontforge # Override if FontForge is installed under a different name/path
type3:
library:
enabled: true # Match common Type3 fonts against the built-in library of converted programs
index: classpath:/type3/library/index.json # Override to point at a custom index.json (supports http:, file:, classpath:)

View File

@ -1,9 +1,10 @@
package stirling.software.SPDF.controller.api.security;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.lenient;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
@ -107,7 +108,8 @@ class CertSignControllerTest {
derCertBytes = baos.toByteArray();
}
when(pdfDocumentFactory.load(any(MultipartFile.class)))
lenient()
.when(pdfDocumentFactory.load(any(MultipartFile.class)))
.thenAnswer(
invocation -> {
MultipartFile file = invocation.getArgument(0);
@ -167,6 +169,31 @@ class CertSignControllerTest {
assertTrue(response.getBody().length > 0);
}
@Test
void testSignPdfWithMissingPkcs12FileThrowsError() {
MockMultipartFile pdfFile =
new MockMultipartFile(
"fileInput", "test.pdf", MediaType.APPLICATION_PDF_VALUE, pdfBytes);
SignPDFWithCertRequest request = new SignPDFWithCertRequest();
request.setFileInput(pdfFile);
request.setCertType("PFX");
request.setPassword("password");
request.setShowSignature(false);
request.setReason("test");
request.setLocation("test");
request.setName("tester");
request.setPageNumber(1);
request.setShowLogo(false);
IllegalArgumentException exception =
assertThrows(
IllegalArgumentException.class,
() -> certSignController.signPDFWithCert(request));
assertTrue(exception.getMessage().contains("PKCS12 keystore"));
}
@Test
void testSignPdfWithJks() throws Exception {
MockMultipartFile pdfFile =

View File

@ -94,6 +94,22 @@ public class ProprietaryUIDataController {
this.auditRepository = auditRepository;
}
/**
* Get the backend base URL for SAML/OAuth redirects. Uses system.backendUrl from config if set,
* otherwise defaults to http://localhost:8080
*/
private String getBackendBaseUrl() {
String backendUrl = applicationProperties.getSystem().getBackendUrl();
// If backendUrl is configured, use it
if (backendUrl != null && !backendUrl.trim().isEmpty()) {
return backendUrl.trim();
}
// For development, default to localhost:8080 (backend port)
return "http://localhost:8080";
}
@GetMapping("/audit-dashboard")
@PreAuthorize("hasRole('ADMIN')")
@EnterpriseEndpoint
@ -185,14 +201,17 @@ public class ProprietaryUIDataController {
}
SAML2 saml2 = securityProps.getSaml2();
if (securityProps.isSaml2Active()
&& applicationProperties.getSystem().getEnableAlphaFunctionality()
&& applicationProperties.getPremium().isEnabled()) {
if (securityProps.isSaml2Active() && applicationProperties.getPremium().isEnabled()) {
String samlIdp = saml2.getProvider();
String saml2AuthenticationPath = "/saml2/authenticate/" + saml2.getRegistrationId();
// For SAML, we need to use the backend URL directly, not a relative path
// This ensures Spring Security generates the correct ACS URL
String backendUrl = getBackendBaseUrl();
String fullSamlPath = backendUrl + saml2AuthenticationPath;
if (!applicationProperties.getPremium().getProFeatures().isSsoAutoLogin()) {
providerList.put(saml2AuthenticationPath, samlIdp + " (SAML 2)");
providerList.put(fullSamlPath, samlIdp + " (SAML 2)");
}
}
@ -205,6 +224,10 @@ public class ProprietaryUIDataController {
data.setLoginMethod(securityProps.getLoginMethod());
data.setAltLogin(!providerList.isEmpty() && securityProps.isAltLogin());
// Add language configuration for login page
data.setLanguages(applicationProperties.getUi().getLanguages());
data.setDefaultLocale(applicationProperties.getSystem().getDefaultLocale());
return ResponseEntity.ok(data);
}
@ -328,6 +351,7 @@ public class ProprietaryUIDataController {
data.setGrandfatheredUserCount(grandfatheredCount);
data.setLicenseMaxUsers(licenseMaxUsers);
data.setPremiumEnabled(premiumEnabled);
data.setMailEnabled(applicationProperties.getMail().isEnabled());
return ResponseEntity.ok(data);
}
@ -376,7 +400,7 @@ public class ProprietaryUIDataController {
data.setUsername(username);
data.setRole(user.get().getRolesAsString());
data.setSettings(settingsJson);
data.setChangeCredsFlag(user.get().isFirstLogin());
data.setChangeCredsFlag(user.get().isFirstLogin() || user.get().isForcePasswordChange());
data.setOAuth2Login(isOAuth2Login);
data.setSaml2Login(isSaml2Login);
@ -491,6 +515,8 @@ public class ProprietaryUIDataController {
private boolean altLogin;
private boolean firstTimeSetup;
private boolean showDefaultCredentials;
private List<String> languages;
private String defaultLocale;
}
@Data
@ -510,6 +536,7 @@ public class ProprietaryUIDataController {
private int grandfatheredUserCount;
private int licenseMaxUsers;
private boolean premiumEnabled;
private boolean mailEnabled;
}
@Data

View File

@ -1,7 +1,12 @@
package stirling.software.proprietary.controller.api;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
@ -17,7 +22,7 @@ import org.springframework.web.bind.annotation.RestController;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.annotations.api.UserApi;
import stirling.software.common.configuration.InstallationPathConfig;
import stirling.software.proprietary.model.api.signature.SavedSignatureRequest;
import stirling.software.proprietary.model.api.signature.SavedSignatureResponse;
import stirling.software.proprietary.security.service.UserService;
@ -28,7 +33,6 @@ import stirling.software.proprietary.service.SignatureService;
* authentication and enforces per-user storage limits. All endpoints require authentication
* via @PreAuthorize("isAuthenticated()").
*/
@UserApi
@Slf4j
@RestController
@RequestMapping("/api/v1/proprietary/signatures")
@ -38,6 +42,7 @@ public class SignatureController {
private final SignatureService signatureService;
private final UserService userService;
private static final String ALL_USERS_FOLDER = "ALL_USERS";
/**
* Save a new signature for the authenticated user. Enforces storage limits and authentication
@ -84,19 +89,105 @@ public class SignatureController {
}
/**
* Delete a signature owned by the authenticated user. Users can only delete their own personal
* signatures, not shared ones.
* Update a signature label. Users can update labels for their own personal signatures and for
* shared signatures.
*/
@PostMapping("/{signatureId}/label")
@PreAuthorize("!hasAuthority('ROLE_DEMO_USER')")
public ResponseEntity<Void> updateSignatureLabel(
@PathVariable String signatureId, @RequestBody Map<String, String> body) {
try {
String username = userService.getCurrentUsername();
String newLabel = body.get("label");
if (newLabel == null || newLabel.trim().isEmpty()) {
log.warn("Invalid label update request");
return ResponseEntity.badRequest().build();
}
signatureService.updateSignatureLabel(username, signatureId, newLabel);
log.info("User {} updated label for signature {}", username, signatureId);
return ResponseEntity.noContent().build();
} catch (IOException e) {
log.warn("Failed to update signature label: {}", e.getMessage());
return ResponseEntity.status(HttpStatus.NOT_FOUND).build();
}
}
/**
* Delete a signature owned by the authenticated user. Users can delete their own personal
* signatures. Admins can also delete shared signatures.
*/
@DeleteMapping("/{signatureId}")
@PreAuthorize("!hasAuthority('ROLE_DEMO_USER')")
public ResponseEntity<Void> deleteSignature(@PathVariable String signatureId) {
try {
String username = userService.getCurrentUsername();
signatureService.deleteSignature(username, signatureId);
log.info("User {} deleted signature {}", username, signatureId);
return ResponseEntity.noContent().build();
boolean isAdmin = userService.isCurrentUserAdmin();
// Validate filename to prevent path traversal
if (signatureId.contains("..")
|| signatureId.contains("/")
|| signatureId.contains("\\")) {
log.warn("Invalid signature ID: {}", signatureId);
return ResponseEntity.badRequest().build();
}
// Try to delete from personal folder first
try {
signatureService.deleteSignature(username, signatureId);
log.info("User {} deleted personal signature {}", username, signatureId);
return ResponseEntity.noContent().build();
} catch (IOException e) {
// If not found in personal folder, check if it's in shared folder
if (isAdmin) {
// Admin can delete from shared folder
if (deleteFromSharedFolder(signatureId)) {
log.info("Admin {} deleted shared signature {}", username, signatureId);
return ResponseEntity.noContent().build();
}
}
// If not admin or not found in shared folder either, return 404
throw e;
}
} catch (IOException e) {
log.warn("Failed to delete signature {} for user: {}", signatureId, e.getMessage());
return ResponseEntity.status(HttpStatus.NOT_FOUND).build();
}
}
/**
* Delete a signature from the shared (ALL_USERS) folder. Only admins should call this method.
*/
private boolean deleteFromSharedFolder(String signatureId) throws IOException {
String signatureBasePath = InstallationPathConfig.getSignaturesPath();
Path sharedFolder = Paths.get(signatureBasePath, ALL_USERS_FOLDER);
boolean deleted = false;
if (Files.exists(sharedFolder)) {
try (Stream<Path> stream = Files.list(sharedFolder)) {
List<Path> matchingFiles =
stream.filter(
path ->
path.getFileName()
.toString()
.startsWith(signatureId + "."))
.toList();
for (Path file : matchingFiles) {
Files.delete(file);
deleted = true;
log.info("Deleted shared signature file: {}", file);
}
}
// Also delete metadata file if it exists
Path metadataPath = sharedFolder.resolve(signatureId + ".json");
if (Files.exists(metadataPath)) {
Files.delete(metadataPath);
log.info("Deleted shared signature metadata: {}", metadataPath);
}
}
return deleted;
}
}

View File

@ -120,9 +120,7 @@ public class AccountWebController {
SAML2 saml2 = securityProps.getSaml2();
if (securityProps.isSaml2Active()
&& applicationProperties.getSystem().getEnableAlphaFunctionality()
&& applicationProperties.getPremium().isEnabled()) {
if (securityProps.isSaml2Active() && applicationProperties.getPremium().isEnabled()) {
String samlIdp = saml2.getProvider();
String saml2AuthenticationPath = "/saml2/authenticate/" + saml2.getRegistrationId();

View File

@ -33,7 +33,8 @@ public class MailConfig {
// Creates a new instance of JavaMailSenderImpl, which is a Spring implementation
JavaMailSenderImpl mailSender = new JavaMailSenderImpl();
mailSender.setHost(mailProperties.getHost());
String host = mailProperties.getHost();
mailSender.setHost(host);
mailSender.setPort(mailProperties.getPort());
mailSender.setDefaultEncoding("UTF-8");
@ -70,8 +71,32 @@ public class MailConfig {
log.info("SMTP authentication disabled - no credentials provided");
}
boolean startTlsEnabled =
mailProperties.getStartTlsEnable() == null || mailProperties.getStartTlsEnable();
// Enables STARTTLS to encrypt the connection if supported by the SMTP server
props.put("mail.smtp.starttls.enable", "true");
props.put("mail.smtp.starttls.enable", Boolean.toString(startTlsEnabled));
if (mailProperties.getStartTlsRequired() != null) {
props.put(
"mail.smtp.starttls.required", mailProperties.getStartTlsRequired().toString());
}
if (mailProperties.getSslEnable() != null) {
props.put("mail.smtp.ssl.enable", mailProperties.getSslEnable().toString());
}
// Trust the configured host to allow STARTTLS with self-signed certificates
String sslTrust = mailProperties.getSslTrust();
if (sslTrust == null || sslTrust.trim().isEmpty()) {
sslTrust = "*";
}
if (sslTrust != null && !sslTrust.trim().isEmpty()) {
props.put("mail.smtp.ssl.trust", sslTrust);
}
if (mailProperties.getSslCheckServerIdentity() != null) {
props.put(
"mail.smtp.ssl.checkserveridentity",
mailProperties.getSslCheckServerIdentity().toString());
}
// Returns the configured mail sender, ready to send emails
return mailSender;

View File

@ -1,7 +1,6 @@
package stirling.software.proprietary.security.configuration;
import java.util.List;
import java.util.Optional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@ -25,8 +24,6 @@ import org.springframework.security.saml2.provider.service.web.authentication.Op
import org.springframework.security.web.SecurityFilterChain;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import org.springframework.security.web.authentication.rememberme.PersistentTokenRepository;
import org.springframework.security.web.csrf.CookieCsrfTokenRepository;
import org.springframework.security.web.csrf.CsrfTokenRequestAttributeHandler;
import org.springframework.security.web.savedrequest.NullRequestCache;
import org.springframework.security.web.servlet.util.matcher.PathPatternRequestMatcher;
import org.springframework.web.cors.CorsConfiguration;
@ -47,7 +44,6 @@ import stirling.software.proprietary.security.database.repository.PersistentLogi
import stirling.software.proprietary.security.filter.IPRateLimitingFilter;
import stirling.software.proprietary.security.filter.JwtAuthenticationFilter;
import stirling.software.proprietary.security.filter.UserAuthenticationFilter;
import stirling.software.proprietary.security.model.User;
import stirling.software.proprietary.security.oauth2.CustomOAuth2AuthenticationFailureHandler;
import stirling.software.proprietary.security.oauth2.CustomOAuth2AuthenticationSuccessHandler;
import stirling.software.proprietary.security.saml2.CustomSaml2AuthenticationFailureHandler;
@ -198,74 +194,19 @@ public class SecurityConfiguration {
http.cors(cors -> cors.disable());
}
if (securityProperties.getCsrfDisabled() || !loginEnabledValue) {
http.csrf(CsrfConfigurer::disable);
}
http.csrf(CsrfConfigurer::disable);
if (loginEnabledValue) {
boolean v2Enabled = appConfig.v2Enabled();
http.addFilterBefore(
userAuthenticationFilter, UsernamePasswordAuthenticationFilter.class)
.addFilterBefore(rateLimitingFilter, UsernamePasswordAuthenticationFilter.class)
.addFilterBefore(jwtAuthenticationFilter, UserAuthenticationFilter.class);
if (!securityProperties.getCsrfDisabled()) {
CookieCsrfTokenRepository cookieRepo =
CookieCsrfTokenRepository.withHttpOnlyFalse();
CsrfTokenRequestAttributeHandler requestHandler =
new CsrfTokenRequestAttributeHandler();
requestHandler.setCsrfRequestAttributeName(null);
http.csrf(
csrf ->
csrf.ignoringRequestMatchers(
request -> {
String uri = request.getRequestURI();
// Ignore CSRF for auth endpoints
if (uri.startsWith("/api/v1/auth/")) {
return true;
}
String apiKey = request.getHeader("X-API-KEY");
// If there's no API key, don't ignore CSRF
// (return false)
if (apiKey == null || apiKey.trim().isEmpty()) {
return false;
}
// Validate API key using existing UserService
try {
Optional<User> user =
userService.getUserByApiKey(apiKey);
// If API key is valid, ignore CSRF (return
// true)
// If API key is invalid, don't ignore CSRF
// (return false)
return user.isPresent();
} catch (Exception e) {
// If there's any error validating the API
// key, don't ignore CSRF
return false;
}
})
.csrfTokenRepository(cookieRepo)
.csrfTokenRequestHandler(requestHandler));
}
http.sessionManagement(
sessionManagement -> {
if (v2Enabled) {
sessionManagement ->
sessionManagement.sessionCreationPolicy(
SessionCreationPolicy.STATELESS);
} else {
sessionManagement
.sessionCreationPolicy(SessionCreationPolicy.IF_REQUIRED)
.maximumSessions(10)
.maxSessionsPreventsLogin(false)
.sessionRegistry(sessionRegistry)
.expiredUrl("/login?logout=true");
}
});
SessionCreationPolicy.STATELESS));
http.authenticationProvider(daoAuthenticationProvider());
http.requestCache(requestCache -> requestCache.requestCache(new NullRequestCache()));
@ -348,18 +289,7 @@ public class SecurityConfiguration {
if (securityProperties.isOauth2Active()) {
http.oauth2Login(
oauth2 -> {
// v1: Use /oauth2 as login page for Thymeleaf templates
if (!v2Enabled) {
oauth2.loginPage("/oauth2");
}
// v2: Don't set loginPage, let default OAuth2 flow handle it
oauth2
/*
This Custom handler is used to check if the OAUTH2 user trying to log in, already exists in the database.
If user exists, login proceeds as usual. If user does not exist, then it is auto-created but only if 'OAUTH2AutoCreateUser'
is set as true, else login fails with an error message advising the same.
*/
oauth2.loginPage("/login")
.successHandler(
new CustomOAuth2AuthenticationSuccessHandler(
loginAttemptService,
@ -393,12 +323,8 @@ public class SecurityConfiguration {
.saml2Login(
saml2 -> {
try {
// Only set login page for v1/Thymeleaf mode
if (!v2Enabled) {
saml2.loginPage("/saml2");
}
saml2.relyingPartyRegistrationRepository(
saml2.loginPage("/login")
.relyingPartyRegistrationRepository(
saml2RelyingPartyRegistrations)
.authenticationManager(
new ProviderManager(authenticationProvider))
@ -408,7 +334,8 @@ public class SecurityConfiguration {
securityProperties.getSaml2(),
userService,
jwtService,
licenseSettingsService))
licenseSettingsService,
applicationProperties))
.failureHandler(
new CustomSaml2AuthenticationFailureHandler())
.authenticationRequestResolver(

View File

@ -244,10 +244,13 @@ public class AuthController {
userMap.put("username", user.getUsername());
userMap.put("role", user.getRolesAsString());
userMap.put("enabled", user.isEnabled());
userMap.put(
"authenticationType",
user.getAuthenticationType()); // Expose authentication type for SSO detection
// Add metadata for OAuth compatibility
Map<String, Object> appMetadata = new HashMap<>();
appMetadata.put("provider", user.getAuthenticationType()); // Default to email provider
appMetadata.put("provider", user.getAuthenticationType());
userMap.put("app_metadata", appMetadata);
return userMap;

View File

@ -7,6 +7,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
@ -18,6 +19,7 @@ import org.springframework.security.oauth2.core.user.OAuth2User;
import org.springframework.security.web.authentication.logout.SecurityContextLogoutHandler;
import org.springframework.web.bind.annotation.*;
import jakarta.mail.MessagingException;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.transaction.Transactional;
@ -236,6 +238,8 @@ public class UserController {
return ResponseEntity.status(HttpStatus.UNAUTHORIZED)
.body(Map.of("error", "incorrectPassword", "message", "Incorrect password"));
}
// Set flags before changing password so they're saved together
user.setForcePasswordChange(false);
userService.changePassword(user, newPassword);
userService.changeFirstUse(user, false);
// Logout using Spring's utility
@ -584,6 +588,79 @@ public class UserController {
return ResponseEntity.ok(Map.of("message", "User role updated successfully"));
}
@PreAuthorize("hasRole('ROLE_ADMIN')")
@PostMapping("/admin/changePasswordForUser")
public ResponseEntity<?> changePasswordForUser(
@RequestParam(name = "username") String username,
@RequestParam(name = "newPassword", required = false) String newPassword,
@RequestParam(name = "generateRandom", defaultValue = "false") boolean generateRandom,
@RequestParam(name = "sendEmail", defaultValue = "false") boolean sendEmail,
@RequestParam(name = "includePassword", defaultValue = "false") boolean includePassword,
@RequestParam(name = "forcePasswordChange", defaultValue = "false")
boolean forcePasswordChange,
HttpServletRequest request,
Authentication authentication)
throws SQLException, UnsupportedProviderException, MessagingException {
Optional<User> userOpt = userService.findByUsernameIgnoreCase(username);
if (userOpt.isEmpty()) {
return ResponseEntity.status(HttpStatus.NOT_FOUND)
.body(Map.of("error", "User not found."));
}
String currentUsername = authentication.getName();
if (currentUsername.equalsIgnoreCase(username)) {
return ResponseEntity.status(HttpStatus.BAD_REQUEST)
.body(Map.of("error", "Cannot change your own password."));
}
User user = userOpt.get();
String finalPassword = newPassword;
if (generateRandom) {
finalPassword = UUID.randomUUID().toString().replace("-", "").substring(0, 12);
}
if (finalPassword == null || finalPassword.trim().isEmpty()) {
return ResponseEntity.status(HttpStatus.BAD_REQUEST)
.body(Map.of("error", "New password is required."));
}
// Set force password change flag before changing password so both are saved together
user.setForcePasswordChange(forcePasswordChange);
userService.changePassword(user, finalPassword);
// Invalidate all active sessions to force reauthentication
userService.invalidateUserSessions(username);
if (sendEmail) {
if (emailService.isEmpty() || !applicationProperties.getMail().isEnabled()) {
return ResponseEntity.status(HttpStatus.BAD_REQUEST)
.body(Map.of("error", "Email is not configured."));
}
String userEmail = user.getUsername();
// Check if username is a valid email format
if (userEmail == null || userEmail.isBlank() || !userEmail.contains("@")) {
return ResponseEntity.status(HttpStatus.BAD_REQUEST)
.body(
Map.of(
"error",
"User's email is not a valid email address. Notifications are disabled."));
}
String loginUrl = buildLoginUrl(request);
emailService
.get()
.sendPasswordChangedNotification(
userEmail,
user.getUsername(),
includePassword ? finalPassword : null,
loginUrl);
}
return ResponseEntity.ok(Map.of("message", "User password updated successfully"));
}
@PreAuthorize("hasRole('ROLE_ADMIN')")
@PostMapping("/admin/changeUserEnabled/{username}")
public ResponseEntity<?> changeUserEnabled(

View File

@ -26,6 +26,7 @@ import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.model.ApplicationProperties;
@ -39,6 +40,7 @@ import stirling.software.proprietary.security.service.JwtServiceInterface;
import stirling.software.proprietary.security.service.UserService;
@Slf4j
@RequiredArgsConstructor
public class JwtAuthenticationFilter extends OncePerRequestFilter {
private final JwtServiceInterface jwtService;
@ -47,19 +49,6 @@ public class JwtAuthenticationFilter extends OncePerRequestFilter {
private final AuthenticationEntryPoint authenticationEntryPoint;
private final ApplicationProperties.Security securityProperties;
public JwtAuthenticationFilter(
JwtServiceInterface jwtService,
UserService userService,
CustomUserDetailsService userDetailsService,
AuthenticationEntryPoint authenticationEntryPoint,
ApplicationProperties.Security securityProperties) {
this.jwtService = jwtService;
this.userService = userService;
this.userDetailsService = userDetailsService;
this.authenticationEntryPoint = authenticationEntryPoint;
this.securityProperties = securityProperties;
}
@Override
protected void doFilterInternal(
HttpServletRequest request, HttpServletResponse response, FilterChain filterChain)
@ -68,7 +57,11 @@ public class JwtAuthenticationFilter extends OncePerRequestFilter {
filterChain.doFilter(request, response);
return;
}
if (isStaticResource(request.getContextPath(), request.getRequestURI())) {
String requestURI = request.getRequestURI();
String contextPath = request.getContextPath();
if (isStaticResource(contextPath, requestURI)) {
filterChain.doFilter(request, response);
return;
}
@ -77,10 +70,7 @@ public class JwtAuthenticationFilter extends OncePerRequestFilter {
String jwtToken = jwtService.extractToken(request);
if (jwtToken == null) {
// Allow specific auth endpoints to pass through without JWT
String requestURI = request.getRequestURI();
String contextPath = request.getContextPath();
// Allow auth endpoints to pass through without JWT
if (!isPublicAuthEndpoint(requestURI, contextPath)) {
// For API requests, return 401 JSON
String acceptHeader = request.getHeader("Accept");

View File

@ -241,24 +241,6 @@ public class UserAuthenticationFilter extends OncePerRequestFilter {
filterChain.doFilter(request, response);
}
private static boolean isPublicAuthEndpoint(String requestURI, String contextPath) {
// Remove context path from URI to normalize path matching
String trimmedUri =
requestURI.startsWith(contextPath)
? requestURI.substring(contextPath.length())
: requestURI;
// Public auth endpoints that don't require authentication
return trimmedUri.startsWith("/login")
|| trimmedUri.startsWith("/auth/")
|| trimmedUri.startsWith("/oauth2")
|| trimmedUri.startsWith("/saml2")
|| trimmedUri.startsWith("/api/v1/auth/login")
|| trimmedUri.startsWith("/api/v1/auth/refresh")
|| trimmedUri.startsWith("/api/v1/auth/logout")
|| trimmedUri.startsWith("/api/v1/proprietary/ui-data/login");
}
private enum UserLoginType {
USERDETAILS("UserDetails"),
OAUTH2USER("OAuth2User"),

View File

@ -59,6 +59,9 @@ public class User implements UserDetails, Serializable {
@Column(name = "hasCompletedInitialSetup")
private Boolean hasCompletedInitialSetup = false;
@Column(name = "forcePasswordChange")
private Boolean forcePasswordChange = false;
@Column(name = "roleName")
private String roleName;
@ -117,6 +120,14 @@ public class User implements UserDetails, Serializable {
this.hasCompletedInitialSetup = hasCompletedInitialSetup;
}
public boolean isForcePasswordChange() {
return forcePasswordChange != null && forcePasswordChange;
}
public void setForcePasswordChange(boolean forcePasswordChange) {
this.forcePasswordChange = forcePasswordChange;
}
public void setAuthenticationType(AuthenticationType authenticationType) {
this.authenticationType = authenticationType.toString().toLowerCase();
}

View File

@ -27,6 +27,7 @@ import jakarta.servlet.http.HttpServletResponse;
import jakarta.servlet.http.HttpSession;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.model.ApplicationProperties;
import stirling.software.common.model.exception.UnsupportedProviderException;
@ -39,6 +40,7 @@ import stirling.software.proprietary.security.service.JwtServiceInterface;
import stirling.software.proprietary.security.service.LoginAttemptService;
import stirling.software.proprietary.security.service.UserService;
@Slf4j
@RequiredArgsConstructor
public class CustomOAuth2AuthenticationSuccessHandler
extends SavedRequestAwareAuthenticationSuccessHandler {
@ -77,12 +79,18 @@ public class CustomOAuth2AuthenticationSuccessHandler
if (user != null && !licenseSettingsService.isOAuthEligible(user)) {
// User is not grandfathered and no paid license - block OAuth login
log.warn(
"OAuth login blocked for existing user '{}' - not eligible (not grandfathered and no paid license)",
username);
response.sendRedirect(
request.getContextPath() + "/logout?oAuth2RequiresLicense=true");
return;
}
} else if (!licenseSettingsService.isOAuthEligible(null)) {
// No existing user and no paid license -> block auto creation
log.warn(
"OAuth login blocked for new user '{}' - not eligible (no paid license for auto-creation)",
username);
response.sendRedirect(request.getContextPath() + "/logout?oAuth2RequiresLicense=true");
return;
}

View File

@ -67,10 +67,15 @@ public class OAuth2Configuration {
keycloakClientRegistration().ifPresent(registrations::add);
if (registrations.isEmpty()) {
log.error("No OAuth2 provider registered");
log.error("No OAuth2 provider registered - check your OAuth2 configuration");
throw new NoProviderFoundException("At least one OAuth2 provider must be configured.");
}
log.info(
"OAuth2 ClientRegistrationRepository created with {} provider(s): {}",
registrations.size(),
registrations.stream().map(ClientRegistration::getRegistrationId).toList());
return new InMemoryClientRegistrationRepository(registrations);
}
@ -165,7 +170,6 @@ public class OAuth2Configuration {
githubClient.getUseAsUsername());
boolean isValid = validateProvider(github);
log.info("Initialised GitHub OAuth2 provider");
return isValid
? Optional.of(
@ -208,7 +212,19 @@ public class OAuth2Configuration {
null,
null);
return !isStringEmpty(oidcProvider.getIssuer()) || validateProvider(oidcProvider)
boolean isValid =
!isStringEmpty(oidcProvider.getIssuer()) || validateProvider(oidcProvider);
if (isValid) {
log.info(
"Initialised OIDC OAuth2 provider: registrationId='{}', issuer='{}', redirectUri='{}'",
name,
oauth.getIssuer(),
REDIRECT_URI_PATH + name);
} else {
log.warn("OIDC OAuth2 provider validation failed - provider will not be registered");
}
return isValid
? Optional.of(
ClientRegistrations.fromIssuerLocation(oauth.getIssuer())
.registrationId(name)
@ -217,7 +233,7 @@ public class OAuth2Configuration {
.scope(oidcProvider.getScopes())
.userNameAttributeName(oidcProvider.getUseAsUsername().getName())
.clientName(clientName)
.redirectUri(REDIRECT_URI_PATH + "oidc")
.redirectUri(REDIRECT_URI_PATH + name)
.authorizationGrantType(AUTHORIZATION_CODE)
.build())
: Optional.empty();

View File

@ -51,6 +51,7 @@ public class CustomSaml2AuthenticationSuccessHandler
private final JwtServiceInterface jwtService;
private final stirling.software.proprietary.service.UserLicenseSettingsService
licenseSettingsService;
private final ApplicationProperties applicationProperties;
@Override
@Audited(type = AuditEventType.USER_LOGIN, level = AuditLevel.BASIC)
@ -67,21 +68,27 @@ public class CustomSaml2AuthenticationSuccessHandler
boolean userExists = userService.usernameExistsIgnoreCase(username);
// Check if user is eligible for SAML (grandfathered or system has paid license)
// Check if user is eligible for SAML (grandfathered or system has ENTERPRISE license)
if (userExists) {
stirling.software.proprietary.security.model.User user =
userService.findByUsernameIgnoreCase(username).orElse(null);
if (user != null && !licenseSettingsService.isOAuthEligible(user)) {
// User is not grandfathered and no paid license - block SAML login
response.sendRedirect(
request.getContextPath() + "/logout?saml2RequiresLicense=true");
if (user != null && !licenseSettingsService.isSamlEligible(user)) {
// User is not grandfathered and no ENTERPRISE license - block SAML login
log.warn(
"SAML2 login blocked for existing user '{}' - not eligible (not grandfathered and no ENTERPRISE license)",
username);
String origin = resolveOrigin(request);
response.sendRedirect(origin + "/logout?saml2RequiresLicense=true");
return;
}
} else if (!licenseSettingsService.isOAuthEligible(null)) {
// No existing user and no paid license -> block auto creation
response.sendRedirect(
request.getContextPath() + "/logout?saml2RequiresLicense=true");
} else if (!licenseSettingsService.isSamlEligible(null)) {
// No existing user and no ENTERPRISE license -> block auto creation
log.warn(
"SAML2 login blocked for new user '{}' - not eligible (no ENTERPRISE license for auto-creation)",
username);
String origin = resolveOrigin(request);
response.sendRedirect(origin + "/logout?saml2RequiresLicense=true");
return;
}
@ -138,20 +145,28 @@ public class CustomSaml2AuthenticationSuccessHandler
log.debug(
"User {} exists with password but is not SSO user, redirecting to logout",
username);
response.sendRedirect(
contextPath + "/logout?oAuth2AuthenticationErrorWeb=true");
String origin = resolveOrigin(request);
response.sendRedirect(origin + "/logout?oAuth2AuthenticationErrorWeb=true");
return;
}
try {
if (!userExists || saml2Properties.getBlockRegistration()) {
log.debug("Registration blocked for new user: {}", username);
response.sendRedirect(
contextPath + "/login?errorOAuth=oAuth2AdminBlockedUser");
// Block new users only if: blockRegistration is true OR autoCreateUser is false
if (!userExists
&& (saml2Properties.getBlockRegistration()
|| !saml2Properties.getAutoCreateUser())) {
log.debug(
"Registration blocked for new user '{}' (blockRegistration: {}, autoCreateUser: {})",
username,
saml2Properties.getBlockRegistration(),
saml2Properties.getAutoCreateUser());
String origin = resolveOrigin(request);
response.sendRedirect(origin + "/login?errorOAuth=oAuth2AdminBlockedUser");
return;
}
if (!userExists && licenseSettingsService.wouldExceedLimit(1)) {
response.sendRedirect(contextPath + "/logout?maxUsersReached=true");
String origin = resolveOrigin(request);
response.sendRedirect(origin + "/logout?maxUsersReached=true");
return;
}
@ -216,16 +231,30 @@ public class CustomSaml2AuthenticationSuccessHandler
String contextPath,
String jwt) {
String redirectPath = resolveRedirectPath(request, contextPath);
String origin =
resolveForwardedOrigin(request)
.orElseGet(
() ->
resolveOriginFromReferer(request)
.orElseGet(() -> buildOriginFromRequest(request)));
String origin = resolveOrigin(request);
clearRedirectCookie(response);
return origin + redirectPath + "#access_token=" + jwt;
}
/**
* Resolve the origin (frontend URL) for redirects. First checks system.frontendUrl from config,
* then falls back to detecting from request headers.
*/
private String resolveOrigin(HttpServletRequest request) {
// First check if frontendUrl is configured
String configuredFrontendUrl = applicationProperties.getSystem().getFrontendUrl();
if (configuredFrontendUrl != null && !configuredFrontendUrl.trim().isEmpty()) {
return configuredFrontendUrl.trim();
}
// Fall back to auto-detection from request headers
return resolveForwardedOrigin(request)
.orElseGet(
() ->
resolveOriginFromReferer(request)
.orElseGet(() -> buildOriginFromRequest(request)));
}
private String resolveRedirectPath(HttpServletRequest request, String contextPath) {
return extractRedirectPathFromCookie(request)
.filter(path -> path.startsWith("/"))

View File

@ -41,22 +41,74 @@ public class Saml2Configuration {
@ConditionalOnProperty(name = "security.saml2.enabled", havingValue = "true")
public RelyingPartyRegistrationRepository relyingPartyRegistrations() throws Exception {
SAML2 samlConf = applicationProperties.getSecurity().getSaml2();
X509Certificate idpCert = CertificateUtils.readCertificate(samlConf.getIdpCert());
log.info(
"Initializing SAML2 configuration with registration ID: {}",
samlConf.getRegistrationId());
// Load IdP certificate
X509Certificate idpCert;
try {
Resource idpCertResource = samlConf.getIdpCert();
log.info("Loading IdP certificate from: {}", idpCertResource.getDescription());
if (!idpCertResource.exists()) {
log.error(
"SAML2 IdP certificate not found at: {}", idpCertResource.getDescription());
throw new IllegalStateException(
"SAML2 IdP certificate file does not exist: "
+ idpCertResource.getDescription());
}
idpCert = CertificateUtils.readCertificate(idpCertResource);
log.info(
"Successfully loaded IdP certificate. Subject: {}",
idpCert.getSubjectX500Principal().getName());
} catch (Exception e) {
log.error("Failed to load SAML2 IdP certificate: {}", e.getMessage(), e);
throw new IllegalStateException("Failed to load SAML2 IdP certificate", e);
}
Saml2X509Credential verificationCredential = Saml2X509Credential.verification(idpCert);
// Load SP private key and certificate
Resource privateKeyResource = samlConf.getPrivateKey();
Resource certificateResource = samlConf.getSpCert();
Saml2X509Credential signingCredential =
new Saml2X509Credential(
CertificateUtils.readPrivateKey(privateKeyResource),
CertificateUtils.readCertificate(certificateResource),
Saml2X509CredentialType.SIGNING);
log.info("Loading SP private key from: {}", privateKeyResource.getDescription());
if (!privateKeyResource.exists()) {
log.error("SAML2 SP private key not found at: {}", privateKeyResource.getDescription());
throw new IllegalStateException(
"SAML2 SP private key file does not exist: "
+ privateKeyResource.getDescription());
}
log.info("Loading SP certificate from: {}", certificateResource.getDescription());
if (!certificateResource.exists()) {
log.error(
"SAML2 SP certificate not found at: {}", certificateResource.getDescription());
throw new IllegalStateException(
"SAML2 SP certificate file does not exist: "
+ certificateResource.getDescription());
}
Saml2X509Credential signingCredential;
try {
signingCredential =
new Saml2X509Credential(
CertificateUtils.readPrivateKey(privateKeyResource),
CertificateUtils.readCertificate(certificateResource),
Saml2X509CredentialType.SIGNING);
log.info("Successfully loaded SP credentials");
} catch (Exception e) {
log.error("Failed to load SAML2 SP credentials: {}", e.getMessage(), e);
throw new IllegalStateException("Failed to load SAML2 SP credentials", e);
}
RelyingPartyRegistration rp =
RelyingPartyRegistration.withRegistrationId(samlConf.getRegistrationId())
.signingX509Credentials(c -> c.add(signingCredential))
.entityId(samlConf.getIdpIssuer())
.singleLogoutServiceBinding(Saml2MessageBinding.POST)
.singleLogoutServiceLocation(samlConf.getIdpSingleLogoutUrl())
.singleLogoutServiceResponseLocation("http://localhost:8080/login")
.singleLogoutServiceResponseLocation("{baseUrl}/login")
.assertionConsumerServiceBinding(Saml2MessageBinding.POST)
.assertionConsumerServiceLocation(
"{baseUrl}/login/saml2/sso/{registrationId}")
@ -75,9 +127,14 @@ public class Saml2Configuration {
.singleLogoutServiceLocation(
samlConf.getIdpSingleLogoutUrl())
.singleLogoutServiceResponseLocation(
"http://localhost:8080/login")
"{baseUrl}/login")
.wantAuthnRequestsSigned(true))
.build();
log.info(
"SAML2 configuration initialized successfully. Registration ID: {}, IdP: {}",
samlConf.getRegistrationId(),
samlConf.getIdpIssuer());
return new InMemoryRelyingPartyRegistrationRepository(rp);
}

Some files were not shown because too many files have changed in this diff Show More