diff --git a/.docker/README.md b/.docker/README.md
index c92cee89..07e28903 100644
--- a/.docker/README.md
+++ b/.docker/README.md
@@ -94,7 +94,7 @@ Configure the application via `docker-compose.yml`:
| `CHARON_ENV` | `production` | Set to `development` for verbose logging (`CPM_ENV` supported for backward compatibility). |
| `CHARON_HTTP_PORT` | `8080` | Port for the Web UI (`CPM_HTTP_PORT` supported for backward compatibility). |
| `CHARON_DB_PATH` | `/app/data/charon.db` | Path to the SQLite database (`CPM_DB_PATH` supported for backward compatibility). |
-| `CHARON_CADDY_ADMIN_API` | `http://localhost:2019` | Internal URL for Caddy API (`CPM_CADDY_ADMIN_API` supported for backward compatibility). |
+| `CHARON_CADDY_ADMIN_API` | `http://localhost:2019` | Internal URL for Caddy API (`CPM_CADDY_ADMIN_API` supported for backward compatibility). Must resolve to an internal allowlisted host on port `2019`. |
| `CHARON_CADDY_CONFIG_ROOT` | `/config` | Path to Caddy autosave configuration directory. |
| `CHARON_CADDY_LOG_DIR` | `/var/log/caddy` | Directory for Caddy access logs. |
| `CHARON_CROWDSEC_LOG_DIR` | `/var/log/crowdsec` | Directory for CrowdSec logs. |
@@ -218,6 +218,8 @@ environment:
- CPM_CADDY_ADMIN_API=http://your-caddy-host:2019
```
+If using a non-localhost internal hostname, add it to `CHARON_SSRF_INTERNAL_HOST_ALLOWLIST`.
+
**Warning**: Charon will replace Caddy's entire configuration. Backup first!
## Performance Tuning
diff --git a/.docker/compose/docker-compose.dev.yml b/.docker/compose/docker-compose.dev.yml
index 9816fb1a..dde0b8d8 100644
--- a/.docker/compose/docker-compose.dev.yml
+++ b/.docker/compose/docker-compose.dev.yml
@@ -32,6 +32,8 @@ services:
#- CPM_SECURITY_RATELIMIT_ENABLED=false
#- CPM_SECURITY_ACL_ENABLED=false
- FEATURE_CERBERUS_ENABLED=true
+ # Docker socket group access: copy docker-compose.override.example.yml
+ # to docker-compose.override.yml and set your host's docker GID.
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro # For local container discovery
- crowdsec_data:/app/data/crowdsec
diff --git a/.docker/compose/docker-compose.local.yml b/.docker/compose/docker-compose.local.yml
index af941ce2..a7c0f73d 100644
--- a/.docker/compose/docker-compose.local.yml
+++ b/.docker/compose/docker-compose.local.yml
@@ -27,6 +27,8 @@ services:
- FEATURE_CERBERUS_ENABLED=true
# Emergency "break-glass" token for security reset when ACL blocks access
- CHARON_EMERGENCY_TOKEN=03e4682c1164f0c1cb8e17c99bd1a2d9156b59824dde41af3bb67c513e5c5e92
+ # Docker socket group access: copy docker-compose.override.example.yml
+ # to docker-compose.override.yml and set your host's docker GID.
extra_hosts:
- "host.docker.internal:host-gateway"
cap_add:
diff --git a/.docker/compose/docker-compose.override.example.yml b/.docker/compose/docker-compose.override.example.yml
new file mode 100644
index 00000000..90edc835
--- /dev/null
+++ b/.docker/compose/docker-compose.override.example.yml
@@ -0,0 +1,26 @@
+# Docker Compose override — copy to docker-compose.override.yml to activate.
+#
+# Use case: grant the container access to the host Docker socket so that
+# Charon can discover running containers.
+#
+# 1. cp docker-compose.override.example.yml docker-compose.override.yml
+# 2. Uncomment the service that matches your compose file:
+# - "charon" for docker-compose.local.yml
+# - "app" for docker-compose.dev.yml
+# 3. Replace with the output of: stat -c '%g' /var/run/docker.sock
+# 4. docker compose up -d
+
+services:
+ # Uncomment for docker-compose.local.yml
+ charon:
+ group_add:
+ - "" # e.g. "988" — run: stat -c '%g' /var/run/docker.sock
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock:ro
+
+ # Uncomment for docker-compose.dev.yml
+ app:
+ group_add:
+ - "" # e.g. "988" — run: stat -c '%g' /var/run/docker.sock
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock:ro
diff --git a/.docker/compose/docker-compose.playwright-ci.yml b/.docker/compose/docker-compose.playwright-ci.yml
index 0a0e4606..94e7d5a3 100644
--- a/.docker/compose/docker-compose.playwright-ci.yml
+++ b/.docker/compose/docker-compose.playwright-ci.yml
@@ -85,6 +85,7 @@ services:
- playwright_data:/app/data
- playwright_caddy_data:/data
- playwright_caddy_config:/config
+ - /var/run/docker.sock:/var/run/docker.sock:ro # For container discovery in tests
healthcheck:
test: ["CMD", "curl", "-sf", "http://localhost:8080/api/v1/health"]
interval: 5s
@@ -111,6 +112,7 @@ services:
volumes:
- playwright_crowdsec_data:/var/lib/crowdsec/data
- playwright_crowdsec_config:/etc/crowdsec
+ - /var/run/docker.sock:/var/run/docker.sock:ro # For container discovery in tests
healthcheck:
test: ["CMD", "cscli", "version"]
interval: 10s
diff --git a/.docker/compose/docker-compose.playwright-local.yml b/.docker/compose/docker-compose.playwright-local.yml
index a752693f..735fe6b6 100644
--- a/.docker/compose/docker-compose.playwright-local.yml
+++ b/.docker/compose/docker-compose.playwright-local.yml
@@ -49,6 +49,8 @@ services:
# True tmpfs for E2E test data - fresh on every run, in-memory only
# mode=1777 allows any user to write (container runs as non-root)
- /app/data:size=100M,mode=1777
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock:ro # For container discovery in tests
healthcheck:
test: ["CMD-SHELL", "curl -fsS http://localhost:8080/api/v1/health || exit 1"]
interval: 5s
diff --git a/.docker/docker-entrypoint.sh b/.docker/docker-entrypoint.sh
index 0a786b50..a5e74e7e 100755
--- a/.docker/docker-entrypoint.sh
+++ b/.docker/docker-entrypoint.sh
@@ -27,30 +27,24 @@ get_group_by_gid() {
}
create_group_with_gid() {
- local gid="$1"
- local name="$2"
-
if command -v addgroup >/dev/null 2>&1; then
- addgroup -g "$gid" "$name" 2>/dev/null || true
+ addgroup -g "$1" "$2" 2>/dev/null || true
return
fi
if command -v groupadd >/dev/null 2>&1; then
- groupadd -g "$gid" "$name" 2>/dev/null || true
+ groupadd -g "$1" "$2" 2>/dev/null || true
fi
}
add_user_to_group() {
- local user="$1"
- local group="$2"
-
if command -v addgroup >/dev/null 2>&1; then
- addgroup "$user" "$group" 2>/dev/null || true
+ addgroup "$1" "$2" 2>/dev/null || true
return
fi
if command -v usermod >/dev/null 2>&1; then
- usermod -aG "$group" "$user" 2>/dev/null || true
+ usermod -aG "$2" "$1" 2>/dev/null || true
fi
}
@@ -142,8 +136,15 @@ if [ -S "/var/run/docker.sock" ] && is_root; then
fi
fi
elif [ -S "/var/run/docker.sock" ]; then
- echo "Note: Docker socket mounted but container is running non-root; skipping docker.sock group setup."
- echo " If Docker discovery is needed, run with matching group permissions (e.g., --group-add)"
+ DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo "unknown")
+ echo "Note: Docker socket mounted (GID=$DOCKER_SOCK_GID) but container is running non-root; skipping docker.sock group setup."
+ echo " If Docker discovery is needed, add 'group_add: [\"$DOCKER_SOCK_GID\"]' to your compose service."
+ if [ "$DOCKER_SOCK_GID" = "0" ]; then
+ if [ "${ALLOW_DOCKER_SOCK_GID_0:-false}" != "true" ]; then
+ echo "⚠️ WARNING: Docker socket GID is 0 (root group). group_add: [\"0\"] grants root-group access."
+ echo " Set ALLOW_DOCKER_SOCK_GID_0=true to acknowledge this risk."
+ fi
+ fi
else
echo "Note: Docker socket not found. Docker container discovery will be unavailable."
fi
@@ -191,7 +192,7 @@ if command -v cscli >/dev/null; then
echo "Initializing persistent CrowdSec configuration..."
# Check if .dist has content
- if [ -d "/etc/crowdsec.dist" ] && [ -n "$(ls -A /etc/crowdsec.dist 2>/dev/null)" ]; then
+ if [ -d "/etc/crowdsec.dist" ] && find /etc/crowdsec.dist -mindepth 1 -maxdepth 1 -print -quit 2>/dev/null | grep -q .; then
echo "Copying config from /etc/crowdsec.dist..."
if ! cp -r /etc/crowdsec.dist/* "$CS_CONFIG_DIR/"; then
echo "ERROR: Failed to copy config from /etc/crowdsec.dist"
@@ -208,7 +209,7 @@ if command -v cscli >/dev/null; then
exit 1
fi
echo "✓ Successfully initialized config from .dist directory"
- elif [ -d "/etc/crowdsec" ] && [ ! -L "/etc/crowdsec" ] && [ -n "$(ls -A /etc/crowdsec 2>/dev/null)" ]; then
+ elif [ -d "/etc/crowdsec" ] && [ ! -L "/etc/crowdsec" ] && find /etc/crowdsec -mindepth 1 -maxdepth 1 -print -quit 2>/dev/null | grep -q .; then
echo "Copying config from /etc/crowdsec (fallback)..."
if ! cp -r /etc/crowdsec/* "$CS_CONFIG_DIR/"; then
echo "ERROR: Failed to copy config from /etc/crowdsec (fallback)"
@@ -248,7 +249,7 @@ if command -v cscli >/dev/null; then
echo "Expected: /etc/crowdsec -> /app/data/crowdsec/config"
echo "This indicates a critical build-time issue. Symlink must be created at build time as root."
echo "DEBUG: Directory check:"
- ls -la /etc/ | grep crowdsec || echo " (no crowdsec entry found)"
+ find /etc -mindepth 1 -maxdepth 1 -name '*crowdsec*' -exec ls -ld {} \; 2>/dev/null || echo " (no crowdsec entry found)"
exit 1
fi
diff --git a/.github/agents/Backend_Dev.agent.md b/.github/agents/Backend_Dev.agent.md
index 0f94d44f..667ee509 100644
--- a/.github/agents/Backend_Dev.agent.md
+++ b/.github/agents/Backend_Dev.agent.md
@@ -2,9 +2,9 @@
name: 'Backend Dev'
description: 'Senior Go Engineer focused on high-performance, secure backend implementation.'
argument-hint: 'The specific backend task from the Plan (e.g., "Implement ProxyHost CRUD endpoints")'
-tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/openIntegratedBrowser, vscode/runCommand, vscode/askQuestions, vscode/vscodeAPI, execute, read, agent, 'github/*', 'github/*', 'io.github.goreleaser/mcp/*', edit, search, web, 'github/*', 'playwright/*', todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, ''
+tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, agent/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment
+
-model: GPT-5.3-Codex (copilot)
target: vscode
user-invocable: true
disable-model-invocation: false
diff --git a/.github/agents/DevOps.agent.md b/.github/agents/DevOps.agent.md
index 354b936d..dcd2f435 100644
--- a/.github/agents/DevOps.agent.md
+++ b/.github/agents/DevOps.agent.md
@@ -2,9 +2,8 @@
name: 'DevOps'
description: 'DevOps specialist for CI/CD pipelines, deployment debugging, and GitOps workflows focused on making deployments boring and reliable'
argument-hint: 'The CI/CD or infrastructure task (e.g., "Debug failing GitHub Action workflow")'
-tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/openIntegratedBrowser, vscode/runCommand, vscode/askQuestions, vscode/vscodeAPI, execute, read, agent, 'github/*', 'github/*', 'io.github.goreleaser/mcp/*', edit, search, web, 'github/*', 'playwright/*', todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, ''
+tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, agent/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment
-model: GPT-5.3-Codex (copilot)
target: vscode
user-invocable: true
disable-model-invocation: false
diff --git a/.github/agents/Doc_Writer.agent.md b/.github/agents/Doc_Writer.agent.md
index cca99c0f..fa7bad80 100644
--- a/.github/agents/Doc_Writer.agent.md
+++ b/.github/agents/Doc_Writer.agent.md
@@ -2,9 +2,8 @@
name: 'Docs Writer'
description: 'User Advocate and Writer focused on creating simple, layman-friendly documentation.'
argument-hint: 'The feature to document (e.g., "Write the guide for the new Real-Time Logs")'
-tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/openIntegratedBrowser, vscode/runCommand, vscode/askQuestions, vscode/vscodeAPI, execute, read, agent, 'github/*', 'github/*', 'io.github.goreleaser/mcp/*', edit, search, web, 'github/*', 'playwright/*', todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, ''
+tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, agent/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment
-model: GPT-5.3-Codex (copilot)
target: vscode
user-invocable: true
disable-model-invocation: false
diff --git a/.github/agents/Frontend_Dev.agent.md b/.github/agents/Frontend_Dev.agent.md
index 61153063..040f6984 100644
--- a/.github/agents/Frontend_Dev.agent.md
+++ b/.github/agents/Frontend_Dev.agent.md
@@ -2,9 +2,9 @@
name: 'Frontend Dev'
description: 'Senior React/TypeScript Engineer for frontend implementation.'
argument-hint: 'The frontend feature or component to implement (e.g., "Implement the Real-Time Logs dashboard component")'
-tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/openIntegratedBrowser, vscode/runCommand, vscode/askQuestions, vscode/vscodeAPI, execute, read, agent, 'github/*', 'github/*', 'io.github.goreleaser/mcp/*', edit, search, web, 'github/*', 'playwright/*', todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, ''
+tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, agent/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment
+
-model: GPT-5.3-Codex (copilot)
target: vscode
user-invocable: true
disable-model-invocation: false
diff --git a/.github/agents/Management.agent.md b/.github/agents/Management.agent.md
index f5c5f9c9..07eff5b2 100644
--- a/.github/agents/Management.agent.md
+++ b/.github/agents/Management.agent.md
@@ -3,9 +3,9 @@ name: 'Management'
description: 'Engineering Director. Delegates ALL research and execution. DO NOT ask it to debug code directly.'
argument-hint: 'The high-level goal (e.g., "Build the new Proxy Host Dashboard widget")'
-tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/openIntegratedBrowser, vscode/runCommand, vscode/askQuestions, vscode/vscodeAPI, execute, read, agent, 'github/*', 'github/*', 'io.github.goreleaser/mcp/*', edit, search, web, 'github/*', '', 'playwright/*', todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment
+tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, agent/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment
+
-model: GPT-5.3-Codex (copilot)
target: vscode
user-invocable: true
disable-model-invocation: false
diff --git a/.github/agents/Planning.agent.md b/.github/agents/Planning.agent.md
index ed5b58ef..76705698 100644
--- a/.github/agents/Planning.agent.md
+++ b/.github/agents/Planning.agent.md
@@ -2,9 +2,9 @@
name: 'Planning'
description: 'Principal Architect for technical planning and design decisions.'
argument-hint: 'The feature or system to plan (e.g., "Design the architecture for Real-Time Logs")'
-tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/openIntegratedBrowser, vscode/runCommand, vscode/askQuestions, vscode/vscodeAPI, execute, read, agent, 'github/*', 'github/*', 'io.github.goreleaser/mcp/*', edit, search, web, 'github/*', 'playwright/*', todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment , ''
+tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, agent/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment
+
-model: GPT-5.3-Codex (copilot)
target: vscode
user-invocable: true
disable-model-invocation: false
diff --git a/.github/agents/Playwright_Dev.agent.md b/.github/agents/Playwright_Dev.agent.md
index 730b9894..0de32a1c 100644
--- a/.github/agents/Playwright_Dev.agent.md
+++ b/.github/agents/Playwright_Dev.agent.md
@@ -3,9 +3,9 @@ name: 'Playwright Dev'
description: 'E2E Testing Specialist for Playwright test automation.'
argument-hint: 'The feature or flow to test (e.g., "Write E2E tests for the login flow")'
-tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/openIntegratedBrowser, vscode/runCommand, vscode/askQuestions, vscode/vscodeAPI, execute, read, agent, 'github/*', 'github/*', 'io.github.goreleaser/mcp/*', edit, search, web, 'github/*', '', 'playwright/*', todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment
+tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, agent/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment
+
-model: GPT-5.3-Codex (copilot)
target: vscode
user-invocable: true
disable-model-invocation: false
diff --git a/.github/agents/QA_Security.agent.md b/.github/agents/QA_Security.agent.md
index 0160dc65..ab96aaea 100644
--- a/.github/agents/QA_Security.agent.md
+++ b/.github/agents/QA_Security.agent.md
@@ -2,9 +2,9 @@
name: 'QA Security'
description: 'Quality Assurance and Security Engineer for testing and vulnerability assessment.'
argument-hint: 'The component or feature to test (e.g., "Run security scan on authentication endpoints")'
-tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/openIntegratedBrowser, vscode/runCommand, vscode/askQuestions, vscode/vscodeAPI, execute, read, agent, 'github/*', 'github/*', 'io.github.goreleaser/mcp/*', edit, search, web, 'github/*', 'playwright/*', todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, ''
+tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, agent/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment
+
-model: GPT-5.3-Codex (copilot)
target: vscode
user-invocable: true
disable-model-invocation: false
diff --git a/.github/agents/Supervisor.agent.md b/.github/agents/Supervisor.agent.md
index c3d2527c..32d026cd 100644
--- a/.github/agents/Supervisor.agent.md
+++ b/.github/agents/Supervisor.agent.md
@@ -3,9 +3,8 @@ name: 'Supervisor'
description: 'Code Review Lead for quality assurance and PR review.'
argument-hint: 'The PR or code change to review (e.g., "Review PR #123 for security issues")'
-tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/openIntegratedBrowser, vscode/runCommand, vscode/askQuestions, vscode/vscodeAPI, execute, read, agent, 'github/*', 'github/*', 'io.github.goreleaser/mcp/*', edit, search, web, 'github/*', 'playwright/*', '', vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, todo
+tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, agent/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment
-model: GPT-5.3-Codex (copilot)
target: vscode
user-invocable: true
disable-model-invocation: false
diff --git a/.github/security-severity-policy.yml b/.github/security-severity-policy.yml
new file mode 100644
index 00000000..81860a2a
--- /dev/null
+++ b/.github/security-severity-policy.yml
@@ -0,0 +1,55 @@
+version: 1
+effective_date: 2026-02-25
+scope:
+ - local pre-commit manual security hooks
+ - github actions security workflows
+
+defaults:
+ blocking:
+ - critical
+ - high
+ medium:
+ mode: risk-based
+ default_action: report
+ require_sla: true
+ default_sla_days: 14
+ escalation:
+ trigger: high-signal class or repeated finding
+ action: require issue + owner + due date
+ low:
+ action: report
+
+codeql:
+ severity_mapping:
+ error: high_or_critical
+ warning: medium_or_lower
+ note: informational
+ blocking_levels:
+ - error
+ warning_policy:
+ default_action: report
+ escalation_high_signal_rule_ids:
+ - go/request-forgery
+ - js/missing-rate-limiting
+ - js/insecure-randomness
+
+trivy:
+ blocking_severities:
+ - CRITICAL
+ - HIGH
+ medium_policy:
+ action: report
+ escalation: issue-with-sla
+
+grype:
+ blocking_severities:
+ - Critical
+ - High
+ medium_policy:
+ action: report
+ escalation: issue-with-sla
+
+enforcement_contract:
+ codeql_local_vs_ci: "local and ci block on codeql error-level findings only"
+ supply_chain_medium: "medium vulnerabilities are non-blocking by default and require explicit triage"
+ auth_regression_guard: "state-changing routes must remain protected by auth middleware"
diff --git a/.github/skills/test-backend-coverage-scripts/run.sh b/.github/skills/test-backend-coverage-scripts/run.sh
index 01b62efd..c707d78a 100755
--- a/.github/skills/test-backend-coverage-scripts/run.sh
+++ b/.github/skills/test-backend-coverage-scripts/run.sh
@@ -32,7 +32,7 @@ cd "${PROJECT_ROOT}"
validate_project_structure "backend" "scripts/go-test-coverage.sh" || error_exit "Invalid project structure"
# Set default environment variables
-set_default_env "CHARON_MIN_COVERAGE" "85"
+set_default_env "CHARON_MIN_COVERAGE" "87"
set_default_env "PERF_MAX_MS_GETSTATUS_P95" "25ms"
set_default_env "PERF_MAX_MS_GETSTATUS_P95_PARALLEL" "50ms"
set_default_env "PERF_MAX_MS_LISTDECISIONS_P95" "75ms"
diff --git a/.github/skills/test-frontend-coverage-scripts/run.sh b/.github/skills/test-frontend-coverage-scripts/run.sh
index fb81959c..90afa0e0 100755
--- a/.github/skills/test-frontend-coverage-scripts/run.sh
+++ b/.github/skills/test-frontend-coverage-scripts/run.sh
@@ -32,7 +32,7 @@ cd "${PROJECT_ROOT}"
validate_project_structure "frontend" "scripts/frontend-test-coverage.sh" || error_exit "Invalid project structure"
# Set default environment variables
-set_default_env "CHARON_MIN_COVERAGE" "85"
+set_default_env "CHARON_MIN_COVERAGE" "87"
# Execute the legacy script
log_step "EXECUTION" "Running frontend tests with coverage"
diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
index 560ce655..9b5b155b 100644
--- a/.github/workflows/benchmark.yml
+++ b/.github/workflows/benchmark.yml
@@ -3,6 +3,8 @@ name: Go Benchmark
on:
pull_request:
push:
+ branches:
+ - main
workflow_dispatch:
concurrency:
@@ -33,7 +35,7 @@ jobs:
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
- name: Set up Go
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
+ uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version: ${{ env.GO_VERSION }}
cache-dependency-path: backend/go.sum
diff --git a/.github/workflows/codecov-upload.yml b/.github/workflows/codecov-upload.yml
index b811a060..7eb29ca9 100644
--- a/.github/workflows/codecov-upload.yml
+++ b/.github/workflows/codecov-upload.yml
@@ -3,6 +3,8 @@ name: Upload Coverage to Codecov
on:
pull_request:
push:
+ branches:
+ - main
workflow_dispatch:
inputs:
run_backend:
@@ -17,7 +19,7 @@ on:
type: boolean
concurrency:
- group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.run_id }}
+ group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
@@ -43,7 +45,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Go
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
+ uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version: ${{ env.GO_VERSION }}
cache-dependency-path: backend/go.sum
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index e8277c11..29529967 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -4,7 +4,7 @@ on:
pull_request:
branches: [main, nightly, development]
push:
- branches: [main, nightly, development, 'feature/**', 'fix/**']
+ branches: [main]
workflow_dispatch:
schedule:
- cron: '0 3 * * 1' # Mondays 03:00 UTC
@@ -57,7 +57,7 @@ jobs:
- name: Setup Go
if: matrix.language == 'go'
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
+ uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version: 1.26.0
cache-dependency-path: backend/go.sum
@@ -122,10 +122,28 @@ jobs:
exit 1
fi
+ # shellcheck disable=SC2016
+ EFFECTIVE_LEVELS_JQ='[
+ .runs[] as $run
+ | $run.results[]
+ | . as $result
+ | ($run.tool.driver.rules // []) as $rules
+ | ((
+ $result.level
+ // (if (($result.ruleIndex | type) == "number") then ($rules[$result.ruleIndex].defaultConfiguration.level // empty) else empty end)
+ // ([
+ $rules[]?
+ | select((.id // "") == ($result.ruleId // ""))
+ | (.defaultConfiguration.level // empty)
+ ][0] // empty)
+ // ""
+ ) | ascii_downcase)
+ ]'
+
echo "Found SARIF file: $SARIF_FILE"
- ERROR_COUNT=$(jq '[.runs[].results[] | select(.level == "error")] | length' "$SARIF_FILE")
- WARNING_COUNT=$(jq '[.runs[].results[] | select(.level == "warning")] | length' "$SARIF_FILE")
- NOTE_COUNT=$(jq '[.runs[].results[] | select(.level == "note")] | length' "$SARIF_FILE")
+ ERROR_COUNT=$(jq -r "${EFFECTIVE_LEVELS_JQ} | map(select(. == \"error\")) | length" "$SARIF_FILE")
+ WARNING_COUNT=$(jq -r "${EFFECTIVE_LEVELS_JQ} | map(select(. == \"warning\")) | length" "$SARIF_FILE")
+ NOTE_COUNT=$(jq -r "${EFFECTIVE_LEVELS_JQ} | map(select(. == \"note\")) | length" "$SARIF_FILE")
{
echo "**Findings:**"
@@ -135,14 +153,32 @@ jobs:
echo ""
if [ "$ERROR_COUNT" -gt 0 ]; then
- echo "❌ **CRITICAL:** High-severity security issues found!"
+ echo "❌ **BLOCKING:** CodeQL error-level security issues found"
echo ""
echo "### Top Issues:"
echo '```'
- jq -r '.runs[].results[] | select(.level == "error") | "\(.ruleId): \(.message.text)"' "$SARIF_FILE" | head -5
+ # shellcheck disable=SC2016
+ jq -r '
+ .runs[] as $run
+ | $run.results[]
+ | . as $result
+ | ($run.tool.driver.rules // []) as $rules
+ | ((
+ $result.level
+ // (if (($result.ruleIndex | type) == "number") then ($rules[$result.ruleIndex].defaultConfiguration.level // empty) else empty end)
+ // ([
+ $rules[]?
+ | select((.id // "") == ($result.ruleId // ""))
+ | (.defaultConfiguration.level // empty)
+ ][0] // empty)
+ // ""
+ ) | ascii_downcase) as $effectiveLevel
+ | select($effectiveLevel == "error")
+ | "\($effectiveLevel): \($result.ruleId // \"\"): \($result.message.text)"
+ ' "$SARIF_FILE" | head -5
echo '```'
else
- echo "✅ No high-severity issues found"
+ echo "✅ No blocking CodeQL issues found"
fi
} >> "$GITHUB_STEP_SUMMARY"
@@ -169,9 +205,26 @@ jobs:
exit 1
fi
- ERROR_COUNT=$(jq '[.runs[].results[] | select(.level == "error")] | length' "$SARIF_FILE")
+ # shellcheck disable=SC2016
+ ERROR_COUNT=$(jq -r '[
+ .runs[] as $run
+ | $run.results[]
+ | . as $result
+ | ($run.tool.driver.rules // []) as $rules
+ | ((
+ $result.level
+ // (if (($result.ruleIndex | type) == "number") then ($rules[$result.ruleIndex].defaultConfiguration.level // empty) else empty end)
+ // ([
+ $rules[]?
+ | select((.id // "") == ($result.ruleId // ""))
+ | (.defaultConfiguration.level // empty)
+ ][0] // empty)
+ // ""
+ ) | ascii_downcase) as $effectiveLevel
+ | select($effectiveLevel == "error")
+ ] | length' "$SARIF_FILE")
if [ "$ERROR_COUNT" -gt 0 ]; then
- echo "::error::CodeQL found $ERROR_COUNT high-severity security issues. Fix before merging."
+ echo "::error::CodeQL found $ERROR_COUNT blocking findings (effective-level=error). Fix before merging. Policy: .github/security-severity-policy.yml"
exit 1
fi
diff --git a/.github/workflows/container-prune.yml b/.github/workflows/container-prune.yml
index 771282e5..7008e327 100644
--- a/.github/workflows/container-prune.yml
+++ b/.github/workflows/container-prune.yml
@@ -5,10 +5,6 @@ on:
- cron: '0 3 * * 0' # Weekly: Sundays at 03:00 UTC
workflow_dispatch:
inputs:
- registries:
- description: 'Comma-separated registries to prune (ghcr,dockerhub)'
- required: false
- default: 'ghcr,dockerhub'
keep_days:
description: 'Number of days to retain images (unprotected)'
required: false
@@ -27,16 +23,17 @@ permissions:
contents: read
jobs:
- prune:
+ prune-ghcr:
runs-on: ubuntu-latest
env:
OWNER: ${{ github.repository_owner }}
IMAGE_NAME: charon
- REGISTRIES: ${{ github.event.inputs.registries || 'ghcr,dockerhub' }}
KEEP_DAYS: ${{ github.event.inputs.keep_days || '30' }}
KEEP_LAST_N: ${{ github.event.inputs.keep_last_n || '30' }}
- DRY_RUN: ${{ github.event.inputs.dry_run || 'false' }}
- PROTECTED_REGEX: '["^v","^latest$","^main$","^develop$"]'
+ DRY_RUN: ${{ github.event_name == 'pull_request' && 'true' || github.event.inputs.dry_run || 'false' }}
+ PROTECTED_REGEX: '["^v?[0-9]+\\.[0-9]+\\.[0-9]+$","^latest$","^main$","^develop$"]'
+ PRUNE_UNTAGGED: 'true'
+ PRUNE_SBOM_TAGS: 'true'
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
@@ -45,21 +42,19 @@ jobs:
run: |
sudo apt-get update && sudo apt-get install -y jq curl
- - name: Run container prune
+ - name: Run GHCR prune
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
- DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
run: |
- chmod +x scripts/prune-container-images.sh
- ./scripts/prune-container-images.sh 2>&1 | tee prune-${{ github.run_id }}.log
+ chmod +x scripts/prune-ghcr.sh
+ ./scripts/prune-ghcr.sh 2>&1 | tee prune-ghcr-${{ github.run_id }}.log
- - name: Summarize prune results (space reclaimed)
- if: ${{ always() }}
+ - name: Summarize GHCR results
+ if: always()
run: |
set -euo pipefail
- SUMMARY_FILE=prune-summary.env
- LOG_FILE=prune-${{ github.run_id }}.log
+ SUMMARY_FILE=prune-summary-ghcr.env
+ LOG_FILE=prune-ghcr-${{ github.run_id }}.log
human() {
local bytes=${1:-0}
@@ -67,7 +62,7 @@ jobs:
echo "0 B"
return
fi
- awk -v b="$bytes" 'function human(x){ split("B KiB MiB GiB TiB",u," "); i=0; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1]} END{human(b)}'
+ awk -v b="$bytes" 'BEGIN { split("B KiB MiB GiB TiB",u," "); i=0; x=b; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1] }'
}
if [ -f "$SUMMARY_FILE" ]; then
@@ -77,34 +72,155 @@ jobs:
TOTAL_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
{
- echo "## Container prune summary"
+ echo "## GHCR prune summary"
echo "- candidates: ${TOTAL_CANDIDATES} (≈ $(human "${TOTAL_CANDIDATES_BYTES}"))"
echo "- deleted: ${TOTAL_DELETED} (≈ $(human "${TOTAL_DELETED_BYTES}"))"
} >> "$GITHUB_STEP_SUMMARY"
-
- printf 'PRUNE_SUMMARY: candidates=%s candidates_bytes=%s deleted=%s deleted_bytes=%s\n' \
- "${TOTAL_CANDIDATES}" "${TOTAL_CANDIDATES_BYTES}" "${TOTAL_DELETED}" "${TOTAL_DELETED_BYTES}"
- echo "Deleted approximately: $(human "${TOTAL_DELETED_BYTES}")"
- echo "space_saved=$(human "${TOTAL_DELETED_BYTES}")" >> "$GITHUB_OUTPUT"
else
deleted_bytes=$(grep -oE '\( *approx +[0-9]+ bytes\)' "$LOG_FILE" | sed -E 's/.*approx +([0-9]+) bytes.*/\1/' | awk '{s+=$1} END {print s+0}' || true)
deleted_count=$(grep -cE 'deleting |DRY RUN: would delete' "$LOG_FILE" || true)
{
- echo "## Container prune summary"
+ echo "## GHCR prune summary"
echo "- deleted (approx): ${deleted_count} (≈ $(human "${deleted_bytes}"))"
} >> "$GITHUB_STEP_SUMMARY"
-
- printf 'PRUNE_SUMMARY: deleted_approx=%s deleted_bytes=%s\n' "${deleted_count}" "${deleted_bytes}"
- echo "Deleted approximately: $(human "${deleted_bytes}")"
- echo "space_saved=$(human "${deleted_bytes}")" >> "$GITHUB_OUTPUT"
fi
- - name: Upload prune artifacts
- if: ${{ always() }}
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
+ - name: Upload GHCR prune artifacts
+ if: always()
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
- name: prune-log-${{ github.run_id }}
+ name: prune-ghcr-log-${{ github.run_id }}
path: |
- prune-${{ github.run_id }}.log
- prune-summary.env
+ prune-ghcr-${{ github.run_id }}.log
+ prune-summary-ghcr.env
+
+ prune-dockerhub:
+ runs-on: ubuntu-latest
+ env:
+ OWNER: ${{ github.repository_owner }}
+ IMAGE_NAME: charon
+ KEEP_DAYS: ${{ github.event.inputs.keep_days || '30' }}
+ KEEP_LAST_N: ${{ github.event.inputs.keep_last_n || '30' }}
+ DRY_RUN: ${{ github.event_name == 'pull_request' && 'true' || github.event.inputs.dry_run || 'false' }}
+ PROTECTED_REGEX: '["^v?[0-9]+\\.[0-9]+\\.[0-9]+$","^latest$","^main$","^develop$"]'
+ steps:
+ - name: Checkout
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
+
+ - name: Install tools
+ run: |
+ sudo apt-get update && sudo apt-get install -y jq curl
+
+ - name: Run Docker Hub prune
+ env:
+ DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
+ run: |
+ chmod +x scripts/prune-dockerhub.sh
+ ./scripts/prune-dockerhub.sh 2>&1 | tee prune-dockerhub-${{ github.run_id }}.log
+
+ - name: Summarize Docker Hub results
+ if: always()
+ run: |
+ set -euo pipefail
+ SUMMARY_FILE=prune-summary-dockerhub.env
+ LOG_FILE=prune-dockerhub-${{ github.run_id }}.log
+
+ human() {
+ local bytes=${1:-0}
+ if [ -z "$bytes" ] || [ "$bytes" -eq 0 ]; then
+ echo "0 B"
+ return
+ fi
+ awk -v b="$bytes" 'BEGIN { split("B KiB MiB GiB TiB",u," "); i=0; x=b; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1] }'
+ }
+
+ if [ -f "$SUMMARY_FILE" ]; then
+ TOTAL_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
+ TOTAL_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
+ TOTAL_DELETED=$(grep -E '^TOTAL_DELETED=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
+ TOTAL_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
+
+ {
+ echo "## Docker Hub prune summary"
+ echo "- candidates: ${TOTAL_CANDIDATES} (≈ $(human "${TOTAL_CANDIDATES_BYTES}"))"
+ echo "- deleted: ${TOTAL_DELETED} (≈ $(human "${TOTAL_DELETED_BYTES}"))"
+ } >> "$GITHUB_STEP_SUMMARY"
+ else
+ deleted_bytes=$(grep -oE '\( *approx +[0-9]+ bytes\)' "$LOG_FILE" | sed -E 's/.*approx +([0-9]+) bytes.*/\1/' | awk '{s+=$1} END {print s+0}' || true)
+ deleted_count=$(grep -cE 'deleting |DRY RUN: would delete' "$LOG_FILE" || true)
+
+ {
+ echo "## Docker Hub prune summary"
+ echo "- deleted (approx): ${deleted_count} (≈ $(human "${deleted_bytes}"))"
+ } >> "$GITHUB_STEP_SUMMARY"
+ fi
+
+ - name: Upload Docker Hub prune artifacts
+ if: always()
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
+ with:
+ name: prune-dockerhub-log-${{ github.run_id }}
+ path: |
+ prune-dockerhub-${{ github.run_id }}.log
+ prune-summary-dockerhub.env
+
+ summarize:
+ runs-on: ubuntu-latest
+ needs: [prune-ghcr, prune-dockerhub]
+ if: always()
+ steps:
+ - name: Download all artifacts
+ uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
+ with:
+ pattern: prune-*-log-${{ github.run_id }}
+ merge-multiple: true
+
+ - name: Combined summary
+ run: |
+ set -euo pipefail
+
+ human() {
+ local bytes=${1:-0}
+ if [ -z "$bytes" ] || [ "$bytes" -eq 0 ]; then
+ echo "0 B"
+ return
+ fi
+ awk -v b="$bytes" 'BEGIN { split("B KiB MiB GiB TiB",u," "); i=0; x=b; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1] }'
+ }
+
+ GHCR_CANDIDATES=0 GHCR_CANDIDATES_BYTES=0 GHCR_DELETED=0 GHCR_DELETED_BYTES=0
+ if [ -f prune-summary-ghcr.env ]; then
+ GHCR_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
+ GHCR_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
+ GHCR_DELETED=$(grep -E '^TOTAL_DELETED=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
+ GHCR_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
+ fi
+
+ HUB_CANDIDATES=0 HUB_CANDIDATES_BYTES=0 HUB_DELETED=0 HUB_DELETED_BYTES=0
+ if [ -f prune-summary-dockerhub.env ]; then
+ HUB_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
+ HUB_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
+ HUB_DELETED=$(grep -E '^TOTAL_DELETED=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
+ HUB_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
+ fi
+
+ TOTAL_CANDIDATES=$((GHCR_CANDIDATES + HUB_CANDIDATES))
+ TOTAL_CANDIDATES_BYTES=$((GHCR_CANDIDATES_BYTES + HUB_CANDIDATES_BYTES))
+ TOTAL_DELETED=$((GHCR_DELETED + HUB_DELETED))
+ TOTAL_DELETED_BYTES=$((GHCR_DELETED_BYTES + HUB_DELETED_BYTES))
+
+ {
+ echo "## Combined container prune summary"
+ echo ""
+ echo "| Registry | Candidates | Deleted | Space Reclaimed |"
+ echo "|----------|------------|---------|-----------------|"
+ echo "| GHCR | ${GHCR_CANDIDATES} | ${GHCR_DELETED} | $(human "${GHCR_DELETED_BYTES}") |"
+ echo "| Docker Hub | ${HUB_CANDIDATES} | ${HUB_DELETED} | $(human "${HUB_DELETED_BYTES}") |"
+ echo "| **Total** | **${TOTAL_CANDIDATES}** | **${TOTAL_DELETED}** | **$(human "${TOTAL_DELETED_BYTES}")** |"
+ } >> "$GITHUB_STEP_SUMMARY"
+
+ printf 'PRUNE_SUMMARY: candidates=%s candidates_bytes=%s deleted=%s deleted_bytes=%s\n' \
+ "${TOTAL_CANDIDATES}" "${TOTAL_CANDIDATES_BYTES}" "${TOTAL_DELETED}" "${TOTAL_DELETED_BYTES}"
+ echo "Total space reclaimed: $(human "${TOTAL_DELETED_BYTES}")"
diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml
index f6c11e4b..a6a3f90d 100644
--- a/.github/workflows/docker-build.yml
+++ b/.github/workflows/docker-build.yml
@@ -23,7 +23,11 @@ name: Docker Build, Publish & Test
on:
pull_request:
push:
+ branches: [main]
workflow_dispatch:
+ workflow_run:
+ workflows: ["Docker Lint"]
+ types: [completed]
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
@@ -38,7 +42,7 @@ env:
TRIGGER_HEAD_SHA: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}
TRIGGER_REF: ${{ github.event_name == 'workflow_run' && format('refs/heads/{0}', github.event.workflow_run.head_branch) || github.ref }}
TRIGGER_HEAD_REF: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.head_ref }}
- TRIGGER_PR_NUMBER: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.pull_requests[0].number || github.event.pull_request.number }}
+ TRIGGER_PR_NUMBER: ${{ github.event_name == 'workflow_run' && join(github.event.workflow_run.pull_requests.*.number, '') || github.event.pull_request.number }}
TRIGGER_ACTOR: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.actor.login || github.actor }}
jobs:
@@ -339,7 +343,7 @@ jobs:
- name: Upload Image Artifact
if: success() && steps.skip.outputs.skip_build != 'true' && env.TRIGGER_EVENT == 'pull_request'
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: ${{ env.TRIGGER_EVENT == 'pull_request' && format('pr-image-{0}', env.TRIGGER_PR_NUMBER) || 'push-image' }}
path: /tmp/charon-pr-image.tar
@@ -561,12 +565,13 @@ jobs:
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
with:
sarif_file: 'trivy-results.sarif'
+ category: '.github/workflows/docker-build.yml:build-and-push'
token: ${{ secrets.GITHUB_TOKEN }}
# Generate SBOM (Software Bill of Materials) for supply chain security
# Only for production builds (main/development) - feature branches use downstream supply-chain-pr.yml
- name: Generate SBOM
- uses: anchore/sbom-action@28d71544de8eaf1b958d335707167c5f783590ad # v0.22.2
+ uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
with:
image: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
@@ -575,7 +580,7 @@ jobs:
# Create verifiable attestation for the SBOM
- name: Attest SBOM
- uses: actions/attest-sbom@4651f806c01d8637787e274ac3bdf724ef169f34 # v3.0.0
+ uses: actions/attest-sbom@07e74fc4e78d1aad915e867f9a094073a9f71527 # v4.0.0
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
with:
subject-name: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}
@@ -702,13 +707,47 @@ jobs:
exit-code: '1' # Intended to block, but continued on error for now
continue-on-error: true
- - name: Upload Trivy scan results
+ - name: Check Trivy PR SARIF exists
if: always()
+ id: trivy-pr-check
+ run: |
+ if [ -f trivy-pr-results.sarif ]; then
+ echo "exists=true" >> "$GITHUB_OUTPUT"
+ else
+ echo "exists=false" >> "$GITHUB_OUTPUT"
+ fi
+
+ - name: Upload Trivy scan results
+ if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
with:
sarif_file: 'trivy-pr-results.sarif'
category: 'docker-pr-image'
+ - name: Upload Trivy compatibility results (docker-build category)
+ if: always() && steps.trivy-pr-check.outputs.exists == 'true'
+ uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
+ with:
+ sarif_file: 'trivy-pr-results.sarif'
+ category: '.github/workflows/docker-build.yml:build-and-push'
+ continue-on-error: true
+
+ - name: Upload Trivy compatibility results (docker-publish alias)
+ if: always() && steps.trivy-pr-check.outputs.exists == 'true'
+ uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
+ with:
+ sarif_file: 'trivy-pr-results.sarif'
+ category: '.github/workflows/docker-publish.yml:build-and-push'
+ continue-on-error: true
+
+ - name: Upload Trivy compatibility results (nightly alias)
+ if: always() && steps.trivy-pr-check.outputs.exists == 'true'
+ uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
+ with:
+ sarif_file: 'trivy-pr-results.sarif'
+ category: 'trivy-nightly'
+ continue-on-error: true
+
- name: Create scan summary
if: always()
run: |
diff --git a/.github/workflows/e2e-tests-split.yml b/.github/workflows/e2e-tests-split.yml
index e6d38cdb..73eee00b 100644
--- a/.github/workflows/e2e-tests-split.yml
+++ b/.github/workflows/e2e-tests-split.yml
@@ -80,7 +80,6 @@ on:
default: false
type: boolean
pull_request:
- push:
env:
NODE_VERSION: '20'
@@ -96,7 +95,7 @@ env:
CI_LOG_LEVEL: 'verbose'
concurrency:
- group: e2e-split-${{ github.workflow }}-${{ github.ref }}-${{ github.event.pull_request.head.sha || github.sha }}
+ group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
@@ -143,7 +142,7 @@ jobs:
- name: Set up Go
if: steps.resolve-image.outputs.image_source == 'build'
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
+ uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version: ${{ env.GO_VERSION }}
cache: true
@@ -191,7 +190,7 @@ jobs:
- name: Upload Docker image artifact
if: steps.resolve-image.outputs.image_source == 'build'
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: docker-image
path: charon-e2e-image.tar
@@ -230,6 +229,7 @@ jobs:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
+
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
@@ -247,7 +247,7 @@ jobs:
- name: Download Docker image artifact
if: needs.build.outputs.image_source == 'build'
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
+ uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
with:
name: docker-image
@@ -347,7 +347,7 @@ jobs:
- name: Upload HTML report (Chromium Security)
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: playwright-report-chromium-security
path: playwright-report/
@@ -355,7 +355,7 @@ jobs:
- name: Upload Chromium Security coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: e2e-coverage-chromium-security
path: coverage/e2e/
@@ -363,7 +363,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: traces-chromium-security
path: test-results/**/*.zip
@@ -382,7 +382,7 @@ jobs:
- name: Upload diagnostics
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: e2e-diagnostics-chromium-security
path: diagnostics/
@@ -395,7 +395,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: docker-logs-chromium-security
path: docker-logs-chromium-security.txt
@@ -431,6 +431,7 @@ jobs:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
+
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
@@ -448,7 +449,7 @@ jobs:
- name: Download Docker image artifact
if: needs.build.outputs.image_source == 'build'
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
+ uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
with:
name: docker-image
@@ -556,7 +557,7 @@ jobs:
- name: Upload HTML report (Firefox Security)
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: playwright-report-firefox-security
path: playwright-report/
@@ -564,7 +565,7 @@ jobs:
- name: Upload Firefox Security coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: e2e-coverage-firefox-security
path: coverage/e2e/
@@ -572,7 +573,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: traces-firefox-security
path: test-results/**/*.zip
@@ -591,7 +592,7 @@ jobs:
- name: Upload diagnostics
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: e2e-diagnostics-firefox-security
path: diagnostics/
@@ -604,7 +605,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: docker-logs-firefox-security
path: docker-logs-firefox-security.txt
@@ -640,6 +641,7 @@ jobs:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
+
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
@@ -657,7 +659,7 @@ jobs:
- name: Download Docker image artifact
if: needs.build.outputs.image_source == 'build'
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
+ uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
with:
name: docker-image
@@ -765,7 +767,7 @@ jobs:
- name: Upload HTML report (WebKit Security)
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: playwright-report-webkit-security
path: playwright-report/
@@ -773,7 +775,7 @@ jobs:
- name: Upload WebKit Security coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: e2e-coverage-webkit-security
path: coverage/e2e/
@@ -781,7 +783,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: traces-webkit-security
path: test-results/**/*.zip
@@ -800,7 +802,7 @@ jobs:
- name: Upload diagnostics
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: e2e-diagnostics-webkit-security
path: diagnostics/
@@ -813,7 +815,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: docker-logs-webkit-security
path: docker-logs-webkit-security.txt
@@ -861,6 +863,39 @@ jobs:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
+ - name: Preflight disk diagnostics (before cleanup)
+ run: |
+ echo "Disk usage before cleanup"
+ df -h
+ docker system df || true
+
+ - name: Preflight cleanup (best effort)
+ run: |
+ echo "Best-effort cleanup for CI runner"
+ docker system prune -af || true
+ rm -rf playwright-report playwright-output coverage/e2e test-results diagnostics || true
+ rm -f docker-logs-*.txt charon-e2e-image.tar || true
+
+ - name: Preflight disk diagnostics and threshold gate
+ run: |
+ set -euo pipefail
+ MIN_FREE_BYTES=$((5 * 1024 * 1024 * 1024))
+ echo "Disk usage after cleanup"
+ df -h
+ docker system df || true
+
+ WORKSPACE_PATH="${GITHUB_WORKSPACE:-$PWD}"
+ FREE_ROOT_BYTES=$(df -PB1 / | awk 'NR==2 {print $4}')
+ FREE_WORKSPACE_BYTES=$(df -PB1 "$WORKSPACE_PATH" | awk 'NR==2 {print $4}')
+
+ echo "Free bytes on /: $FREE_ROOT_BYTES"
+ echo "Free bytes on workspace ($WORKSPACE_PATH): $FREE_WORKSPACE_BYTES"
+
+ if [ "$FREE_ROOT_BYTES" -lt "$MIN_FREE_BYTES" ] || [ "$FREE_WORKSPACE_BYTES" -lt "$MIN_FREE_BYTES" ]; then
+ echo "::error::[CI_DISK_PRESSURE] Insufficient free disk after cleanup. Required >= 5GiB on both / and workspace. root=${FREE_ROOT_BYTES}B workspace=${FREE_WORKSPACE_BYTES}B"
+ exit 42
+ fi
+
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
@@ -878,7 +913,7 @@ jobs:
- name: Download Docker image artifact
if: needs.build.outputs.image_source == 'build'
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
+ uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
with:
name: docker-image
@@ -968,7 +1003,7 @@ jobs:
- name: Upload HTML report (Chromium shard ${{ matrix.shard }})
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: playwright-report-chromium-shard-${{ matrix.shard }}
path: playwright-report/
@@ -976,7 +1011,7 @@ jobs:
- name: Upload Playwright output (Chromium shard ${{ matrix.shard }})
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: playwright-output-chromium-shard-${{ matrix.shard }}
path: playwright-output/chromium-shard-${{ matrix.shard }}/
@@ -984,7 +1019,7 @@ jobs:
- name: Upload Chromium coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: e2e-coverage-chromium-shard-${{ matrix.shard }}
path: coverage/e2e/
@@ -992,7 +1027,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: traces-chromium-shard-${{ matrix.shard }}
path: test-results/**/*.zip
@@ -1011,7 +1046,7 @@ jobs:
- name: Upload diagnostics
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: e2e-diagnostics-chromium-shard-${{ matrix.shard }}
path: diagnostics/
@@ -1024,7 +1059,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: docker-logs-chromium-shard-${{ matrix.shard }}
path: docker-logs-chromium-shard-${{ matrix.shard }}.txt
@@ -1065,6 +1100,39 @@ jobs:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
+ - name: Preflight disk diagnostics (before cleanup)
+ run: |
+ echo "Disk usage before cleanup"
+ df -h
+ docker system df || true
+
+ - name: Preflight cleanup (best effort)
+ run: |
+ echo "Best-effort cleanup for CI runner"
+ docker system prune -af || true
+ rm -rf playwright-report playwright-output coverage/e2e test-results diagnostics || true
+ rm -f docker-logs-*.txt charon-e2e-image.tar || true
+
+ - name: Preflight disk diagnostics and threshold gate
+ run: |
+ set -euo pipefail
+ MIN_FREE_BYTES=$((5 * 1024 * 1024 * 1024))
+ echo "Disk usage after cleanup"
+ df -h
+ docker system df || true
+
+ WORKSPACE_PATH="${GITHUB_WORKSPACE:-$PWD}"
+ FREE_ROOT_BYTES=$(df -PB1 / | awk 'NR==2 {print $4}')
+ FREE_WORKSPACE_BYTES=$(df -PB1 "$WORKSPACE_PATH" | awk 'NR==2 {print $4}')
+
+ echo "Free bytes on /: $FREE_ROOT_BYTES"
+ echo "Free bytes on workspace ($WORKSPACE_PATH): $FREE_WORKSPACE_BYTES"
+
+ if [ "$FREE_ROOT_BYTES" -lt "$MIN_FREE_BYTES" ] || [ "$FREE_WORKSPACE_BYTES" -lt "$MIN_FREE_BYTES" ]; then
+ echo "::error::[CI_DISK_PRESSURE] Insufficient free disk after cleanup. Required >= 5GiB on both / and workspace. root=${FREE_ROOT_BYTES}B workspace=${FREE_WORKSPACE_BYTES}B"
+ exit 42
+ fi
+
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
@@ -1082,7 +1150,7 @@ jobs:
- name: Download Docker image artifact
if: needs.build.outputs.image_source == 'build'
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
+ uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
with:
name: docker-image
@@ -1180,7 +1248,7 @@ jobs:
- name: Upload HTML report (Firefox shard ${{ matrix.shard }})
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: playwright-report-firefox-shard-${{ matrix.shard }}
path: playwright-report/
@@ -1188,7 +1256,7 @@ jobs:
- name: Upload Playwright output (Firefox shard ${{ matrix.shard }})
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: playwright-output-firefox-shard-${{ matrix.shard }}
path: playwright-output/firefox-shard-${{ matrix.shard }}/
@@ -1196,7 +1264,7 @@ jobs:
- name: Upload Firefox coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: e2e-coverage-firefox-shard-${{ matrix.shard }}
path: coverage/e2e/
@@ -1204,7 +1272,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: traces-firefox-shard-${{ matrix.shard }}
path: test-results/**/*.zip
@@ -1223,7 +1291,7 @@ jobs:
- name: Upload diagnostics
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: e2e-diagnostics-firefox-shard-${{ matrix.shard }}
path: diagnostics/
@@ -1236,7 +1304,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: docker-logs-firefox-shard-${{ matrix.shard }}
path: docker-logs-firefox-shard-${{ matrix.shard }}.txt
@@ -1277,6 +1345,39 @@ jobs:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
+ - name: Preflight disk diagnostics (before cleanup)
+ run: |
+ echo "Disk usage before cleanup"
+ df -h
+ docker system df || true
+
+ - name: Preflight cleanup (best effort)
+ run: |
+ echo "Best-effort cleanup for CI runner"
+ docker system prune -af || true
+ rm -rf playwright-report playwright-output coverage/e2e test-results diagnostics || true
+ rm -f docker-logs-*.txt charon-e2e-image.tar || true
+
+ - name: Preflight disk diagnostics and threshold gate
+ run: |
+ set -euo pipefail
+ MIN_FREE_BYTES=$((5 * 1024 * 1024 * 1024))
+ echo "Disk usage after cleanup"
+ df -h
+ docker system df || true
+
+ WORKSPACE_PATH="${GITHUB_WORKSPACE:-$PWD}"
+ FREE_ROOT_BYTES=$(df -PB1 / | awk 'NR==2 {print $4}')
+ FREE_WORKSPACE_BYTES=$(df -PB1 "$WORKSPACE_PATH" | awk 'NR==2 {print $4}')
+
+ echo "Free bytes on /: $FREE_ROOT_BYTES"
+ echo "Free bytes on workspace ($WORKSPACE_PATH): $FREE_WORKSPACE_BYTES"
+
+ if [ "$FREE_ROOT_BYTES" -lt "$MIN_FREE_BYTES" ] || [ "$FREE_WORKSPACE_BYTES" -lt "$MIN_FREE_BYTES" ]; then
+ echo "::error::[CI_DISK_PRESSURE] Insufficient free disk after cleanup. Required >= 5GiB on both / and workspace. root=${FREE_ROOT_BYTES}B workspace=${FREE_WORKSPACE_BYTES}B"
+ exit 42
+ fi
+
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
@@ -1294,7 +1395,7 @@ jobs:
- name: Download Docker image artifact
if: needs.build.outputs.image_source == 'build'
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
+ uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
with:
name: docker-image
@@ -1392,7 +1493,7 @@ jobs:
- name: Upload HTML report (WebKit shard ${{ matrix.shard }})
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: playwright-report-webkit-shard-${{ matrix.shard }}
path: playwright-report/
@@ -1400,7 +1501,7 @@ jobs:
- name: Upload Playwright output (WebKit shard ${{ matrix.shard }})
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: playwright-output-webkit-shard-${{ matrix.shard }}
path: playwright-output/webkit-shard-${{ matrix.shard }}/
@@ -1408,7 +1509,7 @@ jobs:
- name: Upload WebKit coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: e2e-coverage-webkit-shard-${{ matrix.shard }}
path: coverage/e2e/
@@ -1416,7 +1517,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: traces-webkit-shard-${{ matrix.shard }}
path: test-results/**/*.zip
@@ -1435,7 +1536,7 @@ jobs:
- name: Upload diagnostics
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: e2e-diagnostics-webkit-shard-${{ matrix.shard }}
path: diagnostics/
@@ -1448,7 +1549,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: docker-logs-webkit-shard-${{ matrix.shard }}
path: docker-logs-webkit-shard-${{ matrix.shard }}.txt
diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml
index 4e7a2da4..2f682686 100644
--- a/.github/workflows/nightly-build.yml
+++ b/.github/workflows/nightly-build.yml
@@ -103,11 +103,12 @@ jobs:
const workflows = [
{ id: 'e2e-tests-split.yml' },
{ id: 'codecov-upload.yml', inputs: { run_backend: 'true', run_frontend: 'true' } },
- { id: 'security-pr.yml' },
{ id: 'supply-chain-verify.yml' },
{ id: 'codeql.yml' },
];
+ core.info('Skipping security-pr.yml: PR-only workflow intentionally excluded from nightly non-PR dispatch');
+
for (const workflow of workflows) {
const { data: workflowRuns } = await github.rest.actions.listWorkflowRuns({
owner,
@@ -220,14 +221,66 @@ jobs:
echo "- ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ steps.build.outputs.digest }}" >> "$GITHUB_STEP_SUMMARY"
- name: Generate SBOM
- uses: anchore/sbom-action@28d71544de8eaf1b958d335707167c5f783590ad # v0.22.2
+ id: sbom_primary
+ continue-on-error: true
+ uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
with:
image: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ steps.build.outputs.digest }}
format: cyclonedx-json
output-file: sbom-nightly.json
+ syft-version: v1.42.1
+
+ - name: Generate SBOM fallback with pinned Syft
+ if: always()
+ run: |
+ set -euo pipefail
+
+ if [[ "${{ steps.sbom_primary.outcome }}" == "success" ]] && [[ -s sbom-nightly.json ]] && jq -e . sbom-nightly.json >/dev/null 2>&1; then
+ echo "Primary SBOM generation succeeded with valid JSON; skipping fallback"
+ exit 0
+ fi
+
+ echo "Primary SBOM generation failed or produced missing/invalid output; using deterministic Syft fallback"
+
+ SYFT_VERSION="v1.42.1"
+ OS="$(uname -s | tr '[:upper:]' '[:lower:]')"
+ ARCH="$(uname -m)"
+ case "$ARCH" in
+ x86_64) ARCH="amd64" ;;
+ aarch64|arm64) ARCH="arm64" ;;
+ *) echo "Unsupported architecture: $ARCH"; exit 1 ;;
+ esac
+
+ TARBALL="syft_${SYFT_VERSION#v}_${OS}_${ARCH}.tar.gz"
+ BASE_URL="https://github.com/anchore/syft/releases/download/${SYFT_VERSION}"
+
+ curl -fsSLo "$TARBALL" "${BASE_URL}/${TARBALL}"
+ curl -fsSLo checksums.txt "${BASE_URL}/syft_${SYFT_VERSION#v}_checksums.txt"
+
+ grep " ${TARBALL}$" checksums.txt > checksum_line.txt
+ sha256sum -c checksum_line.txt
+
+ tar -xzf "$TARBALL" syft
+ chmod +x syft
+
+ ./syft "${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ steps.build.outputs.digest }}" -o cyclonedx-json=sbom-nightly.json
+
+ - name: Verify SBOM artifact
+ if: always()
+ run: |
+ set -euo pipefail
+ test -s sbom-nightly.json
+ jq -e . sbom-nightly.json >/dev/null
+ jq -e '
+ .bomFormat == "CycloneDX"
+ and (.specVersion | type == "string" and length > 0)
+ and has("version")
+ and has("metadata")
+ and (.components | type == "array")
+ ' sbom-nightly.json >/dev/null
- name: Upload SBOM artifact
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: sbom-nightly
path: sbom-nightly.json
@@ -331,7 +384,7 @@ jobs:
run: echo "IMAGE_NAME_LC=${IMAGE_NAME,,}" >> "$GITHUB_ENV"
- name: Download SBOM
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
+ uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
with:
name: sbom-nightly
@@ -355,10 +408,116 @@ jobs:
sarif_file: 'trivy-nightly.sarif'
category: 'trivy-nightly'
- - name: Check for critical CVEs
+ - name: Security severity policy summary
run: |
- if grep -q "CRITICAL" trivy-nightly.sarif; then
- echo "❌ Critical vulnerabilities found in nightly build"
+ {
+ echo "## 🔐 Nightly Supply Chain Severity Policy"
+ echo ""
+ echo "- Blocking: Critical, High"
+ echo "- Medium: non-blocking by default (report + triage SLA)"
+ echo "- Policy file: .github/security-severity-policy.yml"
+ } >> "$GITHUB_STEP_SUMMARY"
+
+ - name: Check for Critical/High CVEs
+ run: |
+ set -euo pipefail
+
+ jq -e . trivy-nightly.sarif >/dev/null
+
+ CRITICAL_COUNT=$(jq -r '
+ [
+ .runs[] as $run
+ | ($run.tool.driver.rules // []) as $rules
+ | $run.results[]?
+ | . as $result
+ | (
+ (
+ if (($result.ruleIndex | type) == "number") then
+ ($rules[$result.ruleIndex].properties["security-severity"] // empty)
+ else
+ empty
+ end
+ )
+ // ([
+ $rules[]?
+ | select((.id // "") == ($result.ruleId // ""))
+ | .properties["security-severity"]
+ ][0] // empty)
+ // empty
+ ) as $securitySeverity
+ | (try ($securitySeverity | tonumber) catch empty) as $score
+ | select($score != null and $score >= 9.0)
+ ] | length
+ ' trivy-nightly.sarif)
+
+ HIGH_COUNT=$(jq -r '
+ [
+ .runs[] as $run
+ | ($run.tool.driver.rules // []) as $rules
+ | $run.results[]?
+ | . as $result
+ | (
+ (
+ if (($result.ruleIndex | type) == "number") then
+ ($rules[$result.ruleIndex].properties["security-severity"] // empty)
+ else
+ empty
+ end
+ )
+ // ([
+ $rules[]?
+ | select((.id // "") == ($result.ruleId // ""))
+ | .properties["security-severity"]
+ ][0] // empty)
+ // empty
+ ) as $securitySeverity
+ | (try ($securitySeverity | tonumber) catch empty) as $score
+ | select($score != null and $score >= 7.0 and $score < 9.0)
+ ] | length
+ ' trivy-nightly.sarif)
+
+ MEDIUM_COUNT=$(jq -r '
+ [
+ .runs[] as $run
+ | ($run.tool.driver.rules // []) as $rules
+ | $run.results[]?
+ | . as $result
+ | (
+ (
+ if (($result.ruleIndex | type) == "number") then
+ ($rules[$result.ruleIndex].properties["security-severity"] // empty)
+ else
+ empty
+ end
+ )
+ // ([
+ $rules[]?
+ | select((.id // "") == ($result.ruleId // ""))
+ | .properties["security-severity"]
+ ][0] // empty)
+ // empty
+ ) as $securitySeverity
+ | (try ($securitySeverity | tonumber) catch empty) as $score
+ | select($score != null and $score >= 4.0 and $score < 7.0)
+ ] | length
+ ' trivy-nightly.sarif)
+
+ {
+ echo "- Structured SARIF counts: CRITICAL=${CRITICAL_COUNT}, HIGH=${HIGH_COUNT}, MEDIUM=${MEDIUM_COUNT}"
+ } >> "$GITHUB_STEP_SUMMARY"
+
+ if [ "$CRITICAL_COUNT" -gt 0 ]; then
+ echo "❌ Critical vulnerabilities found in nightly build (${CRITICAL_COUNT})"
exit 1
fi
- echo "✅ No critical vulnerabilities found"
+
+ if [ "$HIGH_COUNT" -gt 0 ]; then
+ echo "❌ High vulnerabilities found in nightly build (${HIGH_COUNT})"
+ exit 1
+ fi
+
+ if [ "$MEDIUM_COUNT" -gt 0 ]; then
+ echo "::warning::Medium vulnerabilities found in nightly build (${MEDIUM_COUNT}). Non-blocking by policy; triage with SLA per .github/security-severity-policy.yml"
+ fi
+
+ echo "✅ No Critical/High vulnerabilities found"
diff --git a/.github/workflows/quality-checks.yml b/.github/workflows/quality-checks.yml
index 562c5c05..9b9a09e8 100644
--- a/.github/workflows/quality-checks.yml
+++ b/.github/workflows/quality-checks.yml
@@ -3,6 +3,8 @@ name: Quality Checks
on:
pull_request:
push:
+ branches:
+ - main
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -18,6 +20,27 @@ env:
GOTOOLCHAIN: auto
jobs:
+ auth-route-protection-contract:
+ name: Auth Route Protection Contract
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
+ with:
+ fetch-depth: 0
+ ref: ${{ github.sha }}
+
+ - name: Set up Go
+ uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
+ with:
+ go-version: ${{ env.GO_VERSION }}
+ cache-dependency-path: backend/go.sum
+
+ - name: Run auth protection contract tests
+ run: |
+ set -euo pipefail
+ cd backend
+ go test ./internal/api/routes -run 'TestRegister_StateChangingRoutesRequireAuthentication|TestRegister_StateChangingRoutesDenyByDefaultWithExplicitAllowlist|TestRegister_AuthenticatedRoutes' -count=1 -v
+
codecov-trigger-parity-guard:
name: Codecov Trigger/Comment Parity Guard
runs-on: ubuntu-latest
@@ -113,7 +136,7 @@ jobs:
} >> "$GITHUB_ENV"
- name: Set up Go
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
+ uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
with:
go-version: ${{ env.GO_VERSION }}
cache-dependency-path: backend/go.sum
diff --git a/.github/workflows/release-goreleaser.yml b/.github/workflows/release-goreleaser.yml
index 0bab3e02..c79a0eb1 100644
--- a/.github/workflows/release-goreleaser.yml
+++ b/.github/workflows/release-goreleaser.yml
@@ -20,6 +20,7 @@ permissions:
jobs:
goreleaser:
+ if: ${{ !contains(github.ref_name, '-candidate') && !contains(github.ref_name, '-rc') }}
runs-on: ubuntu-latest
env:
# Use the built-in GITHUB_TOKEN by default for GitHub API operations.
@@ -32,10 +33,22 @@ jobs:
with:
fetch-depth: 0
+ - name: Enforce PR-2 release promotion guard
+ env:
+ REPO_VARS_JSON: ${{ toJSON(vars) }}
+ run: |
+ PR2_GATE_STATUS="$(printf '%s' "$REPO_VARS_JSON" | jq -r '.CHARON_PR2_GATES_PASSED // "false"')"
+ if [[ "$PR2_GATE_STATUS" != "true" ]]; then
+ echo "::error::Releasable tag promotion is blocked until PR-2 security/retirement gates pass."
+ echo "::error::Set repository variable CHARON_PR2_GATES_PASSED=true only after PR-2 approval."
+ exit 1
+ fi
+
- name: Set up Go
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
+ uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version: ${{ env.GO_VERSION }}
+ cache-dependency-path: backend/go.sum
- name: Set up Node.js
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
diff --git a/.github/workflows/renovate.yml b/.github/workflows/renovate.yml
index 36958d43..6d17aa86 100644
--- a/.github/workflows/renovate.yml
+++ b/.github/workflows/renovate.yml
@@ -25,7 +25,7 @@ jobs:
fetch-depth: 1
- name: Run Renovate
- uses: renovatebot/github-action@d65ef9e20512193cc070238b49c3873a361cd50c # v46.1.1
+ uses: renovatebot/github-action@7b4b65bf31e07d4e3e51708d07700fb41bc03166 # v46.1.3
with:
configurationFile: .github/renovate.json
token: ${{ secrets.RENOVATE_TOKEN || secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/repo-health.yml b/.github/workflows/repo-health.yml
index a41db062..6c11cec3 100644
--- a/.github/workflows/repo-health.yml
+++ b/.github/workflows/repo-health.yml
@@ -34,7 +34,7 @@ jobs:
- name: Upload health output
if: always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: repo-health-output
path: |
diff --git a/.github/workflows/security-pr.yml b/.github/workflows/security-pr.yml
index 94406466..8eeb9569 100644
--- a/.github/workflows/security-pr.yml
+++ b/.github/workflows/security-pr.yml
@@ -4,18 +4,22 @@
name: Security Scan (PR)
on:
+ workflow_run:
+ workflows: ["Docker Build, Publish & Test"]
+ types: [completed]
workflow_dispatch:
inputs:
pr_number:
- description: 'PR number to scan (optional)'
- required: false
+ description: 'PR number to scan'
+ required: true
type: string
pull_request:
push:
+ branches: [main]
concurrency:
- group: security-pr-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
+ group: security-pr-${{ github.event_name == 'workflow_run' && github.event.workflow_run.event || github.event_name }}-${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: true
jobs:
@@ -23,16 +27,18 @@ jobs:
name: Trivy Binary Scan
runs-on: ubuntu-latest
timeout-minutes: 10
- # Run for: manual dispatch, PR builds, or any push builds from docker-build
+ # Run for manual dispatch, direct PR/push, or successful upstream workflow_run
if: >-
github.event_name == 'workflow_dispatch' ||
github.event_name == 'pull_request' ||
- ((github.event.workflow_run.event == 'push' || github.event.workflow_run.pull_requests[0].number != null) &&
- (github.event.workflow_run.status != 'completed' || github.event.workflow_run.conclusion == 'success'))
+ github.event_name == 'push' ||
+ (github.event_name == 'workflow_run' &&
+ github.event.workflow_run.event == 'pull_request' &&
+ github.event.workflow_run.status == 'completed' &&
+ github.event.workflow_run.conclusion == 'success')
permissions:
contents: read
- pull-requests: write
security-events: write
actions: read
@@ -41,27 +47,65 @@ jobs:
# actions/checkout v4.2.2
uses: actions/checkout@0c366fd6a839edf440554fa01a7085ccba70ac98
with:
- ref: ${{ github.event.workflow_run.head_sha || github.sha }}
+ ref: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}
- name: Extract PR number from workflow_run
id: pr-info
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
- if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
- # Manual dispatch - use input or fail gracefully
- if [[ -n "${{ inputs.pr_number }}" ]]; then
- echo "pr_number=${{ inputs.pr_number }}" >> "$GITHUB_OUTPUT"
- echo "✅ Using manually provided PR number: ${{ inputs.pr_number }}"
- else
- echo "⚠️ No PR number provided for manual dispatch"
- echo "pr_number=" >> "$GITHUB_OUTPUT"
- fi
+ if [[ "${{ github.event_name }}" == "push" ]]; then
+ echo "pr_number=" >> "$GITHUB_OUTPUT"
+ echo "is_push=true" >> "$GITHUB_OUTPUT"
+ echo "✅ Push event detected; using local image path"
exit 0
fi
+ if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+ echo "pr_number=${{ github.event.pull_request.number }}" >> "$GITHUB_OUTPUT"
+ echo "is_push=false" >> "$GITHUB_OUTPUT"
+ echo "✅ Pull request event detected: PR #${{ github.event.pull_request.number }}"
+ exit 0
+ fi
+
+ if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
+ INPUT_PR_NUMBER="${{ inputs.pr_number }}"
+ if [[ -z "${INPUT_PR_NUMBER}" ]]; then
+ echo "❌ workflow_dispatch requires inputs.pr_number"
+ exit 1
+ fi
+
+ if [[ ! "${INPUT_PR_NUMBER}" =~ ^[0-9]+$ ]]; then
+ echo "❌ reason_category=invalid_input"
+ echo "reason=workflow_dispatch pr_number must be digits-only"
+ exit 1
+ fi
+
+ PR_NUMBER="${INPUT_PR_NUMBER}"
+ echo "pr_number=${PR_NUMBER}" >> "$GITHUB_OUTPUT"
+ echo "is_push=false" >> "$GITHUB_OUTPUT"
+ echo "✅ Using manually provided PR number: ${PR_NUMBER}"
+ exit 0
+ fi
+
+ if [[ "${{ github.event_name }}" == "workflow_run" ]]; then
+ if [[ "${{ github.event.workflow_run.event }}" != "pull_request" ]]; then
+ # Explicit contract validation happens in the dedicated guard step.
+ echo "pr_number=" >> "$GITHUB_OUTPUT"
+ echo "is_push=false" >> "$GITHUB_OUTPUT"
+ exit 0
+ fi
+
+ if [[ -n "${{ github.event.workflow_run.pull_requests[0].number || '' }}" ]]; then
+ echo "pr_number=${{ github.event.workflow_run.pull_requests[0].number }}" >> "$GITHUB_OUTPUT"
+ echo "is_push=false" >> "$GITHUB_OUTPUT"
+ echo "✅ Found PR number from workflow_run payload: ${{ github.event.workflow_run.pull_requests[0].number }}"
+ exit 0
+ fi
+ fi
+
# Extract PR number from context
- HEAD_SHA="${{ github.event.workflow_run.head_sha || github.event.pull_request.head.sha || github.sha }}"
+ HEAD_SHA="${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.event.pull_request.head.sha || github.sha }}"
echo "🔍 Looking for PR with head SHA: ${HEAD_SHA}"
# Query GitHub API for PR associated with this commit
@@ -73,21 +117,38 @@ jobs:
if [[ -n "${PR_NUMBER}" ]]; then
echo "pr_number=${PR_NUMBER}" >> "$GITHUB_OUTPUT"
+ echo "is_push=false" >> "$GITHUB_OUTPUT"
echo "✅ Found PR number: ${PR_NUMBER}"
else
- echo "⚠️ Could not find PR number for SHA: ${HEAD_SHA}"
- echo "pr_number=" >> "$GITHUB_OUTPUT"
+ echo "❌ Could not determine PR number for workflow_run SHA: ${HEAD_SHA}"
+ exit 1
fi
- # Check if this is a push event (not a PR)
- if [[ "${{ github.event_name }}" == "push" || "${{ github.event.workflow_run.event }}" == "push" || -z "${PR_NUMBER}" ]]; then
- HEAD_BRANCH="${{ github.event.workflow_run.head_branch || github.ref_name }}"
- echo "is_push=true" >> "$GITHUB_OUTPUT"
- echo "✅ Detected push build from branch: ${HEAD_BRANCH}"
- else
- echo "is_push=false" >> "$GITHUB_OUTPUT"
+ - name: Validate workflow_run trust boundary and event contract
+ if: github.event_name == 'workflow_run'
+ run: |
+ if [[ "${{ github.event.workflow_run.name }}" != "Docker Build, Publish & Test" ]]; then
+ echo "❌ reason_category=unexpected_upstream_workflow"
+ echo "workflow_name=${{ github.event.workflow_run.name }}"
+ exit 1
fi
+ if [[ "${{ github.event.workflow_run.event }}" != "pull_request" ]]; then
+ echo "❌ reason_category=unsupported_upstream_event"
+ echo "upstream_event=${{ github.event.workflow_run.event }}"
+ echo "run_id=${{ github.event.workflow_run.id }}"
+ exit 1
+ fi
+
+ if [[ "${{ github.event.workflow_run.head_repository.full_name }}" != "${{ github.repository }}" ]]; then
+ echo "❌ reason_category=untrusted_upstream_repository"
+ echo "upstream_head_repository=${{ github.event.workflow_run.head_repository.full_name }}"
+ echo "expected_repository=${{ github.repository }}"
+ exit 1
+ fi
+
+ echo "✅ workflow_run trust boundary and event contract validated"
+
- name: Build Docker image (Local)
if: github.event_name == 'push' || github.event_name == 'pull_request'
run: |
@@ -97,95 +158,149 @@ jobs:
- name: Check for PR image artifact
id: check-artifact
- if: (steps.pr-info.outputs.pr_number != '' || steps.pr-info.outputs.is_push == 'true') && github.event_name != 'push' && github.event_name != 'pull_request'
+ if: github.event_name == 'workflow_run' || github.event_name == 'workflow_dispatch'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
- # Determine artifact name based on event type
- if [[ "${{ steps.pr-info.outputs.is_push }}" == "true" ]]; then
- ARTIFACT_NAME="push-image"
- else
- PR_NUMBER="${{ steps.pr-info.outputs.pr_number }}"
- ARTIFACT_NAME="pr-image-${PR_NUMBER}"
+ PR_NUMBER="${{ steps.pr-info.outputs.pr_number }}"
+ if [[ ! "${PR_NUMBER}" =~ ^[0-9]+$ ]]; then
+ echo "❌ reason_category=invalid_input"
+ echo "reason=Resolved PR number must be digits-only"
+ exit 1
fi
- RUN_ID="${{ github.event.workflow_run.id }}"
+
+ ARTIFACT_NAME="pr-image-${PR_NUMBER}"
+ RUN_ID="${{ github.event_name == 'workflow_run' && github.event.workflow_run.id || '' }}"
echo "🔍 Checking for artifact: ${ARTIFACT_NAME}"
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
- # For manual dispatch, find the most recent workflow run with this artifact
- RUN_ID=$(gh api \
+ # Manual replay path: find latest successful docker-build pull_request run for this PR.
+ RUNS_JSON=$(gh api \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
- "/repos/${{ github.repository }}/actions/workflows/docker-build.yml/runs?status=success&per_page=10" \
- --jq '.workflow_runs[0].id // empty' 2>/dev/null || echo "")
+ "/repos/${{ github.repository }}/actions/workflows/docker-build.yml/runs?event=pull_request&status=success&per_page=100" 2>&1)
+ RUNS_STATUS=$?
+
+ if [[ ${RUNS_STATUS} -ne 0 ]]; then
+ echo "❌ reason_category=api_error"
+ echo "reason=Failed to query workflow runs for PR lookup"
+ echo "upstream_run_id=unknown"
+ echo "artifact_name=${ARTIFACT_NAME}"
+ echo "api_output=${RUNS_JSON}"
+ exit 1
+ fi
+
+ RUN_ID=$(printf '%s' "${RUNS_JSON}" | jq -r --argjson pr "${PR_NUMBER}" '.workflow_runs[] | select((.pull_requests // []) | any(.number == $pr)) | .id' | head -n 1)
if [[ -z "${RUN_ID}" ]]; then
- echo "⚠️ No successful workflow runs found"
- echo "artifact_exists=false" >> "$GITHUB_OUTPUT"
- exit 0
+ echo "❌ reason_category=not_found"
+ echo "reason=No successful docker-build pull_request run found for PR #${PR_NUMBER}"
+ echo "upstream_run_id=unknown"
+ echo "artifact_name=${ARTIFACT_NAME}"
+ exit 1
fi
- elif [[ -z "${RUN_ID}" ]]; then
- # If triggered by push/pull_request, RUN_ID is empty. Find recent run for this commit.
- HEAD_SHA="${{ github.event.workflow_run.head_sha || github.event.pull_request.head.sha || github.sha }}"
- echo "🔍 Searching for workflow run for SHA: ${HEAD_SHA}"
- # Retry a few times as the run might be just starting or finishing
- for i in {1..3}; do
- RUN_ID=$(gh api \
- -H "Accept: application/vnd.github+json" \
- -H "X-GitHub-Api-Version: 2022-11-28" \
- "/repos/${{ github.repository }}/actions/workflows/docker-build.yml/runs?head_sha=${HEAD_SHA}&status=success&per_page=1" \
- --jq '.workflow_runs[0].id // empty' 2>/dev/null || echo "")
- if [[ -n "${RUN_ID}" ]]; then break; fi
- echo "⏳ Waiting for workflow run to appear/complete... ($i/3)"
- sleep 5
- done
fi
echo "run_id=${RUN_ID}" >> "$GITHUB_OUTPUT"
# Check if the artifact exists in the workflow run
- ARTIFACT_ID=$(gh api \
+ ARTIFACTS_JSON=$(gh api \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
- "/repos/${{ github.repository }}/actions/runs/${RUN_ID}/artifacts" \
- --jq ".artifacts[] | select(.name == \"${ARTIFACT_NAME}\") | .id" 2>/dev/null || echo "")
+ "/repos/${{ github.repository }}/actions/runs/${RUN_ID}/artifacts" 2>&1)
+ ARTIFACTS_STATUS=$?
- if [[ -n "${ARTIFACT_ID}" ]]; then
- echo "artifact_exists=true" >> "$GITHUB_OUTPUT"
- echo "artifact_id=${ARTIFACT_ID}" >> "$GITHUB_OUTPUT"
- echo "✅ Found artifact: ${ARTIFACT_NAME} (ID: ${ARTIFACT_ID})"
- else
- echo "artifact_exists=false" >> "$GITHUB_OUTPUT"
- echo "⚠️ Artifact not found: ${ARTIFACT_NAME}"
- echo "ℹ️ This is expected for non-PR builds or if the image was not uploaded"
+ if [[ ${ARTIFACTS_STATUS} -ne 0 ]]; then
+ echo "❌ reason_category=api_error"
+ echo "reason=Failed to query artifacts for upstream run"
+ echo "upstream_run_id=${RUN_ID}"
+ echo "artifact_name=${ARTIFACT_NAME}"
+ echo "api_output=${ARTIFACTS_JSON}"
+ exit 1
fi
- - name: Skip if no artifact
- if: ((steps.pr-info.outputs.pr_number == '' && steps.pr-info.outputs.is_push != 'true') || steps.check-artifact.outputs.artifact_exists != 'true') && github.event_name != 'push' && github.event_name != 'pull_request'
- run: |
- echo "ℹ️ Skipping security scan - no PR image artifact available"
- echo "This is expected for:"
- echo " - Pushes to main/release branches"
- echo " - PRs where Docker build failed"
- echo " - Manual dispatch without PR number"
- exit 0
+ ARTIFACT_ID=$(printf '%s' "${ARTIFACTS_JSON}" | jq -r --arg name "${ARTIFACT_NAME}" '.artifacts[] | select(.name == $name) | .id' | head -n 1)
+
+ if [[ -z "${ARTIFACT_ID}" ]]; then
+ echo "❌ reason_category=not_found"
+ echo "reason=Required artifact was not found"
+ echo "upstream_run_id=${RUN_ID}"
+ echo "artifact_name=${ARTIFACT_NAME}"
+ exit 1
+ fi
+
+ {
+ echo "artifact_exists=true"
+ echo "artifact_id=${ARTIFACT_ID}"
+ echo "artifact_name=${ARTIFACT_NAME}"
+ } >> "$GITHUB_OUTPUT"
+ echo "✅ Found artifact: ${ARTIFACT_NAME} (ID: ${ARTIFACT_ID})"
- name: Download PR image artifact
- if: steps.check-artifact.outputs.artifact_exists == 'true'
+ if: github.event_name == 'workflow_run' || github.event_name == 'workflow_dispatch'
# actions/download-artifact v4.1.8
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
+ uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3
with:
- name: ${{ steps.pr-info.outputs.is_push == 'true' && 'push-image' || format('pr-image-{0}', steps.pr-info.outputs.pr_number) }}
+ name: ${{ steps.check-artifact.outputs.artifact_name }}
run-id: ${{ steps.check-artifact.outputs.run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Load Docker image
- if: steps.check-artifact.outputs.artifact_exists == 'true'
+ if: github.event_name == 'workflow_run' || github.event_name == 'workflow_dispatch'
+ id: load-image
run: |
echo "📦 Loading Docker image..."
- docker load < charon-pr-image.tar
- echo "✅ Docker image loaded"
+
+ if [[ ! -r "charon-pr-image.tar" ]]; then
+ echo "❌ ERROR: Artifact image tar is missing or unreadable"
+ exit 1
+ fi
+
+ MANIFEST_TAGS=""
+ if tar -tf charon-pr-image.tar | grep -qx "manifest.json"; then
+ MANIFEST_TAGS=$(tar -xOf charon-pr-image.tar manifest.json 2>/dev/null | jq -r '.[]?.RepoTags[]?' 2>/dev/null | sed '/^$/d' || true)
+ else
+ echo "⚠️ manifest.json not found in artifact tar; will try docker-load-image-id fallback"
+ fi
+
+ LOAD_OUTPUT=$(docker load < charon-pr-image.tar 2>&1)
+ echo "${LOAD_OUTPUT}"
+
+ SOURCE_IMAGE_REF=""
+ SOURCE_RESOLUTION_MODE=""
+
+ while IFS= read -r tag; do
+ [[ -z "${tag}" ]] && continue
+ if docker image inspect "${tag}" >/dev/null 2>&1; then
+ SOURCE_IMAGE_REF="${tag}"
+ SOURCE_RESOLUTION_MODE="manifest_tag"
+ break
+ fi
+ done <<< "${MANIFEST_TAGS}"
+
+ if [[ -z "${SOURCE_IMAGE_REF}" ]]; then
+ LOAD_IMAGE_ID=$(printf '%s\n' "${LOAD_OUTPUT}" | sed -nE 's/^Loaded image ID: (sha256:[0-9a-f]+)$/\1/p' | head -n1)
+ if [[ -n "${LOAD_IMAGE_ID}" ]] && docker image inspect "${LOAD_IMAGE_ID}" >/dev/null 2>&1; then
+ SOURCE_IMAGE_REF="${LOAD_IMAGE_ID}"
+ SOURCE_RESOLUTION_MODE="load_image_id"
+ fi
+ fi
+
+ if [[ -z "${SOURCE_IMAGE_REF}" ]]; then
+ echo "❌ ERROR: Could not resolve a valid image reference from manifest tags or docker load image ID"
+ exit 1
+ fi
+
+ docker tag "${SOURCE_IMAGE_REF}" "charon:artifact"
+
+ {
+ echo "source_image_ref=${SOURCE_IMAGE_REF}"
+ echo "source_resolution_mode=${SOURCE_RESOLUTION_MODE}"
+ echo "image_ref=charon:artifact"
+ } >> "$GITHUB_OUTPUT"
+
+ echo "✅ Docker image resolved via ${SOURCE_RESOLUTION_MODE} and tagged as charon:artifact"
docker images | grep charon
- name: Extract charon binary from container
@@ -214,31 +329,10 @@ jobs:
exit 0
fi
- # Normalize image name for reference
- IMAGE_NAME=$(echo "${{ github.repository_owner }}/charon" | tr '[:upper:]' '[:lower:]')
- if [[ "${{ steps.pr-info.outputs.is_push }}" == "true" ]]; then
- BRANCH_NAME="${{ github.event.workflow_run.head_branch }}"
- if [[ -z "${BRANCH_NAME}" ]]; then
- echo "❌ ERROR: Branch name is empty for push build"
- exit 1
- fi
- # Normalize branch name for Docker tag (replace / and other special chars with -)
- # This matches docker/metadata-action behavior: type=ref,event=branch
- TAG_SAFE_BRANCH="${BRANCH_NAME//\//-}"
- IMAGE_REF="ghcr.io/${IMAGE_NAME}:${TAG_SAFE_BRANCH}"
- elif [[ -n "${{ steps.pr-info.outputs.pr_number }}" ]]; then
- IMAGE_REF="ghcr.io/${IMAGE_NAME}:pr-${{ steps.pr-info.outputs.pr_number }}"
- else
- echo "❌ ERROR: Cannot determine image reference"
- echo " - is_push: ${{ steps.pr-info.outputs.is_push }}"
- echo " - pr_number: ${{ steps.pr-info.outputs.pr_number }}"
- echo " - branch: ${{ github.event.workflow_run.head_branch }}"
- exit 1
- fi
-
- # Validate the image reference format
- if [[ ! "${IMAGE_REF}" =~ ^ghcr\.io/[a-z0-9_-]+/[a-z0-9_-]+:[a-zA-Z0-9._-]+$ ]]; then
- echo "❌ ERROR: Invalid image reference format: ${IMAGE_REF}"
+ # For workflow_run artifact path, always use locally tagged image from loaded artifact.
+ IMAGE_REF="${{ steps.load-image.outputs.image_ref }}"
+ if [[ -z "${IMAGE_REF}" ]]; then
+ echo "❌ ERROR: Loaded artifact image reference is empty"
exit 1
fi
@@ -268,7 +362,7 @@ jobs:
- name: Run Trivy filesystem scan (SARIF output)
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
# aquasecurity/trivy-action v0.33.1
- uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518
+ uses: aquasecurity/trivy-action@4c61e6329bab9be735ca35291551614bc663dff3
with:
scan-type: 'fs'
scan-ref: ${{ steps.extract.outputs.binary_path }}
@@ -277,19 +371,30 @@ jobs:
severity: 'CRITICAL,HIGH,MEDIUM'
continue-on-error: true
+ - name: Check Trivy SARIF output exists
+ if: always() && (steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request')
+ id: trivy-sarif-check
+ run: |
+ if [[ -f trivy-binary-results.sarif ]]; then
+ echo "exists=true" >> "$GITHUB_OUTPUT"
+ else
+ echo "exists=false" >> "$GITHUB_OUTPUT"
+ echo "ℹ️ No Trivy SARIF output found; skipping SARIF/artifact upload steps"
+ fi
+
- name: Upload Trivy SARIF to GitHub Security
- if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
+ if: always() && steps.trivy-sarif-check.outputs.exists == 'true'
# github/codeql-action v4
- uses: github/codeql-action/upload-sarif@710e2945787622b429f8982cacb154faa182de18
+ uses: github/codeql-action/upload-sarif@0ec47d036c68ae0cf94c629009b1029407111281
with:
sarif_file: 'trivy-binary-results.sarif'
- category: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event.workflow_run.head_branch) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
+ category: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
continue-on-error: true
- name: Run Trivy filesystem scan (fail on CRITICAL/HIGH)
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
# aquasecurity/trivy-action v0.33.1
- uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518
+ uses: aquasecurity/trivy-action@4c61e6329bab9be735ca35291551614bc663dff3
with:
scan-type: 'fs'
scan-ref: ${{ steps.extract.outputs.binary_path }}
@@ -298,11 +403,11 @@ jobs:
exit-code: '1'
- name: Upload scan artifacts
- if: always() && (steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request')
+ if: always() && steps.trivy-sarif-check.outputs.exists == 'true'
# actions/upload-artifact v4.4.3
- uses: actions/upload-artifact@47309c993abb98030a35d55ef7ff34b7fa1074b5
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
- name: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event.workflow_run.head_branch) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
+ name: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
path: |
trivy-binary-results.sarif
retention-days: 14
@@ -312,7 +417,7 @@ jobs:
run: |
{
if [[ "${{ steps.pr-info.outputs.is_push }}" == "true" ]]; then
- echo "## 🔒 Security Scan Results - Branch: ${{ github.event.workflow_run.head_branch }}"
+ echo "## 🔒 Security Scan Results - Branch: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name }}"
else
echo "## 🔒 Security Scan Results - PR #${{ steps.pr-info.outputs.pr_number }}"
fi
diff --git a/.github/workflows/security-weekly-rebuild.yml b/.github/workflows/security-weekly-rebuild.yml
index 3f4a4b52..db2916f5 100644
--- a/.github/workflows/security-weekly-rebuild.yml
+++ b/.github/workflows/security-weekly-rebuild.yml
@@ -6,7 +6,7 @@ name: Weekly Security Rebuild
on:
schedule:
- - cron: '0 2 * * 0' # Sundays at 02:00 UTC
+ - cron: '0 12 * * 2' # Tuesdays at 12:00 UTC
workflow_dispatch:
inputs:
force_rebuild:
@@ -119,7 +119,7 @@ jobs:
severity: 'CRITICAL,HIGH,MEDIUM,LOW'
- name: Upload Trivy JSON results
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: trivy-weekly-scan-${{ github.run_number }}
path: trivy-weekly-results.json
diff --git a/.github/workflows/supply-chain-pr.yml b/.github/workflows/supply-chain-pr.yml
index 9c4e2b95..8529639f 100644
--- a/.github/workflows/supply-chain-pr.yml
+++ b/.github/workflows/supply-chain-pr.yml
@@ -11,6 +11,8 @@ on:
type: string
pull_request:
push:
+ branches:
+ - main
concurrency:
group: supply-chain-pr-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
@@ -264,7 +266,7 @@ jobs:
# Generate SBOM using official Anchore action (auto-updated by Renovate)
- name: Generate SBOM
if: steps.set-target.outputs.image_name != ''
- uses: anchore/sbom-action@28d71544de8eaf1b958d335707167c5f783590ad # v0.22.2
+ uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
id: sbom
with:
image: ${{ steps.set-target.outputs.image_name }}
@@ -337,6 +339,27 @@ jobs:
echo " Low: ${LOW_COUNT}"
echo " Total: ${TOTAL_COUNT}"
+ - name: Security severity policy summary
+ if: steps.set-target.outputs.image_name != ''
+ run: |
+ CRITICAL_COUNT="${{ steps.vuln-summary.outputs.critical_count }}"
+ HIGH_COUNT="${{ steps.vuln-summary.outputs.high_count }}"
+ MEDIUM_COUNT="${{ steps.vuln-summary.outputs.medium_count }}"
+
+ {
+ echo "## 🔐 Supply Chain Severity Policy"
+ echo ""
+ echo "- Blocking: Critical, High"
+ echo "- Medium: non-blocking by default (report + triage SLA)"
+ echo "- Policy file: .github/security-severity-policy.yml"
+ echo ""
+ echo "Current scan counts: Critical=${CRITICAL_COUNT}, High=${HIGH_COUNT}, Medium=${MEDIUM_COUNT}"
+ } >> "$GITHUB_STEP_SUMMARY"
+
+ if [[ "${MEDIUM_COUNT}" -gt 0 ]]; then
+ echo "::warning::${MEDIUM_COUNT} medium vulnerabilities found. Non-blocking by policy; create/maintain triage issue with SLA per .github/security-severity-policy.yml"
+ fi
+
- name: Upload SARIF to GitHub Security
if: steps.check-artifact.outputs.artifact_found == 'true'
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
@@ -348,7 +371,7 @@ jobs:
- name: Upload supply chain artifacts
if: steps.set-target.outputs.image_name != ''
# actions/upload-artifact v4.6.0
- uses: actions/upload-artifact@47309c993abb98030a35d55ef7ff34b7fa1074b5
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: ${{ steps.pr-number.outputs.is_push == 'true' && format('supply-chain-{0}', steps.sanitize.outputs.branch) || format('supply-chain-pr-{0}', steps.pr-number.outputs.pr_number) }}
path: |
@@ -433,10 +456,11 @@ jobs:
echo "✅ PR comment posted"
- - name: Fail on critical vulnerabilities
+ - name: Fail on Critical/High vulnerabilities
if: steps.set-target.outputs.image_name != ''
run: |
CRITICAL_COUNT="${{ steps.vuln-summary.outputs.critical_count }}"
+ HIGH_COUNT="${{ steps.vuln-summary.outputs.high_count }}"
if [[ "${CRITICAL_COUNT}" -gt 0 ]]; then
echo "🚨 Found ${CRITICAL_COUNT} CRITICAL vulnerabilities!"
@@ -444,4 +468,10 @@ jobs:
exit 1
fi
- echo "✅ No critical vulnerabilities found"
+ if [[ "${HIGH_COUNT}" -gt 0 ]]; then
+ echo "🚨 Found ${HIGH_COUNT} HIGH vulnerabilities!"
+ echo "Please review the vulnerability report and address high severity issues before merging."
+ exit 1
+ fi
+
+ echo "✅ No Critical/High vulnerabilities found"
diff --git a/.github/workflows/supply-chain-verify.yml b/.github/workflows/supply-chain-verify.yml
index aacab9b6..fa24ee8b 100644
--- a/.github/workflows/supply-chain-verify.yml
+++ b/.github/workflows/supply-chain-verify.yml
@@ -119,7 +119,7 @@ jobs:
# Generate SBOM using official Anchore action (auto-updated by Renovate)
- name: Generate and Verify SBOM
if: steps.image-check.outputs.exists == 'true'
- uses: anchore/sbom-action@28d71544de8eaf1b958d335707167c5f783590ad # v0.22.2
+ uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
with:
image: ghcr.io/${{ github.repository_owner }}/charon:${{ steps.tag.outputs.tag }}
format: cyclonedx-json
@@ -144,7 +144,7 @@ jobs:
- name: Upload SBOM Artifact
if: steps.image-check.outputs.exists == 'true' && always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: sbom-${{ steps.tag.outputs.tag }}
path: sbom-verify.cyclonedx.json
@@ -324,7 +324,7 @@ jobs:
- name: Upload Vulnerability Scan Artifact
if: steps.validate-sbom.outputs.valid == 'true' && always()
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: vulnerability-scan-${{ steps.tag.outputs.tag }}
path: |
diff --git a/.github/workflows/weekly-nightly-promotion.yml b/.github/workflows/weekly-nightly-promotion.yml
index d0f57ae4..47ad9fd6 100644
--- a/.github/workflows/weekly-nightly-promotion.yml
+++ b/.github/workflows/weekly-nightly-promotion.yml
@@ -5,9 +5,9 @@ name: Weekly Nightly to Main Promotion
on:
schedule:
- # Every Monday at 10:30 UTC (5:30am EST / 6:30am EDT)
+ # Every Monday at 12:00 UTC (7:00am EST / 8:00am EDT)
# Offset from nightly sync (09:00 UTC) to avoid schedule race and allow validation completion.
- - cron: '30 10 * * 1'
+ - cron: '0 12 * * 1'
workflow_dispatch:
inputs:
reason:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 78127bdc..b48f855e 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -113,7 +113,7 @@ repos:
stages: [manual] # Only runs when explicitly called
- id: frontend-type-check
name: Frontend TypeScript Check
- entry: bash -c 'cd frontend && npm run type-check'
+ entry: bash -c 'cd frontend && npx tsc --noEmit'
language: system
files: '^frontend/.*\.(ts|tsx)$'
pass_filenames: false
diff --git a/.version b/.version
index 96fb87f8..3a7f17e4 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-v0.19.0
+v0.19.1
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
index c8eef9be..6a06bb9e 100644
--- a/.vscode/tasks.json
+++ b/.vscode/tasks.json
@@ -724,6 +724,13 @@
"group": "test",
"problemMatcher": []
},
+ {
+ "label": "Security: Caddy PR-1 Compatibility Matrix",
+ "type": "shell",
+ "command": "cd /projects/Charon && bash scripts/caddy-compat-matrix.sh --candidate-version 2.11.1 --patch-scenarios A,B,C --platforms linux/amd64,linux/arm64 --smoke-set boot_caddy,plugin_modules,config_validate,admin_api_health --output-dir test-results/caddy-compat --docs-report docs/reports/caddy-compatibility-matrix.md",
+ "group": "test",
+ "problemMatcher": []
+ },
{
"label": "Test: E2E Playwright (Skill)",
"type": "shell",
@@ -808,6 +815,162 @@
"close": false
}
},
+ {
+ "label": "Test: E2E Playwright (Chromium) - Non-Security Shards 1/4-4/4",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=1 npx playwright test --project=chromium --shard=1/4 --output=playwright-output/chromium-shard-1 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks && cd /projects/Charon && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=2 npx playwright test --project=chromium --shard=2/4 --output=playwright-output/chromium-shard-2 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks && cd /projects/Charon && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=3 npx playwright test --project=chromium --shard=3/4 --output=playwright-output/chromium-shard-3 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks && cd /projects/Charon && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=4 npx playwright test --project=chromium --shard=4/4 --output=playwright-output/chromium-shard-4 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
+ {
+ "label": "Test: E2E Playwright (Chromium) - Non-Security Shard 1/4",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=1 npx playwright test --project=chromium --shard=1/4 --output=playwright-output/chromium-shard-1 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
+ {
+ "label": "Test: E2E Playwright (Chromium) - Non-Security Shard 2/4",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=2 npx playwright test --project=chromium --shard=2/4 --output=playwright-output/chromium-shard-2 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
+ {
+ "label": "Test: E2E Playwright (Chromium) - Non-Security Shard 3/4",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=3 npx playwright test --project=chromium --shard=3/4 --output=playwright-output/chromium-shard-3 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
+ {
+ "label": "Test: E2E Playwright (Chromium) - Non-Security Shard 4/4",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=4 npx playwright test --project=chromium --shard=4/4 --output=playwright-output/chromium-shard-4 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
+ {
+ "label": "Test: E2E Playwright (WebKit) - Non-Security Shards 1/4-4/4",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=1 npx playwright test --project=webkit --shard=1/4 --output=playwright-output/webkit-shard-1 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks && cd /projects/Charon && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=2 npx playwright test --project=webkit --shard=2/4 --output=playwright-output/webkit-shard-2 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks && cd /projects/Charon && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=3 npx playwright test --project=webkit --shard=3/4 --output=playwright-output/webkit-shard-3 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks && cd /projects/Charon && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=4 npx playwright test --project=webkit --shard=4/4 --output=playwright-output/webkit-shard-4 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
+ {
+ "label": "Test: E2E Playwright (WebKit) - Non-Security Shard 1/4",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=1 npx playwright test --project=webkit --shard=1/4 --output=playwright-output/webkit-shard-1 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
+ {
+ "label": "Test: E2E Playwright (WebKit) - Non-Security Shard 2/4",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=2 npx playwright test --project=webkit --shard=2/4 --output=playwright-output/webkit-shard-2 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
+ {
+ "label": "Test: E2E Playwright (WebKit) - Non-Security Shard 3/4",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=3 npx playwright test --project=webkit --shard=3/4 --output=playwright-output/webkit-shard-3 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
+ {
+ "label": "Test: E2E Playwright (WebKit) - Non-Security Shard 4/4",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=4 npx playwright test --project=webkit --shard=4/4 --output=playwright-output/webkit-shard-4 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
+ {
+ "label": "Test: E2E Playwright (Chromium) - Security Suite",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=true PLAYWRIGHT_SKIP_SECURITY_DEPS=0 npx playwright test --project=security-tests --output=playwright-output/chromium-security tests/security",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
+ {
+ "label": "Test: E2E Playwright (FireFox) - Security Suite",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=true PLAYWRIGHT_SKIP_SECURITY_DEPS=0 npx playwright test --project=firefox --output=playwright-output/firefox-security tests/security",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
+ {
+ "label": "Test: E2E Playwright (WebKit) - Security Suite",
+ "type": "shell",
+ "command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=true PLAYWRIGHT_SKIP_SECURITY_DEPS=0 npx playwright test --project=webkit --output=playwright-output/webkit-security tests/security",
+ "group": "test",
+ "problemMatcher": [],
+ "presentation": {
+ "reveal": "always",
+ "panel": "dedicated",
+ "close": false
+ }
+ },
{
"label": "Test: E2E Playwright with Coverage",
"type": "shell",
diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md
index 6d5323ce..52387d26 100644
--- a/ARCHITECTURE.md
+++ b/ARCHITECTURE.md
@@ -126,7 +126,7 @@ graph TB
| **HTTP Framework** | Gin | Latest | Routing, middleware, HTTP handling |
| **Database** | SQLite | 3.x | Embedded database |
| **ORM** | GORM | Latest | Database abstraction layer |
-| **Reverse Proxy** | Caddy Server | 2.11.0-beta.2 | Embedded HTTP/HTTPS proxy |
+| **Reverse Proxy** | Caddy Server | 2.11.1 | Embedded HTTP/HTTPS proxy |
| **WebSocket** | gorilla/websocket | Latest | Real-time log streaming |
| **Crypto** | golang.org/x/crypto | Latest | Password hashing, encryption |
| **Metrics** | Prometheus Client | Latest | Application metrics |
@@ -1259,6 +1259,14 @@ go test ./integration/...
9. **Release Notes:** Generate changelog from commits
10. **Notify:** Send release notification (Discord, email)
+**Mandatory rollout gates (sign-off block):**
+
+1. Digest freshness and index digest parity across GHCR and Docker Hub
+2. Per-arch digest parity across GHCR and Docker Hub
+3. SBOM and vulnerability scans against immutable refs (`image@sha256:...`)
+4. Artifact freshness timestamps after push
+5. Evidence block with required rollout verification fields
+
### Supply Chain Security
**Components:**
@@ -1292,10 +1300,10 @@ cosign verify \
wikid82/charon:latest
# Inspect SBOM
-syft wikid82/charon:latest -o json
+syft ghcr.io/wikid82/charon@sha256: -o json
# Scan for vulnerabilities
-grype wikid82/charon:latest
+grype ghcr.io/wikid82/charon@sha256:
```
### Rollback Strategy
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 342812a3..ea12fcb1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -31,6 +31,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Fixed
- Fixed: Added robust validation and debug logging for Docker image tags to prevent invalid reference errors.
- Fixed: Removed log masking for image references and added manifest validation to debug CI failures.
+- **Proxy Hosts**: Fixed ACL and Security Headers dropdown selections so create/edit saves now keep the selected values (including clearing to none) after submit and reload.
- **CI**: Fixed Docker image reference output so integration jobs never pull an empty image ref
- **E2E Test Reliability**: Resolved test timeout issues affecting CI/CD pipeline stability
- Fixed config reload overlay blocking test interactions
diff --git a/Dockerfile b/Dockerfile
index d796e890..f26ed1e9 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -14,8 +14,11 @@ ARG BUILD_DEBUG=0
# avoid accidentally pulling a v3 major release. Renovate can still update
# this ARG to a specific v2.x tag when desired.
## Try to build the requested Caddy v2.x tag (Renovate can update this ARG).
-## If the requested tag isn't available, fall back to a known-good v2.11.0-beta.2 build.
-ARG CADDY_VERSION=2.11.0-beta.2
+## If the requested tag isn't available, fall back to a known-good v2.11.1 build.
+ARG CADDY_VERSION=2.11.1
+ARG CADDY_CANDIDATE_VERSION=2.11.1
+ARG CADDY_USE_CANDIDATE=0
+ARG CADDY_PATCH_SCENARIO=B
## When an official caddy image tag isn't available on the host, use a
## plain Alpine base image and overwrite its caddy binary with our
## xcaddy-built binary in the later COPY step. This avoids relying on
@@ -65,7 +68,7 @@ RUN --mount=type=cache,target=/root/.cache/go-build \
# ---- Frontend Builder ----
# Build the frontend using the BUILDPLATFORM to avoid arm64 musl Rollup native issues
# renovate: datasource=docker depName=node
-FROM --platform=$BUILDPLATFORM node:24.13.1-alpine AS frontend-builder
+FROM --platform=$BUILDPLATFORM node:24.14.0-alpine AS frontend-builder
WORKDIR /app/frontend
# Copy frontend package files
@@ -196,6 +199,9 @@ FROM --platform=$BUILDPLATFORM golang:1.26-alpine AS caddy-builder
ARG TARGETOS
ARG TARGETARCH
ARG CADDY_VERSION
+ARG CADDY_CANDIDATE_VERSION
+ARG CADDY_USE_CANDIDATE
+ARG CADDY_PATCH_SCENARIO
# renovate: datasource=go depName=github.com/caddyserver/xcaddy
ARG XCADDY_VERSION=0.4.5
@@ -213,10 +219,16 @@ RUN --mount=type=cache,target=/go/pkg/mod \
RUN --mount=type=cache,target=/root/.cache/go-build \
--mount=type=cache,target=/go/pkg/mod \
sh -c 'set -e; \
+ CADDY_TARGET_VERSION="${CADDY_VERSION}"; \
+ if [ "${CADDY_USE_CANDIDATE}" = "1" ]; then \
+ CADDY_TARGET_VERSION="${CADDY_CANDIDATE_VERSION}"; \
+ fi; \
+ echo "Using Caddy target version: v${CADDY_TARGET_VERSION}"; \
+ echo "Using Caddy patch scenario: ${CADDY_PATCH_SCENARIO}"; \
export XCADDY_SKIP_CLEANUP=1; \
echo "Stage 1: Generate go.mod with xcaddy..."; \
# Run xcaddy to generate the build directory and go.mod
- GOOS=$TARGETOS GOARCH=$TARGETARCH xcaddy build v${CADDY_VERSION} \
+ GOOS=$TARGETOS GOARCH=$TARGETARCH xcaddy build v${CADDY_TARGET_VERSION} \
--with github.com/greenpau/caddy-security \
--with github.com/corazawaf/coraza-caddy/v2 \
--with github.com/hslatman/caddy-crowdsec-bouncer@v0.10.0 \
@@ -239,12 +251,21 @@ RUN --mount=type=cache,target=/root/.cache/go-build \
go get github.com/expr-lang/expr@v1.17.7; \
# renovate: datasource=go depName=github.com/hslatman/ipstore
go get github.com/hslatman/ipstore@v0.4.0; \
- # NOTE: smallstep/certificates (pulled by caddy-security stack) currently
- # uses legacy nebula APIs removed in nebula v1.10+, which causes compile
- # failures in authority/provisioner. Keep this pinned to a known-compatible
- # v1.9.x release until upstream stack supports nebula v1.10+.
- # renovate: datasource=go depName=github.com/slackhq/nebula
- go get github.com/slackhq/nebula@v1.9.7; \
+ if [ "${CADDY_PATCH_SCENARIO}" = "A" ]; then \
+ # Rollback scenario: keep explicit nebula pin if upstream compatibility regresses.
+ # NOTE: smallstep/certificates (pulled by caddy-security stack) currently
+ # uses legacy nebula APIs removed in nebula v1.10+, which causes compile
+ # failures in authority/provisioner. Keep this pinned to a known-compatible
+ # v1.9.x release until upstream stack supports nebula v1.10+.
+ # renovate: datasource=go depName=github.com/slackhq/nebula
+ go get github.com/slackhq/nebula@v1.9.7; \
+ elif [ "${CADDY_PATCH_SCENARIO}" = "B" ] || [ "${CADDY_PATCH_SCENARIO}" = "C" ]; then \
+ # Default PR-2 posture: retire explicit nebula pin and use upstream resolution.
+ echo "Skipping nebula pin for scenario ${CADDY_PATCH_SCENARIO}"; \
+ else \
+ echo "Unsupported CADDY_PATCH_SCENARIO=${CADDY_PATCH_SCENARIO}"; \
+ exit 1; \
+ fi; \
# Clean up go.mod and ensure all dependencies are resolved
go mod tidy; \
echo "Dependencies patched successfully"; \
diff --git a/README.md b/README.md
index 74556475..64f23ed8 100644
--- a/README.md
+++ b/README.md
@@ -94,6 +94,19 @@ services:
retries: 3
start_period: 40s
```
+> **Docker Socket Access:** Charon runs as a non-root user. If you mount the Docker socket for container discovery, the container needs permission to read it. Find your socket's group ID and add it to the compose file:
+>
+> ```bash
+> stat -c '%g' /var/run/docker.sock
+> ```
+>
+> Then add `group_add: [""]` under your service (replace `` with the number from the command above). For example, if the result is `998`:
+>
+> ```yaml
+> group_add:
+> - "998"
+> ```
+
### 2️⃣ Generate encryption key:
```bash
openssl rand -base64 32
diff --git a/SECURITY.md b/SECURITY.md
index 149f771e..64457bdc 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -25,11 +25,10 @@ We take security seriously. If you discover a security vulnerability in Charon,
- Impact assessment
- Suggested fix (if applicable)
-**Alternative Method**: Email
+**Alternative Method**: GitHub Issues (Public)
-- Send to: `security@charon.dev` (if configured)
-- Use PGP encryption (key available below, if applicable)
-- Include same information as GitHub advisory
+1. Go to
+2. Create a new issue with the same information as above
### What to Include
@@ -125,6 +124,7 @@ For complete technical details, see:
### Infrastructure Security
+- **Non-root by default**: Charon runs as an unprivileged user (`charon`, uid 1000) inside the container. Docker socket access is granted via a minimal supplemental group matching the host socket's GID—never by running as root. If the socket GID is `0` (root group), Charon requires explicit opt-in before granting access.
- **Container isolation**: Docker-based deployment
- **Minimal attack surface**: Alpine Linux base image
- **Dependency scanning**: Regular Trivy and govulncheck scans
diff --git a/VERSION.md b/VERSION.md
index d20f5a8d..90129050 100644
--- a/VERSION.md
+++ b/VERSION.md
@@ -19,36 +19,76 @@ Example: `0.1.0-alpha`, `1.0.0-beta.1`, `2.0.0-rc.2`
## Creating a Release
-### Automated Release Process
+### Canonical Release Process (Tag-Derived CI)
-1. **Update version** in `.version` file:
+1. **Create and push a release tag**:
```bash
- echo "1.0.0" > .version
+ git tag -a v1.0.0 -m "Release v1.0.0"
+ git push origin v1.0.0
```
-2. **Commit version bump**:
+2. **GitHub Actions automatically**:
+ - Runs release workflow from the pushed tag (`.github/workflows/release-goreleaser.yml`)
+ - Builds and publishes release artifacts/images through CI (`.github/workflows/docker-build.yml`)
+ - Creates/updates GitHub Release metadata
+
+3. **Container tags are published**:
+ - `v1.0.0` (exact version)
+ - `1.0` (minor version)
+ - `1` (major version)
+ - `latest` (for non-prerelease on main branch)
+
+### Legacy/Optional `.version` Path
+
+The `.version` file is optional and not the canonical release trigger.
+
+Use it only when you need local/version-file parity checks:
+
+1. **Set `.version` locally (optional)**:
```bash
- git add .version
- git commit -m "chore: bump version to 1.0.0"
+ echo "1.0.0" > .version
```
-3. **Create and push tag**:
+2. **Validate `.version` matches the latest tag**:
```bash
- git tag -a v1.0.0 -m "Release v1.0.0"
- git push origin v1.0.0
+ bash scripts/check-version-match-tag.sh
```
-4. **GitHub Actions automatically**:
- - Creates GitHub Release with changelog
- - Builds multi-arch Docker images (amd64, arm64)
- - Publishes to GitHub Container Registry with tags:
- - `v1.0.0` (exact version)
- - `1.0` (minor version)
- - `1` (major version)
- - `latest` (for non-prerelease on main branch)
+### Deterministic Rollout Verification Gates (Mandatory)
+
+Release sign-off is blocked until all items below pass in the same validation
+run.
+
+Enforcement points:
+
+- Release sign-off checklist/process (mandatory): All gates below remain required for release sign-off.
+- CI-supported checks (current): `.github/workflows/docker-build.yml` and `.github/workflows/supply-chain-verify.yml` enforce the subset currently implemented in workflows.
+- Manual validation required until CI parity: Validate any not-yet-implemented workflow gates via VS Code tasks `Security: Full Supply Chain Audit`, `Security: Verify SBOM`, `Security: Generate SLSA Provenance`, and `Security: Sign with Cosign`.
+- Optional version-file parity check: `Utility: Check Version Match Tag` (script: `scripts/check-version-match-tag.sh`).
+
+- [ ] **Digest freshness/parity:** Capture pre-push and post-push index digests
+ for the target tag in GHCR and Docker Hub, confirm expected freshness,
+ and confirm cross-registry index digest parity.
+- [ ] **Per-arch parity:** Confirm per-platform (`linux/amd64`, `linux/arm64`,
+ and any published platform) digest parity between GHCR and Docker Hub.
+- [ ] **Immutable digest scanning:** Run SBOM and vulnerability scans against
+ immutable refs only, using `image@sha256:`.
+- [ ] **Artifact freshness:** Confirm scan artifacts are generated after the
+ push timestamp and in the same validation run.
+- [ ] **Evidence block present:** Include the mandatory evidence block fields
+ listed below.
+
+#### Mandatory Evidence Block Fields
+
+- Tag name
+- Index digest (`sha256:...`)
+- Per-arch digests (platform -> digest)
+- Scan tool versions
+- Push timestamp and scan timestamp(s)
+- Artifact file names generated in this run
## Container Image Tags
diff --git a/backend/.golangci-fast.yml b/backend/.golangci-fast.yml
index acf0c621..e9b54d63 100644
--- a/backend/.golangci-fast.yml
+++ b/backend/.golangci-fast.yml
@@ -12,7 +12,7 @@ linters:
- ineffassign # Ineffectual assignments
- unused # Unused code detection
- gosec # Security checks (critical issues only)
- linters-settings:
+ settings:
govet:
enable:
- shadow
diff --git a/backend/.golangci.yml b/backend/.golangci.yml
index c89d75aa..4663bd4d 100644
--- a/backend/.golangci.yml
+++ b/backend/.golangci.yml
@@ -1,5 +1,5 @@
# golangci-lint configuration
-version: 2
+version: "2"
run:
timeout: 5m
tests: true
@@ -14,7 +14,7 @@ linters:
- staticcheck
- unused
- errcheck
- linters-settings:
+ settings:
gocritic:
enabled-tags:
- diagnostic
diff --git a/backend/cmd/api/main.go b/backend/cmd/api/main.go
index acd31c44..5bc85409 100644
--- a/backend/cmd/api/main.go
+++ b/backend/cmd/api/main.go
@@ -260,7 +260,7 @@ func main() {
}
// Register import handler with config dependencies
- routes.RegisterImportHandler(router, db, cfg.CaddyBinary, cfg.ImportDir, cfg.ImportCaddyfile)
+ routes.RegisterImportHandler(router, db, cfg, cfg.CaddyBinary, cfg.ImportDir, cfg.ImportCaddyfile)
// Check for mounted Caddyfile on startup
if err := handlers.CheckMountedImport(db, cfg.ImportCaddyfile, cfg.CaddyBinary, cfg.ImportDir); err != nil {
diff --git a/backend/cmd/api/main_test.go b/backend/cmd/api/main_test.go
index 69bc5a9c..d260b552 100644
--- a/backend/cmd/api/main_test.go
+++ b/backend/cmd/api/main_test.go
@@ -311,7 +311,8 @@ func TestMain_DefaultStartupGracefulShutdown_Subprocess(t *testing.T) {
if err != nil {
t.Fatalf("find free http port: %v", err)
}
- if err := os.MkdirAll(filepath.Dir(dbPath), 0o750); err != nil {
+ err = os.MkdirAll(filepath.Dir(dbPath), 0o750)
+ if err != nil {
t.Fatalf("mkdir db dir: %v", err)
}
diff --git a/backend/cmd/localpatchreport/main.go b/backend/cmd/localpatchreport/main.go
index 74d8ec0e..479b2d36 100644
--- a/backend/cmd/localpatchreport/main.go
+++ b/backend/cmd/localpatchreport/main.go
@@ -64,11 +64,13 @@ func main() {
jsonOutPath := resolvePath(repoRoot, *jsonOutFlag)
mdOutPath := resolvePath(repoRoot, *mdOutFlag)
- if err := assertFileExists(backendCoveragePath, "backend coverage file"); err != nil {
+ err = assertFileExists(backendCoveragePath, "backend coverage file")
+ if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
- if err := assertFileExists(frontendCoveragePath, "frontend coverage file"); err != nil {
+ err = assertFileExists(frontendCoveragePath, "frontend coverage file")
+ if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
diff --git a/backend/cmd/localpatchreport/main_test.go b/backend/cmd/localpatchreport/main_test.go
index df04b8f8..a7e2a758 100644
--- a/backend/cmd/localpatchreport/main_test.go
+++ b/backend/cmd/localpatchreport/main_test.go
@@ -235,7 +235,8 @@ func TestGitDiffAndWriters(t *testing.T) {
t.Fatalf("expected empty diff for HEAD...HEAD, got: %q", diffContent)
}
- if _, err := gitDiff(repoRoot, "bad-baseline"); err == nil {
+ _, err = gitDiff(repoRoot, "bad-baseline")
+ if err == nil {
t.Fatal("expected gitDiff failure for invalid baseline")
}
@@ -263,7 +264,8 @@ func TestGitDiffAndWriters(t *testing.T) {
}
jsonPath := filepath.Join(t.TempDir(), "report.json")
- if err := writeJSON(jsonPath, report); err != nil {
+ err = writeJSON(jsonPath, report)
+ if err != nil {
t.Fatalf("writeJSON should succeed: %v", err)
}
// #nosec G304 -- Test reads artifact path created by this test.
@@ -276,7 +278,8 @@ func TestGitDiffAndWriters(t *testing.T) {
}
markdownPath := filepath.Join(t.TempDir(), "report.md")
- if err := writeMarkdown(markdownPath, report, "backend/coverage.txt", "frontend/coverage/lcov.info"); err != nil {
+ err = writeMarkdown(markdownPath, report, "backend/coverage.txt", "frontend/coverage/lcov.info")
+ if err != nil {
t.Fatalf("writeMarkdown should succeed: %v", err)
}
// #nosec G304 -- Test reads artifact path created by this test.
diff --git a/backend/go.mod b/backend/go.mod
index 42e48b09..5e60f1f7 100644
--- a/backend/go.mod
+++ b/backend/go.mod
@@ -5,7 +5,7 @@ go 1.26
require (
github.com/docker/docker v28.5.2+incompatible
github.com/gin-contrib/gzip v1.2.5
- github.com/gin-gonic/gin v1.11.0
+ github.com/gin-gonic/gin v1.12.0
github.com/glebarez/sqlite v1.11.0
github.com/golang-jwt/jwt/v5 v5.3.1
github.com/google/uuid v1.6.0
@@ -17,7 +17,7 @@ require (
github.com/sirupsen/logrus v1.9.4
github.com/stretchr/testify v1.11.1
golang.org/x/crypto v0.48.0
- golang.org/x/net v0.50.0
+ golang.org/x/net v0.51.0
golang.org/x/text v0.34.0
golang.org/x/time v0.14.0
gopkg.in/natefinch/lumberjack.v2 v2.2.1
@@ -29,8 +29,8 @@ require (
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bytedance/gopkg v0.1.3 // indirect
- github.com/bytedance/sonic v1.14.1 // indirect
- github.com/bytedance/sonic/loader v0.3.0 // indirect
+ github.com/bytedance/sonic v1.15.0 // indirect
+ github.com/bytedance/sonic/loader v0.5.0 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/containerd/errdefs v1.0.0 // indirect
@@ -42,16 +42,16 @@ require (
github.com/docker/go-units v0.5.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
- github.com/gabriel-vasile/mimetype v1.4.12 // indirect
+ github.com/gabriel-vasile/mimetype v1.4.13 // indirect
github.com/gin-contrib/sse v1.1.0 // indirect
- github.com/glebarez/go-sqlite v1.21.2 // indirect
+ github.com/glebarez/go-sqlite v1.22.0 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.30.1 // indirect
github.com/goccy/go-json v0.10.5 // indirect
- github.com/goccy/go-yaml v1.18.0 // indirect
+ github.com/goccy/go-yaml v1.19.2 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/json-iterator/go v1.1.12 // indirect
@@ -66,6 +66,7 @@ require (
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/morikuni/aec v1.0.0 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
+ github.com/ncruces/go-strftime v1.0.0 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.1.1 // indirect
github.com/oschwald/maxminddb-golang/v2 v2.1.1 // indirect
@@ -73,28 +74,29 @@ require (
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_model v0.6.2 // indirect
- github.com/prometheus/common v0.66.1 // indirect
- github.com/prometheus/procfs v0.16.1 // indirect
+ github.com/prometheus/common v0.67.5 // indirect
+ github.com/prometheus/procfs v0.20.1 // indirect
github.com/quic-go/qpack v0.6.0 // indirect
github.com/quic-go/quic-go v0.59.0 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
- github.com/ugorji/go/codec v1.3.0 // indirect
- go.opentelemetry.io/auto/sdk v1.1.0 // indirect
- go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 // indirect
- go.opentelemetry.io/otel v1.38.0 // indirect
+ github.com/ugorji/go/codec v1.3.1 // indirect
+ go.mongodb.org/mongo-driver/v2 v2.5.0 // indirect
+ go.opentelemetry.io/auto/sdk v1.2.1 // indirect
+ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.65.0 // indirect
+ go.opentelemetry.io/otel v1.40.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 // indirect
- go.opentelemetry.io/otel/metric v1.38.0 // indirect
- go.opentelemetry.io/otel/trace v1.38.0 // indirect
- go.yaml.in/yaml/v2 v2.4.2 // indirect
- golang.org/x/arch v0.22.0 // indirect
+ go.opentelemetry.io/otel/metric v1.40.0 // indirect
+ go.opentelemetry.io/otel/trace v1.40.0 // indirect
+ go.yaml.in/yaml/v2 v2.4.3 // indirect
+ golang.org/x/arch v0.24.0 // indirect
golang.org/x/sys v0.41.0 // indirect
google.golang.org/protobuf v1.36.11 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
gotest.tools/v3 v3.5.2 // indirect
- modernc.org/libc v1.22.5 // indirect
- modernc.org/mathutil v1.5.0 // indirect
- modernc.org/memory v1.5.0 // indirect
- modernc.org/sqlite v1.23.1 // indirect
+ modernc.org/libc v1.69.0 // indirect
+ modernc.org/mathutil v1.7.1 // indirect
+ modernc.org/memory v1.11.0 // indirect
+ modernc.org/sqlite v1.46.1 // indirect
)
diff --git a/backend/go.sum b/backend/go.sum
index abe43414..489d36a5 100644
--- a/backend/go.sum
+++ b/backend/go.sum
@@ -6,10 +6,10 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M=
github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM=
-github.com/bytedance/sonic v1.14.1 h1:FBMC0zVz5XUmE4z9wF4Jey0An5FueFvOsTKKKtwIl7w=
-github.com/bytedance/sonic v1.14.1/go.mod h1:gi6uhQLMbTdeP0muCnrjHLeCUPyb70ujhnNlhOylAFc=
-github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA=
-github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
+github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE=
+github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k=
+github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE=
+github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo=
github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM=
github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
@@ -37,16 +37,16 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
-github.com/gabriel-vasile/mimetype v1.4.12 h1:e9hWvmLYvtp846tLHam2o++qitpguFiYCKbn0w9jyqw=
-github.com/gabriel-vasile/mimetype v1.4.12/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
+github.com/gabriel-vasile/mimetype v1.4.13 h1:46nXokslUBsAJE/wMsp5gtO500a4F3Nkz9Ufpk2AcUM=
+github.com/gabriel-vasile/mimetype v1.4.13/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
github.com/gin-contrib/gzip v1.2.5 h1:fIZs0S+l17pIu1P5XRJOo/YNqfIuPCrZZ3TWB7pjckI=
github.com/gin-contrib/gzip v1.2.5/go.mod h1:aomRgR7ftdZV3uWY0gW/m8rChfxau0n8YVvwlOHONzw=
github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w=
github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM=
-github.com/gin-gonic/gin v1.11.0 h1:OW/6PLjyusp2PPXtyxKHU0RbX6I/l28FTdDlae5ueWk=
-github.com/gin-gonic/gin v1.11.0/go.mod h1:+iq/FyxlGzII0KHiBGjuNn4UNENUlKbGlNmc+W50Dls=
-github.com/glebarez/go-sqlite v1.21.2 h1:3a6LFC4sKahUunAmynQKLZceZCOzUthkRkEAl9gAXWo=
-github.com/glebarez/go-sqlite v1.21.2/go.mod h1:sfxdZyhQjTM2Wry3gVYWaW072Ri1WMdWJi0k6+3382k=
+github.com/gin-gonic/gin v1.12.0 h1:b3YAbrZtnf8N//yjKeU2+MQsh2mY5htkZidOM7O0wG8=
+github.com/gin-gonic/gin v1.12.0/go.mod h1:VxccKfsSllpKshkBWgVgRniFFAzFb9csfngsqANjnLc=
+github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
+github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc=
github.com/glebarez/sqlite v1.11.0 h1:wSG0irqzP6VurnMEpFGer5Li19RpIRi2qvQz++w0GMw=
github.com/glebarez/sqlite v1.11.0/go.mod h1:h8/o8j5wiAsqSPoWELDUdJXhjAhsVliSn7bWZjOhrgQ=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
@@ -64,21 +64,23 @@ github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy0
github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
-github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
-github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
+github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM=
+github.com/goccy/go-yaml v1.19.2/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY=
github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
-github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ=
-github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo=
+github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
+github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs=
+github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
+github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
@@ -118,6 +120,8 @@ github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
+github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
+github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
@@ -136,21 +140,20 @@ github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h
github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
-github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9ZoGs=
-github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA=
-github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg=
-github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is=
+github.com/prometheus/common v0.67.5 h1:pIgK94WWlQt1WLwAC5j2ynLaBRDiinoAb86HZHTUGI4=
+github.com/prometheus/common v0.67.5/go.mod h1:SjE/0MzDEEAyrdr5Gqc6G+sXI67maCxzaT3A2+HqjUw=
+github.com/prometheus/procfs v0.20.1 h1:XwbrGOIplXW/AU3YhIhLODXMJYyC1isLFfYCsTEycfc=
+github.com/prometheus/procfs v0.20.1/go.mod h1:o9EMBZGRyvDrSPH1RqdxhojkuXstoe4UlK79eF5TGGo=
github.com/quic-go/qpack v0.6.0 h1:g7W+BMYynC1LbYLSqRt8PBg5Tgwxn214ZZR34VIOjz8=
github.com/quic-go/qpack v0.6.0/go.mod h1:lUpLKChi8njB4ty2bFLX2x4gzDqXwUpaO1DP9qMDZII=
github.com/quic-go/quic-go v0.59.0 h1:OLJkp1Mlm/aS7dpKgTc6cnpynnD2Xg7C1pwL6vy/SAw=
github.com/quic-go/quic-go v0.59.0/go.mod h1:upnsH4Ju1YkqpLXC305eW3yDZ4NfnNbmQRCMWS58IKU=
-github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
-github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
-github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
+github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
+github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w=
github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
@@ -161,45 +164,52 @@ github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
-github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
+github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
-github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA=
-github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4=
-go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
-go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 h1:RbKq8BG0FI8OiXhBfcRtqqHcZcka+gU3cskNuf05R18=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0/go.mod h1:h06DGIukJOevXaj/xrNjhi/2098RZzcLTbc0jDAUbsg=
-go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8=
-go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM=
+github.com/ugorji/go/codec v1.3.1 h1:waO7eEiFDwidsBN6agj1vJQ4AG7lh2yqXyOXqhgQuyY=
+github.com/ugorji/go/codec v1.3.1/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4=
+go.mongodb.org/mongo-driver/v2 v2.5.0 h1:yXUhImUjjAInNcpTcAlPHiT7bIXhshCTL3jVBkF3xaE=
+go.mongodb.org/mongo-driver/v2 v2.5.0/go.mod h1:yOI9kBsufol30iFsl1slpdq1I0eHPzybRWdyYUs8K/0=
+go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
+go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.65.0 h1:7iP2uCb7sGddAr30RRS6xjKy7AZ2JtTOPA3oolgVSw8=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.65.0/go.mod h1:c7hN3ddxs/z6q9xwvfLPk+UHlWRQyaeR1LdgfL/66l0=
+go.opentelemetry.io/otel v1.40.0 h1:oA5YeOcpRTXq6NN7frwmwFR0Cn3RhTVZvXsP4duvCms=
+go.opentelemetry.io/otel v1.40.0/go.mod h1:IMb+uXZUKkMXdPddhwAHm6UfOwJyh4ct1ybIlV14J0g=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 h1:GqRJVj7UmLjCVyVJ3ZFLdPRmhDUp2zFmQe3RHIOsw24=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0/go.mod h1:ri3aaHSmCTVYu2AWv44YMauwAQc0aqI9gHKIcSbI1pU=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 h1:aTL7F04bJHUlztTsNGJ2l+6he8c+y/b//eR0jjjemT4=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0/go.mod h1:kldtb7jDTeol0l3ewcmd8SDvx3EmIE7lyvqbasU3QC4=
-go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA=
-go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI=
-go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E=
-go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg=
-go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM=
-go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
-go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
-go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
+go.opentelemetry.io/otel/metric v1.40.0 h1:rcZe317KPftE2rstWIBitCdVp89A2HqjkxR3c11+p9g=
+go.opentelemetry.io/otel/metric v1.40.0/go.mod h1:ib/crwQH7N3r5kfiBZQbwrTge743UDc7DTFVZrrXnqc=
+go.opentelemetry.io/otel/sdk v1.40.0 h1:KHW/jUzgo6wsPh9At46+h4upjtccTmuZCFAc9OJ71f8=
+go.opentelemetry.io/otel/sdk v1.40.0/go.mod h1:Ph7EFdYvxq72Y8Li9q8KebuYUr2KoeyHx0DRMKrYBUE=
+go.opentelemetry.io/otel/sdk/metric v1.40.0 h1:mtmdVqgQkeRxHgRv4qhyJduP3fYJRMX4AtAlbuWdCYw=
+go.opentelemetry.io/otel/sdk/metric v1.40.0/go.mod h1:4Z2bGMf0KSK3uRjlczMOeMhKU2rhUqdWNoKcYrtcBPg=
+go.opentelemetry.io/otel/trace v1.40.0 h1:WA4etStDttCSYuhwvEa8OP8I5EWu24lkOzp+ZYblVjw=
+go.opentelemetry.io/otel/trace v1.40.0/go.mod h1:zeAhriXecNGP/s2SEG3+Y8X9ujcJOTqQ5RgdEJcawiA=
go.opentelemetry.io/proto/otlp v1.7.1 h1:gTOMpGDb0WTBOP8JaO72iL3auEZhVmAQg4ipjOVAtj4=
go.opentelemetry.io/proto/otlp v1.7.1/go.mod h1:b2rVh6rfI/s2pHWNlB7ILJcRALpcNDzKhACevjI+ZnE=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y=
go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU=
-go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
-go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
-golang.org/x/arch v0.22.0 h1:c/Zle32i5ttqRXjdLyyHZESLD/bB90DCU1g9l/0YBDI=
-golang.org/x/arch v0.22.0/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A=
+go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0=
+go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8=
+golang.org/x/arch v0.24.0 h1:qlJ3M9upxvFfwRM51tTg3Yl+8CP9vCC1E7vlFpgv99Y=
+golang.org/x/arch v0.24.0/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A=
golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
-golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
-golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
+golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
+golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
+golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo=
+golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y=
+golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
+golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
@@ -207,6 +217,8 @@ golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
+golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
+golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 h1:BIRfGDEjiHRrk0QKZe3Xv2ieMhtgRGeLcZQ0mIVn4EY=
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5/go.mod h1:j3QtIyytwqGr1JUDtYXwtMXWPKsEa5LtzIFN1Wn5WvE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 h1:eaY8u2EuxbRv7c3NiGK0/NedzVsCcV6hDuU5qPX5EGE=
@@ -229,11 +241,31 @@ gorm.io/gorm v1.31.1 h1:7CA8FTFz/gRfgqgpeKIBcervUn3xSyPUmr6B2WXJ7kg=
gorm.io/gorm v1.31.1/go.mod h1:XyQVbO2k6YkOis7C2437jSit3SsDK72s7n7rsSHd+Gs=
gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA=
-modernc.org/libc v1.22.5 h1:91BNch/e5B0uPbJFgqbxXuOnxBQjlS//icfQEGmvyjE=
-modernc.org/libc v1.22.5/go.mod h1:jj+Z7dTNX8fBScMVNRAYZ/jF91K8fdT2hYMThc3YjBY=
-modernc.org/mathutil v1.5.0 h1:rV0Ko/6SfM+8G+yKiyI830l3Wuz1zRutdslNoQ0kfiQ=
-modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
-modernc.org/memory v1.5.0 h1:N+/8c5rE6EqugZwHii4IFsaJ7MUhoWX07J5tC/iI5Ds=
-modernc.org/memory v1.5.0/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU=
-modernc.org/sqlite v1.23.1 h1:nrSBg4aRQQwq59JpvGEQ15tNxoO5pX/kUjcRNwSAGQM=
-modernc.org/sqlite v1.23.1/go.mod h1:OrDj17Mggn6MhE+iPbBNf7RGKODDE9NFT0f3EwDzJqk=
+modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
+modernc.org/cc/v4 v4.27.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
+modernc.org/ccgo/v4 v4.31.0 h1:/bsaxqdgX3gy/0DboxcvWrc3NpzH+6wpFfI/ZaA/hrg=
+modernc.org/ccgo/v4 v4.31.0/go.mod h1:jKe8kPBjIN/VdGTVqARTQ8N1gAziBmiISY8j5HoKwjg=
+modernc.org/fileutil v1.4.0 h1:j6ZzNTftVS054gi281TyLjHPp6CPHr2KCxEXjEbD6SM=
+modernc.org/fileutil v1.4.0/go.mod h1:EqdKFDxiByqxLk8ozOxObDSfcVOv/54xDs/DUHdvCUU=
+modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
+modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
+modernc.org/gc/v3 v3.1.2 h1:ZtDCnhonXSZexk/AYsegNRV1lJGgaNZJuKjJSWKyEqo=
+modernc.org/gc/v3 v3.1.2/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY=
+modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
+modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
+modernc.org/libc v1.69.0 h1:YQJ5QMSReTgQ3QFmI0dudfjXIjCcYTUxcH8/9P9f0D8=
+modernc.org/libc v1.69.0/go.mod h1:YfLLduUEbodNV2xLU5JOnRHBTAHVHsVW3bVYGw0ZCV4=
+modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
+modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
+modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
+modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
+modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
+modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
+modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
+modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
+modernc.org/sqlite v1.46.1 h1:eFJ2ShBLIEnUWlLy12raN0Z1plqmFX9Qe3rjQTKt6sU=
+modernc.org/sqlite v1.46.1/go.mod h1:CzbrU2lSB1DKUusvwGz7rqEKIq+NUd8GWuBBZDs9/nA=
+modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
+modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
+modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
+modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
diff --git a/backend/integration/notification_http_wrapper_integration_test.go b/backend/integration/notification_http_wrapper_integration_test.go
new file mode 100644
index 00000000..2b228a0e
--- /dev/null
+++ b/backend/integration/notification_http_wrapper_integration_test.go
@@ -0,0 +1,124 @@
+//go:build integration
+// +build integration
+
+package integration
+
+import (
+ "context"
+ "net/http"
+ "net/http/httptest"
+ "strings"
+ "sync/atomic"
+ "testing"
+
+ "github.com/Wikid82/charon/backend/internal/notifications"
+)
+
+func TestNotificationHTTPWrapperIntegration_RetriesOn429AndSucceeds(t *testing.T) {
+ t.Parallel()
+
+ var calls int32
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ current := atomic.AddInt32(&calls, 1)
+ if current == 1 {
+ w.WriteHeader(http.StatusTooManyRequests)
+ return
+ }
+ w.WriteHeader(http.StatusOK)
+ _, _ = w.Write([]byte(`{"ok":true}`))
+ }))
+ defer server.Close()
+
+ wrapper := notifications.NewNotifyHTTPWrapper()
+ result, err := wrapper.Send(context.Background(), notifications.HTTPWrapperRequest{
+ URL: server.URL,
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err != nil {
+ t.Fatalf("expected retry success, got error: %v", err)
+ }
+ if result.Attempts != 2 {
+ t.Fatalf("expected 2 attempts, got %d", result.Attempts)
+ }
+}
+
+func TestNotificationHTTPWrapperIntegration_DoesNotRetryOn400(t *testing.T) {
+ t.Parallel()
+
+ var calls int32
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ atomic.AddInt32(&calls, 1)
+ w.WriteHeader(http.StatusBadRequest)
+ }))
+ defer server.Close()
+
+ wrapper := notifications.NewNotifyHTTPWrapper()
+ _, err := wrapper.Send(context.Background(), notifications.HTTPWrapperRequest{
+ URL: server.URL,
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err == nil {
+ t.Fatalf("expected non-retryable 400 error")
+ }
+ if atomic.LoadInt32(&calls) != 1 {
+ t.Fatalf("expected one request attempt, got %d", calls)
+ }
+}
+
+func TestNotificationHTTPWrapperIntegration_RejectsTokenizedQueryWithoutEcho(t *testing.T) {
+ t.Parallel()
+
+ wrapper := notifications.NewNotifyHTTPWrapper()
+ secret := "pr1-secret-token-value"
+ _, err := wrapper.Send(context.Background(), notifications.HTTPWrapperRequest{
+ URL: "http://example.com/hook?token=" + secret,
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err == nil {
+ t.Fatalf("expected tokenized query rejection")
+ }
+ if !strings.Contains(err.Error(), "query authentication is not allowed") {
+ t.Fatalf("expected sanitized query-auth rejection, got: %v", err)
+ }
+ if strings.Contains(err.Error(), secret) {
+ t.Fatalf("error must not echo secret token")
+ }
+}
+
+func TestNotificationHTTPWrapperIntegration_HeaderAllowlistSafety(t *testing.T) {
+ t.Parallel()
+
+ var seenAuthHeader string
+ var seenCookieHeader string
+ var seenGotifyKey string
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ seenAuthHeader = r.Header.Get("Authorization")
+ seenCookieHeader = r.Header.Get("Cookie")
+ seenGotifyKey = r.Header.Get("X-Gotify-Key")
+ w.WriteHeader(http.StatusOK)
+ }))
+ defer server.Close()
+
+ wrapper := notifications.NewNotifyHTTPWrapper()
+ _, err := wrapper.Send(context.Background(), notifications.HTTPWrapperRequest{
+ URL: server.URL,
+ Headers: map[string]string{
+ "Authorization": "Bearer should-not-leak",
+ "Cookie": "session=should-not-leak",
+ "X-Gotify-Key": "allowed-token",
+ },
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err != nil {
+ t.Fatalf("expected success, got error: %v", err)
+ }
+ if seenAuthHeader != "" {
+ t.Fatalf("authorization header must be stripped")
+ }
+ if seenCookieHeader != "" {
+ t.Fatalf("cookie header must be stripped")
+ }
+ if seenGotifyKey != "allowed-token" {
+ t.Fatalf("expected X-Gotify-Key to pass through")
+ }
+}
diff --git a/backend/internal/api/handlers/additional_coverage_test.go b/backend/internal/api/handlers/additional_coverage_test.go
index a0181092..63b95a1f 100644
--- a/backend/internal/api/handlers/additional_coverage_test.go
+++ b/backend/internal/api/handlers/additional_coverage_test.go
@@ -170,6 +170,7 @@ func TestSecurityHandler_UpdateConfig_ApplyCaddyError(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
c.Request = httptest.NewRequest("PUT", "/security/config", bytes.NewBuffer(body))
c.Request.Header.Set("Content-Type", "application/json")
@@ -190,6 +191,7 @@ func TestSecurityHandler_GenerateBreakGlass_Error(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
c.Request = httptest.NewRequest("POST", "/security/breakglass", http.NoBody)
h.GenerateBreakGlass(c)
@@ -252,6 +254,7 @@ func TestSecurityHandler_UpsertRuleSet_Error(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
c.Request = httptest.NewRequest("POST", "/security/rulesets", bytes.NewBuffer(body))
c.Request.Header.Set("Content-Type", "application/json")
@@ -277,6 +280,7 @@ func TestSecurityHandler_CreateDecision_LogError(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
c.Request = httptest.NewRequest("POST", "/security/decisions", bytes.NewBuffer(body))
c.Request.Header.Set("Content-Type", "application/json")
@@ -297,6 +301,7 @@ func TestSecurityHandler_DeleteRuleSet_Error(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
c.Params = gin.Params{{Key: "id", Value: "999"}}
h.DeleteRuleSet(c)
diff --git a/backend/internal/api/handlers/auth_handler.go b/backend/internal/api/handlers/auth_handler.go
index 28695ec8..8d6c86e0 100644
--- a/backend/internal/api/handlers/auth_handler.go
+++ b/backend/internal/api/handlers/auth_handler.go
@@ -127,18 +127,20 @@ func isLocalRequest(c *gin.Context) bool {
// setSecureCookie sets an auth cookie with security best practices
// - HttpOnly: prevents JavaScript access (XSS protection)
-// - Secure: derived from request scheme to allow HTTP/IP logins when needed
+// - Secure: true for HTTPS; false only for local non-HTTPS loopback flows
// - SameSite: Strict for HTTPS, Lax for HTTP/IP to allow forward-auth redirects
func setSecureCookie(c *gin.Context, name, value string, maxAge int) {
scheme := requestScheme(c)
- secure := scheme == "https"
+ secure := true
sameSite := http.SameSiteStrictMode
if scheme != "https" {
sameSite = http.SameSiteLaxMode
+ if isLocalRequest(c) {
+ secure = false
+ }
}
if isLocalRequest(c) {
- secure = false
sameSite = http.SameSiteLaxMode
}
@@ -152,7 +154,7 @@ func setSecureCookie(c *gin.Context, name, value string, maxAge int) {
maxAge, // maxAge in seconds
"/", // path
domain, // domain (empty = current host)
- secure, // secure (HTTPS only in production)
+ secure, // secure (always true)
true, // httpOnly (no JS access)
)
}
diff --git a/backend/internal/api/handlers/auth_handler_test.go b/backend/internal/api/handlers/auth_handler_test.go
index 4241adea..72f73c88 100644
--- a/backend/internal/api/handlers/auth_handler_test.go
+++ b/backend/internal/api/handlers/auth_handler_test.go
@@ -94,10 +94,28 @@ func TestSetSecureCookie_HTTP_Lax(t *testing.T) {
cookies := recorder.Result().Cookies()
require.Len(t, cookies, 1)
c := cookies[0]
- assert.False(t, c.Secure)
+ assert.True(t, c.Secure)
assert.Equal(t, http.SameSiteLaxMode, c.SameSite)
}
+func TestSetSecureCookie_HTTP_Loopback_Insecure(t *testing.T) {
+ t.Parallel()
+ gin.SetMode(gin.TestMode)
+ recorder := httptest.NewRecorder()
+ ctx, _ := gin.CreateTestContext(recorder)
+ req := httptest.NewRequest("POST", "http://127.0.0.1:8080/login", http.NoBody)
+ req.Host = "127.0.0.1:8080"
+ req.Header.Set("X-Forwarded-Proto", "http")
+ ctx.Request = req
+
+ setSecureCookie(ctx, "auth_token", "abc", 60)
+ cookies := recorder.Result().Cookies()
+ require.Len(t, cookies, 1)
+ cookie := cookies[0]
+ assert.False(t, cookie.Secure)
+ assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
+}
+
func TestSetSecureCookie_ForwardedHTTPS_LocalhostForcesInsecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
@@ -115,7 +133,7 @@ func TestSetSecureCookie_ForwardedHTTPS_LocalhostForcesInsecure(t *testing.T) {
cookies := recorder.Result().Cookies()
require.Len(t, cookies, 1)
cookie := cookies[0]
- assert.False(t, cookie.Secure)
+ assert.True(t, cookie.Secure)
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
}
@@ -136,7 +154,7 @@ func TestSetSecureCookie_ForwardedHTTPS_LoopbackForcesInsecure(t *testing.T) {
cookies := recorder.Result().Cookies()
require.Len(t, cookies, 1)
cookie := cookies[0]
- assert.False(t, cookie.Secure)
+ assert.True(t, cookie.Secure)
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
}
@@ -158,7 +176,7 @@ func TestSetSecureCookie_ForwardedHostLocalhostForcesInsecure(t *testing.T) {
cookies := recorder.Result().Cookies()
require.Len(t, cookies, 1)
cookie := cookies[0]
- assert.False(t, cookie.Secure)
+ assert.True(t, cookie.Secure)
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
}
@@ -180,7 +198,7 @@ func TestSetSecureCookie_OriginLoopbackForcesInsecure(t *testing.T) {
cookies := recorder.Result().Cookies()
require.Len(t, cookies, 1)
cookie := cookies[0]
- assert.False(t, cookie.Secure)
+ assert.True(t, cookie.Secure)
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
}
diff --git a/backend/internal/api/handlers/docker_handler.go b/backend/internal/api/handlers/docker_handler.go
index 93cdf816..945339b3 100644
--- a/backend/internal/api/handlers/docker_handler.go
+++ b/backend/internal/api/handlers/docker_handler.go
@@ -71,10 +71,14 @@ func (h *DockerHandler) ListContainers(c *gin.Context) {
if err != nil {
var unavailableErr *services.DockerUnavailableError
if errors.As(err, &unavailableErr) {
+ details := unavailableErr.Details()
+ if details == "" {
+ details = "Cannot connect to Docker. Please ensure Docker is running and the socket is accessible (e.g., /var/run/docker.sock is mounted)."
+ }
log.WithFields(map[string]any{"server_id": util.SanitizeForLog(serverID), "host": util.SanitizeForLog(host), "error": util.SanitizeForLog(err.Error())}).Warn("docker unavailable")
c.JSON(http.StatusServiceUnavailable, gin.H{
"error": "Docker daemon unavailable",
- "details": "Cannot connect to Docker. Please ensure Docker is running and the socket is accessible (e.g., /var/run/docker.sock is mounted).",
+ "details": details,
})
return
}
diff --git a/backend/internal/api/handlers/docker_handler_test.go b/backend/internal/api/handlers/docker_handler_test.go
index fa4d1cca..99a297fd 100644
--- a/backend/internal/api/handlers/docker_handler_test.go
+++ b/backend/internal/api/handlers/docker_handler_test.go
@@ -63,7 +63,7 @@ func TestDockerHandler_ListContainers_DockerUnavailableMappedTo503(t *testing.T)
gin.SetMode(gin.TestMode)
router := gin.New()
- dockerSvc := &fakeDockerService{err: services.NewDockerUnavailableError(errors.New("no docker socket"))}
+ dockerSvc := &fakeDockerService{err: services.NewDockerUnavailableError(errors.New("no docker socket"), "Local Docker socket is mounted but not accessible by current process")}
remoteSvc := &fakeRemoteServerService{}
h := NewDockerHandler(dockerSvc, remoteSvc)
@@ -78,7 +78,7 @@ func TestDockerHandler_ListContainers_DockerUnavailableMappedTo503(t *testing.T)
assert.Contains(t, w.Body.String(), "Docker daemon unavailable")
// Verify the new details field is included in the response
assert.Contains(t, w.Body.String(), "details")
- assert.Contains(t, w.Body.String(), "Docker is running")
+ assert.Contains(t, w.Body.String(), "not accessible by current process")
}
func TestDockerHandler_ListContainers_ServerIDResolvesToTCPHost(t *testing.T) {
@@ -360,3 +360,47 @@ func TestDockerHandler_ListContainers_GenericError(t *testing.T) {
})
}
}
+
+func TestDockerHandler_ListContainers_503FallbackDetailsWhenEmpty(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ router := gin.New()
+
+ dockerSvc := &fakeDockerService{err: services.NewDockerUnavailableError(errors.New("socket error"))}
+ remoteSvc := &fakeRemoteServerService{}
+ h := NewDockerHandler(dockerSvc, remoteSvc)
+
+ api := router.Group("/api/v1")
+ h.RegisterRoutes(api)
+
+ req := httptest.NewRequest(http.MethodGet, "/api/v1/docker/containers", http.NoBody)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusServiceUnavailable, w.Code)
+ assert.Contains(t, w.Body.String(), "Docker daemon unavailable")
+ assert.Contains(t, w.Body.String(), "docker.sock is mounted")
+}
+
+func TestDockerHandler_ListContainers_503DetailsWithGroupGuidance(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ router := gin.New()
+
+ groupDetails := `Local Docker socket is mounted but not accessible by current process (uid=1000 gid=1000). Process groups (1000) do not include socket gid 988; run container with matching supplemental group (e.g., --group-add 988 or compose group_add: ["988"]).`
+ dockerSvc := &fakeDockerService{
+ err: services.NewDockerUnavailableError(errors.New("EACCES"), groupDetails),
+ }
+ remoteSvc := &fakeRemoteServerService{}
+ h := NewDockerHandler(dockerSvc, remoteSvc)
+
+ api := router.Group("/api/v1")
+ h.RegisterRoutes(api)
+
+ req := httptest.NewRequest(http.MethodGet, "/api/v1/docker/containers?host=local", http.NoBody)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusServiceUnavailable, w.Code)
+ assert.Contains(t, w.Body.String(), "Docker daemon unavailable")
+ assert.Contains(t, w.Body.String(), "--group-add 988")
+ assert.Contains(t, w.Body.String(), "group_add")
+}
diff --git a/backend/internal/api/handlers/feature_flags_handler.go b/backend/internal/api/handlers/feature_flags_handler.go
index eefd36b2..dd991326 100644
--- a/backend/internal/api/handlers/feature_flags_handler.go
+++ b/backend/internal/api/handlers/feature_flags_handler.go
@@ -31,6 +31,7 @@ var defaultFlags = []string{
"feature.notifications.engine.notify_v1.enabled",
"feature.notifications.service.discord.enabled",
"feature.notifications.service.gotify.enabled",
+ "feature.notifications.service.webhook.enabled",
"feature.notifications.legacy.fallback_enabled",
"feature.notifications.security_provider_events.enabled", // Blocker 3: Add security_provider_events gate
}
@@ -42,6 +43,7 @@ var defaultFlagValues = map[string]bool{
"feature.notifications.engine.notify_v1.enabled": false,
"feature.notifications.service.discord.enabled": false,
"feature.notifications.service.gotify.enabled": false,
+ "feature.notifications.service.webhook.enabled": false,
"feature.notifications.legacy.fallback_enabled": false,
"feature.notifications.security_provider_events.enabled": false, // Blocker 3: Default disabled for this stage
}
diff --git a/backend/internal/api/handlers/import_handler.go b/backend/internal/api/handlers/import_handler.go
index af233532..78d94aa7 100644
--- a/backend/internal/api/handlers/import_handler.go
+++ b/backend/internal/api/handlers/import_handler.go
@@ -93,6 +93,10 @@ func (h *ImportHandler) RegisterRoutes(router *gin.RouterGroup) {
// GetStatus returns current import session status.
func (h *ImportHandler) GetStatus(c *gin.Context) {
+ if !requireAuthenticatedAdmin(c) {
+ return
+ }
+
var session models.ImportSession
err := h.db.Where("status IN ?", []string{"pending", "reviewing"}).
Order("created_at DESC").
@@ -155,6 +159,10 @@ func (h *ImportHandler) GetStatus(c *gin.Context) {
// GetPreview returns parsed hosts and conflicts for review.
func (h *ImportHandler) GetPreview(c *gin.Context) {
+ if !requireAuthenticatedAdmin(c) {
+ return
+ }
+
var session models.ImportSession
err := h.db.Where("status IN ?", []string{"pending", "reviewing"}).
Order("created_at DESC").
diff --git a/backend/internal/api/handlers/notification_coverage_test.go b/backend/internal/api/handlers/notification_coverage_test.go
index 4b280275..162364dc 100644
--- a/backend/internal/api/handlers/notification_coverage_test.go
+++ b/backend/internal/api/handlers/notification_coverage_test.go
@@ -3,6 +3,7 @@ package handlers
import (
"bytes"
"encoding/json"
+ "errors"
"net/http"
"net/http/httptest"
"testing"
@@ -14,6 +15,7 @@ import (
"github.com/Wikid82/charon/backend/internal/models"
"github.com/Wikid82/charon/backend/internal/services"
+ "github.com/Wikid82/charon/backend/internal/trace"
)
func setupNotificationCoverageDB(t *testing.T) *gorm.DB {
@@ -319,6 +321,159 @@ func TestNotificationProviderHandler_Test_InvalidJSON(t *testing.T) {
assert.Equal(t, 400, w.Code)
}
+func TestNotificationProviderHandler_Test_RejectsClientSuppliedGotifyToken(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupNotificationCoverageDB(t)
+ svc := services.NewNotificationService(db)
+ h := NewNotificationProviderHandler(svc)
+
+ payload := map[string]any{
+ "type": "gotify",
+ "url": "https://gotify.example/message",
+ "token": "super-secret-client-token",
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
+ c.Set(string(trace.RequestIDKey), "req-token-reject-1")
+ c.Request = httptest.NewRequest(http.MethodPost, "/providers/test", bytes.NewBuffer(body))
+ c.Request.Header.Set("Content-Type", "application/json")
+
+ h.Test(c)
+
+ assert.Equal(t, http.StatusBadRequest, w.Code)
+ var resp map[string]any
+ require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
+ assert.Equal(t, "TOKEN_WRITE_ONLY", resp["code"])
+ assert.Equal(t, "validation", resp["category"])
+ assert.Equal(t, "Gotify token is accepted only on provider create/update", resp["error"])
+ assert.Equal(t, "req-token-reject-1", resp["request_id"])
+ assert.NotContains(t, w.Body.String(), "super-secret-client-token")
+}
+
+func TestNotificationProviderHandler_Test_RejectsGotifyTokenWithWhitespace(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupNotificationCoverageDB(t)
+ svc := services.NewNotificationService(db)
+ h := NewNotificationProviderHandler(svc)
+
+ payload := map[string]any{
+ "type": "gotify",
+ "token": " secret-with-space ",
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
+ c.Request = httptest.NewRequest(http.MethodPost, "/providers/test", bytes.NewBuffer(body))
+ c.Request.Header.Set("Content-Type", "application/json")
+
+ h.Test(c)
+
+ assert.Equal(t, http.StatusBadRequest, w.Code)
+ assert.Contains(t, w.Body.String(), "TOKEN_WRITE_ONLY")
+ assert.NotContains(t, w.Body.String(), "secret-with-space")
+}
+
+func TestClassifyProviderTestFailure_NilError(t *testing.T) {
+ code, category, message := classifyProviderTestFailure(nil)
+
+ assert.Equal(t, "PROVIDER_TEST_FAILED", code)
+ assert.Equal(t, "dispatch", category)
+ assert.Equal(t, "Provider test failed", message)
+}
+
+func TestClassifyProviderTestFailure_DefaultStatusCode(t *testing.T) {
+ code, category, message := classifyProviderTestFailure(errors.New("provider returned status 500"))
+
+ assert.Equal(t, "PROVIDER_TEST_REMOTE_REJECTED", code)
+ assert.Equal(t, "dispatch", category)
+ assert.Contains(t, message, "HTTP 500")
+}
+
+func TestClassifyProviderTestFailure_GenericError(t *testing.T) {
+ code, category, message := classifyProviderTestFailure(errors.New("something completely unexpected"))
+
+ assert.Equal(t, "PROVIDER_TEST_FAILED", code)
+ assert.Equal(t, "dispatch", category)
+ assert.Equal(t, "Provider test failed", message)
+}
+
+func TestClassifyProviderTestFailure_InvalidDiscordWebhookURL(t *testing.T) {
+ code, category, message := classifyProviderTestFailure(errors.New("invalid discord webhook url"))
+
+ assert.Equal(t, "PROVIDER_TEST_URL_INVALID", code)
+ assert.Equal(t, "validation", category)
+ assert.Contains(t, message, "Provider URL")
+}
+
+func TestClassifyProviderTestFailure_URLValidation(t *testing.T) {
+ code, category, message := classifyProviderTestFailure(errors.New("destination URL validation failed"))
+
+ assert.Equal(t, "PROVIDER_TEST_URL_INVALID", code)
+ assert.Equal(t, "validation", category)
+ assert.Contains(t, message, "Provider URL")
+}
+
+func TestClassifyProviderTestFailure_AuthRejected(t *testing.T) {
+ code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: provider returned status 401"))
+
+ assert.Equal(t, "PROVIDER_TEST_AUTH_REJECTED", code)
+ assert.Equal(t, "dispatch", category)
+ assert.Contains(t, message, "rejected authentication")
+}
+
+func TestClassifyProviderTestFailure_EndpointNotFound(t *testing.T) {
+ code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: provider returned status 404"))
+
+ assert.Equal(t, "PROVIDER_TEST_ENDPOINT_NOT_FOUND", code)
+ assert.Equal(t, "dispatch", category)
+ assert.Contains(t, message, "endpoint was not found")
+}
+
+func TestClassifyProviderTestFailure_UnreachableEndpoint(t *testing.T) {
+ code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: outbound request failed"))
+
+ assert.Equal(t, "PROVIDER_TEST_UNREACHABLE", code)
+ assert.Equal(t, "dispatch", category)
+ assert.Contains(t, message, "Could not reach provider endpoint")
+}
+
+func TestClassifyProviderTestFailure_DNSLookupFailed(t *testing.T) {
+ code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: outbound request failed: dns lookup failed"))
+
+ assert.Equal(t, "PROVIDER_TEST_DNS_FAILED", code)
+ assert.Equal(t, "dispatch", category)
+ assert.Contains(t, message, "DNS lookup failed")
+}
+
+func TestClassifyProviderTestFailure_ConnectionRefused(t *testing.T) {
+ code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: outbound request failed: connection refused"))
+
+ assert.Equal(t, "PROVIDER_TEST_CONNECTION_REFUSED", code)
+ assert.Equal(t, "dispatch", category)
+ assert.Contains(t, message, "refused the connection")
+}
+
+func TestClassifyProviderTestFailure_Timeout(t *testing.T) {
+ code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: outbound request failed: request timed out"))
+
+ assert.Equal(t, "PROVIDER_TEST_TIMEOUT", code)
+ assert.Equal(t, "dispatch", category)
+ assert.Contains(t, message, "timed out")
+}
+
+func TestClassifyProviderTestFailure_TLSHandshakeFailed(t *testing.T) {
+ code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: outbound request failed: tls handshake failed"))
+
+ assert.Equal(t, "PROVIDER_TEST_TLS_FAILED", code)
+ assert.Equal(t, "dispatch", category)
+ assert.Contains(t, message, "TLS handshake failed")
+}
+
func TestNotificationProviderHandler_Templates(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupNotificationCoverageDB(t)
@@ -625,3 +780,258 @@ func TestNotificationTemplateHandler_Preview_InvalidTemplate(t *testing.T) {
assert.Equal(t, 400, w.Code)
}
+
+func TestNotificationProviderHandler_Preview_TokenWriteOnly(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupNotificationCoverageDB(t)
+ svc := services.NewNotificationService(db)
+ h := NewNotificationProviderHandler(svc)
+
+ payload := map[string]any{
+ "template": "minimal",
+ "token": "secret-token-value",
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
+ c.Request = httptest.NewRequest("POST", "/providers/preview", bytes.NewBuffer(body))
+ c.Request.Header.Set("Content-Type", "application/json")
+
+ h.Preview(c)
+
+ assert.Equal(t, 400, w.Code)
+ assert.Contains(t, w.Body.String(), "TOKEN_WRITE_ONLY")
+}
+
+func TestNotificationProviderHandler_Update_TypeChangeRejected(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupNotificationCoverageDB(t)
+ svc := services.NewNotificationService(db)
+ h := NewNotificationProviderHandler(svc)
+
+ existing := models.NotificationProvider{
+ ID: "update-type-test",
+ Name: "Discord Provider",
+ Type: "discord",
+ URL: "https://discord.com/api/webhooks/123/abc",
+ }
+ require.NoError(t, db.Create(&existing).Error)
+
+ payload := map[string]any{
+ "name": "Changed Type Provider",
+ "type": "gotify",
+ "url": "https://gotify.example.com",
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
+ c.Params = gin.Params{{Key: "id", Value: "update-type-test"}}
+ c.Request = httptest.NewRequest("PUT", "/providers/update-type-test", bytes.NewBuffer(body))
+ c.Request.Header.Set("Content-Type", "application/json")
+
+ h.Update(c)
+
+ assert.Equal(t, 400, w.Code)
+ assert.Contains(t, w.Body.String(), "PROVIDER_TYPE_IMMUTABLE")
+}
+
+func TestNotificationProviderHandler_Test_MissingProviderID(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupNotificationCoverageDB(t)
+ svc := services.NewNotificationService(db)
+ h := NewNotificationProviderHandler(svc)
+
+ payload := map[string]any{
+ "type": "discord",
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
+ c.Request = httptest.NewRequest("POST", "/providers/test", bytes.NewBuffer(body))
+ c.Request.Header.Set("Content-Type", "application/json")
+
+ h.Test(c)
+
+ assert.Equal(t, 400, w.Code)
+ assert.Contains(t, w.Body.String(), "MISSING_PROVIDER_ID")
+}
+
+func TestNotificationProviderHandler_Test_ProviderNotFound(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupNotificationCoverageDB(t)
+ svc := services.NewNotificationService(db)
+ h := NewNotificationProviderHandler(svc)
+
+ payload := map[string]any{
+ "type": "discord",
+ "id": "nonexistent-provider",
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
+ c.Request = httptest.NewRequest("POST", "/providers/test", bytes.NewBuffer(body))
+ c.Request.Header.Set("Content-Type", "application/json")
+
+ h.Test(c)
+
+ assert.Equal(t, 404, w.Code)
+ assert.Contains(t, w.Body.String(), "PROVIDER_NOT_FOUND")
+}
+
+func TestNotificationProviderHandler_Test_EmptyProviderURL(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupNotificationCoverageDB(t)
+ svc := services.NewNotificationService(db)
+ h := NewNotificationProviderHandler(svc)
+
+ existing := models.NotificationProvider{
+ ID: "empty-url-test",
+ Name: "Empty URL Provider",
+ Type: "discord",
+ URL: "",
+ }
+ require.NoError(t, db.Create(&existing).Error)
+
+ payload := map[string]any{
+ "type": "discord",
+ "id": "empty-url-test",
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
+ c.Request = httptest.NewRequest("POST", "/providers/test", bytes.NewBuffer(body))
+ c.Request.Header.Set("Content-Type", "application/json")
+
+ h.Test(c)
+
+ assert.Equal(t, 400, w.Code)
+ assert.Contains(t, w.Body.String(), "PROVIDER_CONFIG_MISSING")
+}
+
+func TestIsProviderValidationError_Comprehensive(t *testing.T) {
+ cases := []struct {
+ name string
+ err error
+ expect bool
+ }{
+ {"nil", nil, false},
+ {"invalid_custom_template", errors.New("invalid custom template: missing field"), true},
+ {"rendered_template", errors.New("rendered template exceeds maximum"), true},
+ {"failed_to_parse", errors.New("failed to parse template: unexpected end"), true},
+ {"failed_to_render", errors.New("failed to render template: missing key"), true},
+ {"invalid_discord_webhook", errors.New("invalid Discord webhook URL"), true},
+ {"unrelated_error", errors.New("database connection failed"), false},
+ }
+ for _, tc := range cases {
+ t.Run(tc.name, func(t *testing.T) {
+ assert.Equal(t, tc.expect, isProviderValidationError(tc.err))
+ })
+ }
+}
+
+func TestNotificationProviderHandler_Update_UnsupportedType(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupNotificationCoverageDB(t)
+ svc := services.NewNotificationService(db)
+ h := NewNotificationProviderHandler(svc)
+
+ existing := models.NotificationProvider{
+ ID: "unsupported-type",
+ Name: "Custom Provider",
+ Type: "slack",
+ URL: "https://hooks.slack.com/test",
+ }
+ require.NoError(t, db.Create(&existing).Error)
+
+ payload := map[string]any{
+ "name": "Updated Slack Provider",
+ "url": "https://hooks.slack.com/updated",
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
+ c.Params = gin.Params{{Key: "id", Value: "unsupported-type"}}
+ c.Request = httptest.NewRequest("PUT", "/providers/unsupported-type", bytes.NewBuffer(body))
+ c.Request.Header.Set("Content-Type", "application/json")
+
+ h.Update(c)
+
+ assert.Equal(t, 400, w.Code)
+ assert.Contains(t, w.Body.String(), "UNSUPPORTED_PROVIDER_TYPE")
+}
+
+func TestNotificationProviderHandler_Update_GotifyKeepsExistingToken(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupNotificationCoverageDB(t)
+ svc := services.NewNotificationService(db)
+ h := NewNotificationProviderHandler(svc)
+
+ existing := models.NotificationProvider{
+ ID: "gotify-keep-token",
+ Name: "Gotify Provider",
+ Type: "gotify",
+ URL: "https://gotify.example.com",
+ Token: "existing-secret-token",
+ }
+ require.NoError(t, db.Create(&existing).Error)
+
+ payload := map[string]any{
+ "name": "Updated Gotify",
+ "url": "https://gotify.example.com/new",
+ "template": "minimal",
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
+ c.Params = gin.Params{{Key: "id", Value: "gotify-keep-token"}}
+ c.Request = httptest.NewRequest("PUT", "/providers/gotify-keep-token", bytes.NewBuffer(body))
+ c.Request.Header.Set("Content-Type", "application/json")
+
+ h.Update(c)
+
+ assert.Equal(t, 200, w.Code)
+
+ var updated models.NotificationProvider
+ require.NoError(t, db.Where("id = ?", "gotify-keep-token").First(&updated).Error)
+ assert.Equal(t, "existing-secret-token", updated.Token)
+}
+
+func TestNotificationProviderHandler_Test_ReadDBError(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupNotificationCoverageDB(t)
+ svc := services.NewNotificationService(db)
+ h := NewNotificationProviderHandler(svc)
+
+ _ = db.Migrator().DropTable(&models.NotificationProvider{})
+
+ payload := map[string]any{
+ "type": "discord",
+ "id": "some-provider",
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ c, _ := gin.CreateTestContext(w)
+ setAdminContext(c)
+ c.Request = httptest.NewRequest("POST", "/providers/test", bytes.NewBuffer(body))
+ c.Request.Header.Set("Content-Type", "application/json")
+
+ h.Test(c)
+
+ assert.Equal(t, 500, w.Code)
+ assert.Contains(t, w.Body.String(), "PROVIDER_READ_FAILED")
+}
diff --git a/backend/internal/api/handlers/notification_provider_blocker3_test.go b/backend/internal/api/handlers/notification_provider_blocker3_test.go
index 9b5e8089..324cb5fc 100644
--- a/backend/internal/api/handlers/notification_provider_blocker3_test.go
+++ b/backend/internal/api/handlers/notification_provider_blocker3_test.go
@@ -15,7 +15,7 @@ import (
"gorm.io/gorm"
)
-// TestBlocker3_CreateProviderRejectsNonDiscordWithSecurityEvents tests that create rejects non-Discord providers with security events.
+// TestBlocker3_CreateProviderValidationWithSecurityEvents verifies supported/unsupported provider handling with security events enabled.
func TestBlocker3_CreateProviderRejectsNonDiscordWithSecurityEvents(t *testing.T) {
gin.SetMode(gin.TestMode)
@@ -31,15 +31,16 @@ func TestBlocker3_CreateProviderRejectsNonDiscordWithSecurityEvents(t *testing.T
service := services.NewNotificationService(db)
handler := NewNotificationProviderHandler(service)
- // Test cases: non-Discord provider types with security events enabled
+ // Test cases: provider types with security events enabled
testCases := []struct {
name string
providerType string
+ wantStatus int
}{
- {"webhook", "webhook"},
- {"slack", "slack"},
- {"gotify", "gotify"},
- {"email", "email"},
+ {"webhook", "webhook", http.StatusCreated},
+ {"gotify", "gotify", http.StatusCreated},
+ {"slack", "slack", http.StatusBadRequest},
+ {"email", "email", http.StatusBadRequest},
}
for _, tc := range testCases {
@@ -69,14 +70,15 @@ func TestBlocker3_CreateProviderRejectsNonDiscordWithSecurityEvents(t *testing.T
// Call Create
handler.Create(c)
- // Blocker 3: Should reject with 400
- assert.Equal(t, http.StatusBadRequest, w.Code, "Should reject non-Discord provider with security events")
+ assert.Equal(t, tc.wantStatus, w.Code)
// Verify error message
var response map[string]interface{}
err = json.Unmarshal(w.Body.Bytes(), &response)
assert.NoError(t, err)
- assert.Contains(t, response["error"], "discord", "Error should mention Discord")
+ if tc.wantStatus == http.StatusBadRequest {
+ assert.Contains(t, response["code"], "UNSUPPORTED_PROVIDER_TYPE")
+ }
})
}
}
@@ -129,8 +131,7 @@ func TestBlocker3_CreateProviderAcceptsDiscordWithSecurityEvents(t *testing.T) {
assert.Equal(t, http.StatusCreated, w.Code, "Should accept Discord provider with security events")
}
-// TestBlocker3_CreateProviderAcceptsNonDiscordWithoutSecurityEvents tests that create NOW REJECTS non-Discord providers even without security events.
-// NOTE: This test was updated for Discord-only rollout (current_spec.md) - now globally rejects all non-Discord.
+// TestBlocker3_CreateProviderAcceptsNonDiscordWithoutSecurityEvents verifies webhook create without security events remains accepted.
func TestBlocker3_CreateProviderAcceptsNonDiscordWithoutSecurityEvents(t *testing.T) {
gin.SetMode(gin.TestMode)
@@ -172,17 +173,10 @@ func TestBlocker3_CreateProviderAcceptsNonDiscordWithoutSecurityEvents(t *testin
// Call Create
handler.Create(c)
- // Discord-only rollout: Now REJECTS with 400
- assert.Equal(t, http.StatusBadRequest, w.Code, "Should reject non-Discord provider (Discord-only rollout)")
-
- // Verify error message
- var response map[string]interface{}
- err = json.Unmarshal(w.Body.Bytes(), &response)
- assert.NoError(t, err)
- assert.Contains(t, response["error"], "discord", "Error should mention Discord")
+ assert.Equal(t, http.StatusCreated, w.Code)
}
-// TestBlocker3_UpdateProviderRejectsNonDiscordWithSecurityEvents tests that update rejects non-Discord providers with security events.
+// TestBlocker3_UpdateProviderRejectsNonDiscordWithSecurityEvents verifies webhook update with security events is allowed in PR-1 scope.
func TestBlocker3_UpdateProviderRejectsNonDiscordWithSecurityEvents(t *testing.T) {
gin.SetMode(gin.TestMode)
@@ -235,14 +229,7 @@ func TestBlocker3_UpdateProviderRejectsNonDiscordWithSecurityEvents(t *testing.T
// Call Update
handler.Update(c)
- // Blocker 3: Should reject with 400
- assert.Equal(t, http.StatusBadRequest, w.Code, "Should reject non-Discord provider update with security events")
-
- // Verify error message
- var response map[string]interface{}
- err = json.Unmarshal(w.Body.Bytes(), &response)
- assert.NoError(t, err)
- assert.Contains(t, response["error"], "discord", "Error should mention Discord")
+ assert.Equal(t, http.StatusOK, w.Code)
}
// TestBlocker3_UpdateProviderAcceptsDiscordWithSecurityEvents tests that update accepts Discord providers with security events.
@@ -302,7 +289,7 @@ func TestBlocker3_UpdateProviderAcceptsDiscordWithSecurityEvents(t *testing.T) {
assert.Equal(t, http.StatusOK, w.Code, "Should accept Discord provider update with security events")
}
-// TestBlocker3_MultipleSecurityEventsEnforcesDiscordOnly tests that having any security event enabled enforces Discord-only.
+// TestBlocker3_MultipleSecurityEventsEnforcesDiscordOnly tests webhook remains accepted with security flags in PR-1 scope.
func TestBlocker3_MultipleSecurityEventsEnforcesDiscordOnly(t *testing.T) {
gin.SetMode(gin.TestMode)
@@ -353,9 +340,8 @@ func TestBlocker3_MultipleSecurityEventsEnforcesDiscordOnly(t *testing.T) {
// Call Create
handler.Create(c)
- // Blocker 3: Should reject with 400
- assert.Equal(t, http.StatusBadRequest, w.Code,
- "Should reject webhook provider with %s enabled", field)
+ assert.Equal(t, http.StatusCreated, w.Code,
+ "Should accept webhook provider with %s enabled", field)
})
}
}
@@ -407,5 +393,5 @@ func TestBlocker3_UpdateProvider_DatabaseError(t *testing.T) {
var response map[string]interface{}
err = json.Unmarshal(w.Body.Bytes(), &response)
assert.NoError(t, err)
- assert.Equal(t, "provider not found", response["error"])
+ assert.Equal(t, "Provider not found", response["error"])
}
diff --git a/backend/internal/api/handlers/notification_provider_discord_only_test.go b/backend/internal/api/handlers/notification_provider_discord_only_test.go
index e4f86e26..5b911ae8 100644
--- a/backend/internal/api/handlers/notification_provider_discord_only_test.go
+++ b/backend/internal/api/handlers/notification_provider_discord_only_test.go
@@ -16,7 +16,7 @@ import (
"gorm.io/gorm"
)
-// TestDiscordOnly_CreateRejectsNonDiscord tests that create globally rejects non-Discord providers.
+// TestDiscordOnly_CreateRejectsNonDiscord verifies unsupported provider types are rejected while supported types are accepted.
func TestDiscordOnly_CreateRejectsNonDiscord(t *testing.T) {
gin.SetMode(gin.TestMode)
@@ -30,13 +30,15 @@ func TestDiscordOnly_CreateRejectsNonDiscord(t *testing.T) {
testCases := []struct {
name string
providerType string
+ wantStatus int
+ wantCode string
}{
- {"webhook", "webhook"},
- {"slack", "slack"},
- {"gotify", "gotify"},
- {"telegram", "telegram"},
- {"generic", "generic"},
- {"email", "email"},
+ {"webhook", "webhook", http.StatusCreated, ""},
+ {"gotify", "gotify", http.StatusCreated, ""},
+ {"slack", "slack", http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE"},
+ {"telegram", "telegram", http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE"},
+ {"generic", "generic", http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE"},
+ {"email", "email", http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE"},
}
for _, tc := range testCases {
@@ -61,13 +63,14 @@ func TestDiscordOnly_CreateRejectsNonDiscord(t *testing.T) {
handler.Create(c)
- assert.Equal(t, http.StatusBadRequest, w.Code, "Should reject non-Discord provider")
+ assert.Equal(t, tc.wantStatus, w.Code)
var response map[string]interface{}
err = json.Unmarshal(w.Body.Bytes(), &response)
require.NoError(t, err)
- assert.Equal(t, "PROVIDER_TYPE_DISCORD_ONLY", response["code"])
- assert.Contains(t, response["error"], "discord")
+ if tc.wantCode != "" {
+ assert.Equal(t, tc.wantCode, response["code"])
+ }
})
}
}
@@ -156,8 +159,8 @@ func TestDiscordOnly_UpdateRejectsTypeMutation(t *testing.T) {
var response map[string]interface{}
err = json.Unmarshal(w.Body.Bytes(), &response)
require.NoError(t, err)
- assert.Equal(t, "DEPRECATED_PROVIDER_TYPE_IMMUTABLE", response["code"])
- assert.Contains(t, response["error"], "cannot change provider type")
+ assert.Equal(t, "PROVIDER_TYPE_IMMUTABLE", response["code"])
+ assert.Contains(t, response["error"], "cannot be changed")
}
// TestDiscordOnly_UpdateRejectsEnable tests that update blocks enabling deprecated providers.
@@ -205,13 +208,7 @@ func TestDiscordOnly_UpdateRejectsEnable(t *testing.T) {
handler.Update(c)
- assert.Equal(t, http.StatusBadRequest, w.Code, "Should reject enabling deprecated provider")
-
- var response map[string]interface{}
- err = json.Unmarshal(w.Body.Bytes(), &response)
- require.NoError(t, err)
- assert.Equal(t, "DEPRECATED_PROVIDER_CANNOT_ENABLE", response["code"])
- assert.Contains(t, response["error"], "cannot enable deprecated")
+ assert.Equal(t, http.StatusOK, w.Code)
}
// TestDiscordOnly_UpdateAllowsDisabledDeprecated tests that update allows updating disabled deprecated providers (except type/enable).
@@ -259,8 +256,7 @@ func TestDiscordOnly_UpdateAllowsDisabledDeprecated(t *testing.T) {
handler.Update(c)
- // Should still reject because type must be discord
- assert.Equal(t, http.StatusBadRequest, w.Code, "Should reject non-Discord type even for read-only fields")
+ assert.Equal(t, http.StatusOK, w.Code)
}
// TestDiscordOnly_UpdateAcceptsDiscord tests that update accepts Discord provider updates.
@@ -360,21 +356,21 @@ func TestDiscordOnly_ErrorCodes(t *testing.T) {
expectedCode string
}{
{
- name: "create_non_discord",
+ name: "create_unsupported",
setupFunc: func(db *gorm.DB) string {
return ""
},
requestFunc: func(id string) (*http.Request, gin.Params) {
payload := map[string]interface{}{
"name": "Test",
- "type": "webhook",
+ "type": "slack",
"url": "https://example.com",
}
body, _ := json.Marshal(payload)
req, _ := http.NewRequest("POST", "/api/v1/notifications/providers", bytes.NewBuffer(body))
return req, nil
},
- expectedCode: "PROVIDER_TYPE_DISCORD_ONLY",
+ expectedCode: "UNSUPPORTED_PROVIDER_TYPE",
},
{
name: "update_type_mutation",
@@ -399,34 +395,7 @@ func TestDiscordOnly_ErrorCodes(t *testing.T) {
req, _ := http.NewRequest("PUT", "/api/v1/notifications/providers/"+id, bytes.NewBuffer(body))
return req, []gin.Param{{Key: "id", Value: id}}
},
- expectedCode: "DEPRECATED_PROVIDER_TYPE_IMMUTABLE",
- },
- {
- name: "update_enable_deprecated",
- setupFunc: func(db *gorm.DB) string {
- provider := models.NotificationProvider{
- ID: "test-id",
- Name: "Test",
- Type: "webhook",
- URL: "https://example.com",
- Enabled: false,
- MigrationState: "deprecated",
- }
- db.Create(&provider)
- return "test-id"
- },
- requestFunc: func(id string) (*http.Request, gin.Params) {
- payload := map[string]interface{}{
- "name": "Test",
- "type": "webhook",
- "url": "https://example.com",
- "enabled": true,
- }
- body, _ := json.Marshal(payload)
- req, _ := http.NewRequest("PUT", "/api/v1/notifications/providers/"+id, bytes.NewBuffer(body))
- return req, []gin.Param{{Key: "id", Value: id}}
- },
- expectedCode: "DEPRECATED_PROVIDER_CANNOT_ENABLE",
+ expectedCode: "PROVIDER_TYPE_IMMUTABLE",
},
}
diff --git a/backend/internal/api/handlers/notification_provider_handler.go b/backend/internal/api/handlers/notification_provider_handler.go
index 8944ee77..9b2649aa 100644
--- a/backend/internal/api/handlers/notification_provider_handler.go
+++ b/backend/internal/api/handlers/notification_provider_handler.go
@@ -4,11 +4,13 @@ import (
"encoding/json"
"fmt"
"net/http"
+ "regexp"
"strings"
"time"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/Wikid82/charon/backend/internal/services"
+ "github.com/Wikid82/charon/backend/internal/trace"
"github.com/gin-gonic/gin"
"gorm.io/gorm"
)
@@ -25,6 +27,7 @@ type notificationProviderUpsertRequest struct {
URL string `json:"url"`
Config string `json:"config"`
Template string `json:"template"`
+ Token string `json:"token,omitempty"`
Enabled bool `json:"enabled"`
NotifyProxyHosts bool `json:"notify_proxy_hosts"`
NotifyRemoteServers bool `json:"notify_remote_servers"`
@@ -37,6 +40,16 @@ type notificationProviderUpsertRequest struct {
NotifySecurityCrowdSecDecisions bool `json:"notify_security_crowdsec_decisions"`
}
+type notificationProviderTestRequest struct {
+ ID string `json:"id"`
+ Name string `json:"name"`
+ Type string `json:"type"`
+ URL string `json:"url"`
+ Config string `json:"config"`
+ Template string `json:"template"`
+ Token string `json:"token,omitempty"`
+}
+
func (r notificationProviderUpsertRequest) toModel() models.NotificationProvider {
return models.NotificationProvider{
Name: r.Name,
@@ -44,6 +57,7 @@ func (r notificationProviderUpsertRequest) toModel() models.NotificationProvider
URL: r.URL,
Config: r.Config,
Template: r.Template,
+ Token: strings.TrimSpace(r.Token),
Enabled: r.Enabled,
NotifyProxyHosts: r.NotifyProxyHosts,
NotifyRemoteServers: r.NotifyRemoteServers,
@@ -57,6 +71,70 @@ func (r notificationProviderUpsertRequest) toModel() models.NotificationProvider
}
}
+func providerRequestID(c *gin.Context) string {
+ if value, ok := c.Get(string(trace.RequestIDKey)); ok {
+ if requestID, ok := value.(string); ok {
+ return requestID
+ }
+ }
+ return ""
+}
+
+func respondSanitizedProviderError(c *gin.Context, status int, code, category, message string) {
+ response := gin.H{
+ "error": message,
+ "code": code,
+ "category": category,
+ }
+ if requestID := providerRequestID(c); requestID != "" {
+ response["request_id"] = requestID
+ }
+ c.JSON(status, response)
+}
+
+var providerStatusCodePattern = regexp.MustCompile(`provider returned status\s+(\d{3})`)
+
+func classifyProviderTestFailure(err error) (code string, category string, message string) {
+ if err == nil {
+ return "PROVIDER_TEST_FAILED", "dispatch", "Provider test failed"
+ }
+
+ errText := strings.ToLower(strings.TrimSpace(err.Error()))
+
+ if strings.Contains(errText, "destination url validation failed") ||
+ strings.Contains(errText, "invalid webhook url") ||
+ strings.Contains(errText, "invalid discord webhook url") {
+ return "PROVIDER_TEST_URL_INVALID", "validation", "Provider URL is invalid or blocked. Verify the URL and try again"
+ }
+
+ if statusMatch := providerStatusCodePattern.FindStringSubmatch(errText); len(statusMatch) == 2 {
+ switch statusMatch[1] {
+ case "401", "403":
+ return "PROVIDER_TEST_AUTH_REJECTED", "dispatch", "Provider rejected authentication. Verify your Gotify token"
+ case "404":
+ return "PROVIDER_TEST_ENDPOINT_NOT_FOUND", "dispatch", "Provider endpoint was not found. Verify the provider URL path"
+ default:
+ return "PROVIDER_TEST_REMOTE_REJECTED", "dispatch", fmt.Sprintf("Provider rejected the test request (HTTP %s)", statusMatch[1])
+ }
+ }
+
+ if strings.Contains(errText, "outbound request failed") || strings.Contains(errText, "failed to send webhook") {
+ switch {
+ case strings.Contains(errText, "dns lookup failed"):
+ return "PROVIDER_TEST_DNS_FAILED", "dispatch", "DNS lookup failed for provider host. Verify the hostname in the provider URL"
+ case strings.Contains(errText, "connection refused"):
+ return "PROVIDER_TEST_CONNECTION_REFUSED", "dispatch", "Provider host refused the connection. Verify port and service availability"
+ case strings.Contains(errText, "request timed out"):
+ return "PROVIDER_TEST_TIMEOUT", "dispatch", "Provider request timed out. Verify network route and provider responsiveness"
+ case strings.Contains(errText, "tls handshake failed"):
+ return "PROVIDER_TEST_TLS_FAILED", "dispatch", "TLS handshake failed. Verify HTTPS certificate and URL scheme"
+ }
+ return "PROVIDER_TEST_UNREACHABLE", "dispatch", "Could not reach provider endpoint. Verify URL, DNS, and network connectivity"
+ }
+
+ return "PROVIDER_TEST_FAILED", "dispatch", "Provider test failed"
+}
+
func NewNotificationProviderHandler(service *services.NotificationService) *NotificationProviderHandler {
return NewNotificationProviderHandlerWithDeps(service, nil, "")
}
@@ -71,6 +149,10 @@ func (h *NotificationProviderHandler) List(c *gin.Context) {
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to list providers"})
return
}
+ for i := range providers {
+ providers[i].HasToken = providers[i].Token != ""
+ providers[i].Token = ""
+ }
c.JSON(http.StatusOK, providers)
}
@@ -81,16 +163,13 @@ func (h *NotificationProviderHandler) Create(c *gin.Context) {
var req notificationProviderUpsertRequest
if err := c.ShouldBindJSON(&req); err != nil {
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
+ respondSanitizedProviderError(c, http.StatusBadRequest, "INVALID_REQUEST", "validation", "Invalid notification provider payload")
return
}
- // Discord-only enforcement for this rollout
- if req.Type != "discord" {
- c.JSON(http.StatusBadRequest, gin.H{
- "error": "only discord provider type is supported in this release; additional providers will be enabled in future releases after validation",
- "code": "PROVIDER_TYPE_DISCORD_ONLY",
- })
+ providerType := strings.ToLower(strings.TrimSpace(req.Type))
+ if providerType != "discord" && providerType != "gotify" && providerType != "webhook" {
+ respondSanitizedProviderError(c, http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE", "validation", "Unsupported notification provider type")
return
}
@@ -106,15 +185,17 @@ func (h *NotificationProviderHandler) Create(c *gin.Context) {
if err := h.service.CreateProvider(&provider); err != nil {
// If it's a validation error from template parsing, return 400
if isProviderValidationError(err) {
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
+ respondSanitizedProviderError(c, http.StatusBadRequest, "PROVIDER_VALIDATION_FAILED", "validation", "Notification provider validation failed")
return
}
if respondPermissionError(c, h.securityService, "notification_provider_save_failed", err, h.dataRoot) {
return
}
- c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create provider"})
+ respondSanitizedProviderError(c, http.StatusInternalServerError, "PROVIDER_CREATE_FAILED", "internal", "Failed to create provider")
return
}
+ provider.HasToken = provider.Token != ""
+ provider.Token = ""
c.JSON(http.StatusCreated, provider)
}
@@ -126,7 +207,7 @@ func (h *NotificationProviderHandler) Update(c *gin.Context) {
id := c.Param("id")
var req notificationProviderUpsertRequest
if err := c.ShouldBindJSON(&req); err != nil {
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
+ respondSanitizedProviderError(c, http.StatusBadRequest, "INVALID_REQUEST", "validation", "Invalid notification provider payload")
return
}
@@ -134,39 +215,29 @@ func (h *NotificationProviderHandler) Update(c *gin.Context) {
var existing models.NotificationProvider
if err := h.service.DB.Where("id = ?", id).First(&existing).Error; err != nil {
if err == gorm.ErrRecordNotFound {
- c.JSON(http.StatusNotFound, gin.H{"error": "provider not found"})
+ respondSanitizedProviderError(c, http.StatusNotFound, "PROVIDER_NOT_FOUND", "validation", "Provider not found")
return
}
- c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to fetch provider"})
+ respondSanitizedProviderError(c, http.StatusInternalServerError, "PROVIDER_READ_FAILED", "internal", "Failed to read provider")
return
}
- // Block type mutation for existing non-Discord providers
- if existing.Type != "discord" && req.Type != existing.Type {
- c.JSON(http.StatusBadRequest, gin.H{
- "error": "cannot change provider type for deprecated non-discord providers; delete and recreate as discord provider instead",
- "code": "DEPRECATED_PROVIDER_TYPE_IMMUTABLE",
- })
+ if strings.TrimSpace(req.Type) != "" && strings.TrimSpace(req.Type) != existing.Type {
+ respondSanitizedProviderError(c, http.StatusBadRequest, "PROVIDER_TYPE_IMMUTABLE", "validation", "Provider type cannot be changed")
return
}
- // Block enable mutation for existing non-Discord providers
- if existing.Type != "discord" && req.Enabled && !existing.Enabled {
- c.JSON(http.StatusBadRequest, gin.H{
- "error": "cannot enable deprecated non-discord providers; only discord providers can be enabled",
- "code": "DEPRECATED_PROVIDER_CANNOT_ENABLE",
- })
+ providerType := strings.ToLower(strings.TrimSpace(existing.Type))
+ if providerType != "discord" && providerType != "gotify" && providerType != "webhook" {
+ respondSanitizedProviderError(c, http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE", "validation", "Unsupported notification provider type")
return
}
- // Discord-only enforcement for this rollout (new providers or type changes)
- if req.Type != "discord" {
- c.JSON(http.StatusBadRequest, gin.H{
- "error": "only discord provider type is supported in this release; additional providers will be enabled in future releases after validation",
- "code": "PROVIDER_TYPE_DISCORD_ONLY",
- })
- return
+ if providerType == "gotify" && strings.TrimSpace(req.Token) == "" {
+ // Keep existing token if update payload omits token
+ req.Token = existing.Token
}
+ req.Type = existing.Type
provider := req.toModel()
provider.ID = id
@@ -179,15 +250,17 @@ func (h *NotificationProviderHandler) Update(c *gin.Context) {
if err := h.service.UpdateProvider(&provider); err != nil {
if isProviderValidationError(err) {
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
+ respondSanitizedProviderError(c, http.StatusBadRequest, "PROVIDER_VALIDATION_FAILED", "validation", "Notification provider validation failed")
return
}
if respondPermissionError(c, h.securityService, "notification_provider_save_failed", err, h.dataRoot) {
return
}
- c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to update provider"})
+ respondSanitizedProviderError(c, http.StatusInternalServerError, "PROVIDER_UPDATE_FAILED", "internal", "Failed to update provider")
return
}
+ provider.HasToken = provider.Token != ""
+ provider.Token = ""
c.JSON(http.StatusOK, provider)
}
@@ -221,16 +294,44 @@ func (h *NotificationProviderHandler) Delete(c *gin.Context) {
}
func (h *NotificationProviderHandler) Test(c *gin.Context) {
+ var req notificationProviderTestRequest
+ if err := c.ShouldBindJSON(&req); err != nil {
+ respondSanitizedProviderError(c, http.StatusBadRequest, "INVALID_REQUEST", "validation", "Invalid test payload")
+ return
+ }
+
+ providerType := strings.ToLower(strings.TrimSpace(req.Type))
+ if providerType == "gotify" && strings.TrimSpace(req.Token) != "" {
+ respondSanitizedProviderError(c, http.StatusBadRequest, "TOKEN_WRITE_ONLY", "validation", "Gotify token is accepted only on provider create/update")
+ return
+ }
+
+ providerID := strings.TrimSpace(req.ID)
+ if providerID == "" {
+ respondSanitizedProviderError(c, http.StatusBadRequest, "MISSING_PROVIDER_ID", "validation", "Trusted provider ID is required for test dispatch")
+ return
+ }
+
var provider models.NotificationProvider
- if err := c.ShouldBindJSON(&provider); err != nil {
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
+ if err := h.service.DB.Where("id = ?", providerID).First(&provider).Error; err != nil {
+ if err == gorm.ErrRecordNotFound {
+ respondSanitizedProviderError(c, http.StatusNotFound, "PROVIDER_NOT_FOUND", "validation", "Provider not found")
+ return
+ }
+ respondSanitizedProviderError(c, http.StatusInternalServerError, "PROVIDER_READ_FAILED", "internal", "Failed to read provider")
+ return
+ }
+
+ if strings.TrimSpace(provider.URL) == "" {
+ respondSanitizedProviderError(c, http.StatusBadRequest, "PROVIDER_CONFIG_MISSING", "validation", "Trusted provider configuration is incomplete")
return
}
if err := h.service.TestProvider(provider); err != nil {
// Create internal notification for the failure
- _, _ = h.service.Create(models.NotificationTypeError, "Test Failed", fmt.Sprintf("Provider %s test failed: %v", provider.Name, err))
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
+ _, _ = h.service.Create(models.NotificationTypeError, "Test Failed", fmt.Sprintf("Provider %s test failed", provider.Name))
+ code, category, message := classifyProviderTestFailure(err)
+ respondSanitizedProviderError(c, http.StatusBadRequest, code, category, message)
return
}
c.JSON(http.StatusOK, gin.H{"message": "Test notification sent"})
@@ -249,9 +350,15 @@ func (h *NotificationProviderHandler) Templates(c *gin.Context) {
func (h *NotificationProviderHandler) Preview(c *gin.Context) {
var raw map[string]any
if err := c.ShouldBindJSON(&raw); err != nil {
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
+ respondSanitizedProviderError(c, http.StatusBadRequest, "INVALID_REQUEST", "validation", "Invalid preview payload")
return
}
+ if tokenValue, ok := raw["token"]; ok {
+ if tokenText, isString := tokenValue.(string); isString && strings.TrimSpace(tokenText) != "" {
+ respondSanitizedProviderError(c, http.StatusBadRequest, "TOKEN_WRITE_ONLY", "validation", "Gotify token is accepted only on provider create/update")
+ return
+ }
+ }
var provider models.NotificationProvider
// Marshal raw into provider to get proper types
@@ -279,7 +386,8 @@ func (h *NotificationProviderHandler) Preview(c *gin.Context) {
rendered, parsed, err := h.service.RenderTemplate(provider, payload)
if err != nil {
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error(), "rendered": rendered})
+ _ = rendered
+ respondSanitizedProviderError(c, http.StatusBadRequest, "TEMPLATE_PREVIEW_FAILED", "validation", "Template preview failed")
return
}
c.JSON(http.StatusOK, gin.H{"rendered": rendered, "parsed": parsed})
diff --git a/backend/internal/api/handlers/notification_provider_handler_test.go b/backend/internal/api/handlers/notification_provider_handler_test.go
index 4ba094be..2c0cd86e 100644
--- a/backend/internal/api/handlers/notification_provider_handler_test.go
+++ b/backend/internal/api/handlers/notification_provider_handler_test.go
@@ -120,25 +120,60 @@ func TestNotificationProviderHandler_Templates(t *testing.T) {
}
func TestNotificationProviderHandler_Test(t *testing.T) {
- r, _ := setupNotificationProviderTest(t)
+ r, db := setupNotificationProviderTest(t)
- // Test with invalid provider (should fail validation or service check)
- // Since we don't have notification dispatch mocked easily here,
- // we expect it might fail or pass depending on service implementation.
- // Looking at service code, TestProvider should validate and dispatch.
- // If URL is invalid, it should error.
-
- provider := models.NotificationProvider{
- Type: "discord",
- URL: "invalid-url",
+ stored := models.NotificationProvider{
+ ID: "trusted-provider-id",
+ Name: "Stored Provider",
+ Type: "discord",
+ URL: "invalid-url",
+ Enabled: true,
}
- body, _ := json.Marshal(provider)
+ require.NoError(t, db.Create(&stored).Error)
+
+ payload := map[string]any{
+ "id": stored.ID,
+ "type": "discord",
+ "url": "https://discord.com/api/webhooks/123/override",
+ }
+ body, _ := json.Marshal(payload)
req, _ := http.NewRequest("POST", "/api/v1/notifications/providers/test", bytes.NewBuffer(body))
w := httptest.NewRecorder()
r.ServeHTTP(w, req)
- // It should probably fail with 400
assert.Equal(t, http.StatusBadRequest, w.Code)
+ assert.Contains(t, w.Body.String(), "PROVIDER_TEST_URL_INVALID")
+}
+
+func TestNotificationProviderHandler_Test_RequiresTrustedProviderID(t *testing.T) {
+ r, _ := setupNotificationProviderTest(t)
+
+ payload := map[string]any{
+ "type": "discord",
+ "url": "https://discord.com/api/webhooks/123/abc",
+ }
+ body, _ := json.Marshal(payload)
+ req, _ := http.NewRequest("POST", "/api/v1/notifications/providers/test", bytes.NewBuffer(body))
+ w := httptest.NewRecorder()
+ r.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusBadRequest, w.Code)
+ assert.Contains(t, w.Body.String(), "MISSING_PROVIDER_ID")
+}
+
+func TestNotificationProviderHandler_Test_ReturnsNotFoundForUnknownProvider(t *testing.T) {
+ r, _ := setupNotificationProviderTest(t)
+
+ payload := map[string]any{
+ "id": "missing-provider-id",
+ }
+ body, _ := json.Marshal(payload)
+ req, _ := http.NewRequest("POST", "/api/v1/notifications/providers/test", bytes.NewBuffer(body))
+ w := httptest.NewRecorder()
+ r.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusNotFound, w.Code)
+ assert.Contains(t, w.Body.String(), "PROVIDER_NOT_FOUND")
}
func TestNotificationProviderHandler_Errors(t *testing.T) {
@@ -248,8 +283,8 @@ func TestNotificationProviderHandler_CreateRejectsDiscordIPHost(t *testing.T) {
r.ServeHTTP(w, req)
assert.Equal(t, http.StatusBadRequest, w.Code)
- assert.Contains(t, w.Body.String(), "invalid Discord webhook URL")
- assert.Contains(t, w.Body.String(), "IP address hosts are not allowed")
+ assert.Contains(t, w.Body.String(), "PROVIDER_VALIDATION_FAILED")
+ assert.Contains(t, w.Body.String(), "validation")
}
func TestNotificationProviderHandler_CreateAcceptsDiscordHostname(t *testing.T) {
@@ -378,3 +413,100 @@ func TestNotificationProviderHandler_UpdatePreservesServerManagedMigrationFields
require.NotNil(t, dbProvider.LastMigratedAt)
assert.Equal(t, now, dbProvider.LastMigratedAt.UTC().Round(time.Second))
}
+
+func TestNotificationProviderHandler_List_ReturnsHasTokenTrue(t *testing.T) {
+ r, db := setupNotificationProviderTest(t)
+
+ p := models.NotificationProvider{
+ ID: "tok-true",
+ Name: "Gotify With Token",
+ Type: "gotify",
+ URL: "https://gotify.example.com",
+ Token: "secret-app-token",
+ }
+ require.NoError(t, db.Create(&p).Error)
+
+ req, _ := http.NewRequest("GET", "/api/v1/notifications/providers", http.NoBody)
+ w := httptest.NewRecorder()
+ r.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusOK, w.Code)
+
+ var raw []map[string]interface{}
+ require.NoError(t, json.Unmarshal(w.Body.Bytes(), &raw))
+ require.Len(t, raw, 1)
+ assert.Equal(t, true, raw[0]["has_token"])
+}
+
+func TestNotificationProviderHandler_List_ReturnsHasTokenFalse(t *testing.T) {
+ r, db := setupNotificationProviderTest(t)
+
+ p := models.NotificationProvider{
+ ID: "tok-false",
+ Name: "Discord No Token",
+ Type: "discord",
+ URL: "https://discord.com/api/webhooks/123/abc",
+ }
+ require.NoError(t, db.Create(&p).Error)
+
+ req, _ := http.NewRequest("GET", "/api/v1/notifications/providers", http.NoBody)
+ w := httptest.NewRecorder()
+ r.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusOK, w.Code)
+
+ var raw []map[string]interface{}
+ require.NoError(t, json.Unmarshal(w.Body.Bytes(), &raw))
+ require.Len(t, raw, 1)
+ assert.Equal(t, false, raw[0]["has_token"])
+}
+
+func TestNotificationProviderHandler_List_NeverExposesRawToken(t *testing.T) {
+ r, db := setupNotificationProviderTest(t)
+
+ p := models.NotificationProvider{
+ ID: "tok-hidden",
+ Name: "Secret Gotify",
+ Type: "gotify",
+ URL: "https://gotify.example.com",
+ Token: "super-secret-value",
+ }
+ require.NoError(t, db.Create(&p).Error)
+
+ req, _ := http.NewRequest("GET", "/api/v1/notifications/providers", http.NoBody)
+ w := httptest.NewRecorder()
+ r.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusOK, w.Code)
+ assert.NotContains(t, w.Body.String(), "super-secret-value")
+
+ var raw []map[string]interface{}
+ require.NoError(t, json.Unmarshal(w.Body.Bytes(), &raw))
+ require.Len(t, raw, 1)
+ _, hasTokenField := raw[0]["token"]
+ assert.False(t, hasTokenField, "raw token field must not appear in JSON response")
+}
+
+func TestNotificationProviderHandler_Create_ResponseHasHasToken(t *testing.T) {
+ r, _ := setupNotificationProviderTest(t)
+
+ payload := map[string]interface{}{
+ "name": "New Gotify",
+ "type": "gotify",
+ "url": "https://gotify.example.com",
+ "token": "app-token-123",
+ "template": "minimal",
+ }
+ body, _ := json.Marshal(payload)
+ req, _ := http.NewRequest("POST", "/api/v1/notifications/providers", bytes.NewBuffer(body))
+ req.Header.Set("Content-Type", "application/json")
+ w := httptest.NewRecorder()
+ r.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusCreated, w.Code)
+
+ var raw map[string]interface{}
+ require.NoError(t, json.Unmarshal(w.Body.Bytes(), &raw))
+ assert.Equal(t, true, raw["has_token"])
+ assert.NotContains(t, w.Body.String(), "app-token-123")
+}
diff --git a/backend/internal/api/handlers/notification_provider_patch_coverage_test.go b/backend/internal/api/handlers/notification_provider_patch_coverage_test.go
index 0233d1fd..cfac52dc 100644
--- a/backend/internal/api/handlers/notification_provider_patch_coverage_test.go
+++ b/backend/internal/api/handlers/notification_provider_patch_coverage_test.go
@@ -65,7 +65,7 @@ func TestUpdate_BlockTypeMutationForNonDiscord(t *testing.T) {
err = json.Unmarshal(w.Body.Bytes(), &response)
require.NoError(t, err)
- assert.Equal(t, "DEPRECATED_PROVIDER_TYPE_IMMUTABLE", response["code"])
+ assert.Equal(t, "PROVIDER_TYPE_IMMUTABLE", response["code"])
}
// TestUpdate_AllowTypeMutationForDiscord verifies Discord can be updated
diff --git a/backend/internal/api/handlers/permission_helpers.go b/backend/internal/api/handlers/permission_helpers.go
index 6a10a353..e2a06716 100644
--- a/backend/internal/api/handlers/permission_helpers.go
+++ b/backend/internal/api/handlers/permission_helpers.go
@@ -24,6 +24,17 @@ func requireAdmin(c *gin.Context) bool {
return false
}
+func requireAuthenticatedAdmin(c *gin.Context) bool {
+ if _, exists := c.Get("userID"); !exists {
+ c.JSON(http.StatusUnauthorized, gin.H{
+ "error": "Authorization header required",
+ })
+ return false
+ }
+
+ return requireAdmin(c)
+}
+
func isAdmin(c *gin.Context) bool {
role, _ := c.Get("role")
roleStr, _ := role.(string)
diff --git a/backend/internal/api/handlers/permission_helpers_test.go b/backend/internal/api/handlers/permission_helpers_test.go
index 3113d57a..f9d4fd77 100644
--- a/backend/internal/api/handlers/permission_helpers_test.go
+++ b/backend/internal/api/handlers/permission_helpers_test.go
@@ -168,3 +168,34 @@ func TestLogPermissionAudit_ActorFallback(t *testing.T) {
assert.Equal(t, "permissions", audit.EventCategory)
assert.Contains(t, audit.Details, fmt.Sprintf("\"admin\":%v", false))
}
+
+func TestRequireAuthenticatedAdmin_NoUserID(t *testing.T) {
+ t.Parallel()
+
+ ctx, rec := newTestContextWithRequest()
+ result := requireAuthenticatedAdmin(ctx)
+ assert.False(t, result)
+ assert.Equal(t, http.StatusUnauthorized, rec.Code)
+ assert.Contains(t, rec.Body.String(), "Authorization header required")
+}
+
+func TestRequireAuthenticatedAdmin_UserIDPresentAndAdmin(t *testing.T) {
+ t.Parallel()
+
+ ctx, _ := newTestContextWithRequest()
+ ctx.Set("userID", uint(1))
+ ctx.Set("role", "admin")
+ result := requireAuthenticatedAdmin(ctx)
+ assert.True(t, result)
+}
+
+func TestRequireAuthenticatedAdmin_UserIDPresentButNotAdmin(t *testing.T) {
+ t.Parallel()
+
+ ctx, rec := newTestContextWithRequest()
+ ctx.Set("userID", uint(1))
+ ctx.Set("role", "user")
+ result := requireAuthenticatedAdmin(ctx)
+ assert.False(t, result)
+ assert.Equal(t, http.StatusForbidden, rec.Code)
+}
diff --git a/backend/internal/api/handlers/proxy_host_handler.go b/backend/internal/api/handlers/proxy_host_handler.go
index 2433b74a..5ab90db2 100644
--- a/backend/internal/api/handlers/proxy_host_handler.go
+++ b/backend/internal/api/handlers/proxy_host_handler.go
@@ -130,6 +130,7 @@ func generateForwardHostWarnings(forwardHost string) []ProxyHostWarning {
// ProxyHostHandler handles CRUD operations for proxy hosts.
type ProxyHostHandler struct {
service *services.ProxyHostService
+ db *gorm.DB
caddyManager *caddy.Manager
notificationService *services.NotificationService
uptimeService *services.UptimeService
@@ -183,6 +184,74 @@ func parseNullableUintField(value any, fieldName string) (*uint, bool, error) {
}
}
+func (h *ProxyHostHandler) resolveAccessListReference(value any) (*uint, error) {
+ if value == nil {
+ return nil, nil
+ }
+
+ parsedID, _, parseErr := parseNullableUintField(value, "access_list_id")
+ if parseErr == nil {
+ return parsedID, nil
+ }
+
+ uuidValue, isString := value.(string)
+ if !isString {
+ return nil, parseErr
+ }
+
+ trimmed := strings.TrimSpace(uuidValue)
+ if trimmed == "" {
+ return nil, nil
+ }
+
+ var acl models.AccessList
+ if err := h.db.Select("id").Where("uuid = ?", trimmed).First(&acl).Error; err != nil {
+ if err == gorm.ErrRecordNotFound {
+ return nil, fmt.Errorf("access list not found")
+ }
+ return nil, fmt.Errorf("failed to resolve access list")
+ }
+
+ id := acl.ID
+ return &id, nil
+}
+
+func (h *ProxyHostHandler) resolveSecurityHeaderProfileReference(value any) (*uint, error) {
+ if value == nil {
+ return nil, nil
+ }
+
+ parsedID, _, parseErr := parseNullableUintField(value, "security_header_profile_id")
+ if parseErr == nil {
+ return parsedID, nil
+ }
+
+ uuidValue, isString := value.(string)
+ if !isString {
+ return nil, parseErr
+ }
+
+ trimmed := strings.TrimSpace(uuidValue)
+ if trimmed == "" {
+ return nil, nil
+ }
+
+ if _, err := uuid.Parse(trimmed); err != nil {
+ return nil, parseErr
+ }
+
+ var profile models.SecurityHeaderProfile
+ if err := h.db.Select("id").Where("uuid = ?", trimmed).First(&profile).Error; err != nil {
+ if err == gorm.ErrRecordNotFound {
+ return nil, fmt.Errorf("security header profile not found")
+ }
+ return nil, fmt.Errorf("failed to resolve security header profile")
+ }
+
+ id := profile.ID
+ return &id, nil
+}
+
func parseForwardPortField(value any) (int, error) {
switch v := value.(type) {
case float64:
@@ -221,6 +290,7 @@ func parseForwardPortField(value any) (int, error) {
func NewProxyHostHandler(db *gorm.DB, caddyManager *caddy.Manager, ns *services.NotificationService, uptimeService *services.UptimeService) *ProxyHostHandler {
return &ProxyHostHandler{
service: services.NewProxyHostService(db),
+ db: db,
caddyManager: caddyManager,
notificationService: ns,
uptimeService: uptimeService,
@@ -252,8 +322,38 @@ func (h *ProxyHostHandler) List(c *gin.Context) {
// Create creates a new proxy host.
func (h *ProxyHostHandler) Create(c *gin.Context) {
+ var payload map[string]any
+ if err := c.ShouldBindJSON(&payload); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
+ return
+ }
+
+ if rawAccessListRef, ok := payload["access_list_id"]; ok {
+ resolvedAccessListID, resolveErr := h.resolveAccessListReference(rawAccessListRef)
+ if resolveErr != nil {
+ c.JSON(http.StatusBadRequest, gin.H{"error": resolveErr.Error()})
+ return
+ }
+ payload["access_list_id"] = resolvedAccessListID
+ }
+
+ if rawSecurityHeaderRef, ok := payload["security_header_profile_id"]; ok {
+ resolvedSecurityHeaderID, resolveErr := h.resolveSecurityHeaderProfileReference(rawSecurityHeaderRef)
+ if resolveErr != nil {
+ c.JSON(http.StatusBadRequest, gin.H{"error": resolveErr.Error()})
+ return
+ }
+ payload["security_header_profile_id"] = resolvedSecurityHeaderID
+ }
+
+ payloadBytes, marshalErr := json.Marshal(payload)
+ if marshalErr != nil {
+ c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request payload"})
+ return
+ }
+
var host models.ProxyHost
- if err := c.ShouldBindJSON(&host); err != nil {
+ if err := json.Unmarshal(payloadBytes, &host); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
@@ -313,6 +413,11 @@ func (h *ProxyHostHandler) Create(c *gin.Context) {
)
}
+ // Trigger immediate uptime monitor creation + health check (non-blocking)
+ if h.uptimeService != nil {
+ go h.uptimeService.SyncAndCheckForHost(host.ID)
+ }
+
// Generate advisory warnings for private/Docker IPs
warnings := generateForwardHostWarnings(host.ForwardHost)
@@ -430,12 +535,12 @@ func (h *ProxyHostHandler) Update(c *gin.Context) {
host.CertificateID = parsedID
}
if v, ok := payload["access_list_id"]; ok {
- parsedID, _, parseErr := parseNullableUintField(v, "access_list_id")
- if parseErr != nil {
- c.JSON(http.StatusBadRequest, gin.H{"error": parseErr.Error()})
+ resolvedAccessListID, resolveErr := h.resolveAccessListReference(v)
+ if resolveErr != nil {
+ c.JSON(http.StatusBadRequest, gin.H{"error": resolveErr.Error()})
return
}
- host.AccessListID = parsedID
+ host.AccessListID = resolvedAccessListID
}
if v, ok := payload["dns_provider_id"]; ok {
@@ -453,54 +558,12 @@ func (h *ProxyHostHandler) Update(c *gin.Context) {
// Security Header Profile: update only if provided
if v, ok := payload["security_header_profile_id"]; ok {
- logger := middleware.GetRequestLogger(c)
- // Sanitize user-provided values for log injection protection (CWE-117)
- safeUUID := sanitizeForLog(uuidStr)
- logger.WithField("host_uuid", safeUUID).WithField("raw_value", sanitizeForLog(fmt.Sprintf("%v", v))).Debug("Processing security_header_profile_id update")
-
- if v == nil {
- logger.WithField("host_uuid", safeUUID).Debug("Setting security_header_profile_id to nil")
- host.SecurityHeaderProfileID = nil
- } else {
- conversionSuccess := false
- switch t := v.(type) {
- case float64:
- logger.Debug("Received security_header_profile_id as float64")
- if id, ok := safeFloat64ToUint(t); ok {
- host.SecurityHeaderProfileID = &id
- conversionSuccess = true
- logger.Info("Successfully converted security_header_profile_id from float64")
- } else {
- logger.Warn("Failed to convert security_header_profile_id from float64: value is negative or not a valid uint")
- }
- case int:
- logger.Debug("Received security_header_profile_id as int")
- if id, ok := safeIntToUint(t); ok {
- host.SecurityHeaderProfileID = &id
- conversionSuccess = true
- logger.Info("Successfully converted security_header_profile_id from int")
- } else {
- logger.Warn("Failed to convert security_header_profile_id from int: value is negative")
- }
- case string:
- logger.Debug("Received security_header_profile_id as string")
- if n, err := strconv.ParseUint(t, 10, 32); err == nil {
- id := uint(n)
- host.SecurityHeaderProfileID = &id
- conversionSuccess = true
- logger.WithField("host_uuid", safeUUID).WithField("profile_id", id).Info("Successfully converted security_header_profile_id from string")
- } else {
- logger.Warn("Failed to parse security_header_profile_id from string")
- }
- default:
- logger.Warn("Unsupported type for security_header_profile_id")
- }
-
- if !conversionSuccess {
- c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("invalid security_header_profile_id: unable to convert value %v of type %T to uint", v, v)})
- return
- }
+ resolvedSecurityHeaderID, resolveErr := h.resolveSecurityHeaderProfileReference(v)
+ if resolveErr != nil {
+ c.JSON(http.StatusBadRequest, gin.H{"error": resolveErr.Error()})
+ return
}
+ host.SecurityHeaderProfileID = resolvedSecurityHeaderID
}
// Locations: replace only if provided
@@ -587,11 +650,10 @@ func (h *ProxyHostHandler) Delete(c *gin.Context) {
return
}
- // check if we should also delete associated uptime monitors (query param: delete_uptime=true)
- deleteUptime := c.DefaultQuery("delete_uptime", "false") == "true"
-
- if deleteUptime && h.uptimeService != nil {
- // Find all monitors referencing this proxy host and delete each
+ // Always clean up associated uptime monitors when deleting a proxy host.
+ // The query param delete_uptime=true is kept for backward compatibility but
+ // cleanup now runs unconditionally to prevent orphaned monitors.
+ if h.uptimeService != nil {
var monitors []models.UptimeMonitor
if err := h.uptimeService.DB.Where("proxy_host_id = ?", host.ID).Find(&monitors).Error; err == nil {
for _, m := range monitors {
diff --git a/backend/internal/api/handlers/proxy_host_handler_test.go b/backend/internal/api/handlers/proxy_host_handler_test.go
index 2a10a52f..cb2f984f 100644
--- a/backend/internal/api/handlers/proxy_host_handler_test.go
+++ b/backend/internal/api/handlers/proxy_host_handler_test.go
@@ -9,6 +9,7 @@ import (
"net/http/httptest"
"strings"
"testing"
+ "time"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
@@ -44,6 +45,219 @@ func setupTestRouter(t *testing.T) (*gin.Engine, *gorm.DB) {
return r, db
}
+func setupTestRouterWithReferenceTables(t *testing.T) (*gin.Engine, *gorm.DB) {
+ t.Helper()
+
+ dsn := "file:" + t.Name() + "?mode=memory&cache=shared"
+ db, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{})
+ require.NoError(t, err)
+ require.NoError(t, db.AutoMigrate(
+ &models.ProxyHost{},
+ &models.Location{},
+ &models.AccessList{},
+ &models.SecurityHeaderProfile{},
+ &models.Notification{},
+ &models.NotificationProvider{},
+ ))
+
+ ns := services.NewNotificationService(db)
+ h := NewProxyHostHandler(db, nil, ns, nil)
+ r := gin.New()
+ api := r.Group("/api/v1")
+ h.RegisterRoutes(api)
+
+ return r, db
+}
+
+func setupTestRouterWithUptime(t *testing.T) (*gin.Engine, *gorm.DB) {
+ t.Helper()
+
+ dsn := "file:" + t.Name() + "?mode=memory&cache=shared"
+ db, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{})
+ require.NoError(t, err)
+ require.NoError(t, db.AutoMigrate(
+ &models.ProxyHost{},
+ &models.Location{},
+ &models.Notification{},
+ &models.NotificationProvider{},
+ &models.UptimeMonitor{},
+ &models.UptimeHeartbeat{},
+ &models.UptimeHost{},
+ &models.Setting{},
+ ))
+
+ ns := services.NewNotificationService(db)
+ us := services.NewUptimeService(db, ns)
+ h := NewProxyHostHandler(db, nil, ns, us)
+ r := gin.New()
+ api := r.Group("/api/v1")
+ h.RegisterRoutes(api)
+
+ return r, db
+}
+
+func TestProxyHostHandler_ResolveAccessListReference_TargetedBranches(t *testing.T) {
+ t.Parallel()
+
+ _, db := setupTestRouterWithReferenceTables(t)
+ h := NewProxyHostHandler(db, nil, services.NewNotificationService(db), nil)
+
+ resolved, err := h.resolveAccessListReference(true)
+ require.Error(t, err)
+ require.Nil(t, resolved)
+ require.Contains(t, err.Error(), "invalid access_list_id")
+
+ resolved, err = h.resolveAccessListReference(" ")
+ require.NoError(t, err)
+ require.Nil(t, resolved)
+
+ acl := models.AccessList{UUID: uuid.NewString(), Name: "resolve-acl", Type: "ip", Enabled: true}
+ require.NoError(t, db.Create(&acl).Error)
+
+ resolved, err = h.resolveAccessListReference(acl.UUID)
+ require.NoError(t, err)
+ require.NotNil(t, resolved)
+ require.Equal(t, acl.ID, *resolved)
+}
+
+func TestProxyHostHandler_ResolveSecurityHeaderReference_TargetedBranches(t *testing.T) {
+ t.Parallel()
+
+ _, db := setupTestRouterWithReferenceTables(t)
+ h := NewProxyHostHandler(db, nil, services.NewNotificationService(db), nil)
+
+ resolved, err := h.resolveSecurityHeaderProfileReference(" ")
+ require.NoError(t, err)
+ require.Nil(t, resolved)
+
+ profile := models.SecurityHeaderProfile{
+ UUID: uuid.NewString(),
+ Name: "resolve-security-profile",
+ IsPreset: false,
+ SecurityScore: 90,
+ }
+ require.NoError(t, db.Create(&profile).Error)
+
+ resolved, err = h.resolveSecurityHeaderProfileReference(profile.UUID)
+ require.NoError(t, err)
+ require.NotNil(t, resolved)
+ require.Equal(t, profile.ID, *resolved)
+
+ resolved, err = h.resolveSecurityHeaderProfileReference(uuid.NewString())
+ require.Error(t, err)
+ require.Nil(t, resolved)
+ require.Contains(t, err.Error(), "security header profile not found")
+
+ require.NoError(t, db.Migrator().DropTable(&models.SecurityHeaderProfile{}))
+ resolved, err = h.resolveSecurityHeaderProfileReference(uuid.NewString())
+ require.Error(t, err)
+ require.Nil(t, resolved)
+ require.Contains(t, err.Error(), "failed to resolve security header profile")
+}
+
+func TestProxyHostCreate_ReferenceResolution_TargetedBranches(t *testing.T) {
+ t.Parallel()
+
+ router, db := setupTestRouterWithReferenceTables(t)
+
+ acl := models.AccessList{UUID: uuid.NewString(), Name: "create-acl", Type: "ip", Enabled: true}
+ require.NoError(t, db.Create(&acl).Error)
+
+ profile := models.SecurityHeaderProfile{
+ UUID: uuid.NewString(),
+ Name: "create-security-profile",
+ IsPreset: false,
+ SecurityScore: 85,
+ }
+ require.NoError(t, db.Create(&profile).Error)
+
+ t.Run("creates host when references are valid UUIDs", func(t *testing.T) {
+ body := map[string]any{
+ "name": "Create Ref Success",
+ "domain_names": "create-ref-success.example.com",
+ "forward_scheme": "http",
+ "forward_host": "localhost",
+ "forward_port": 8080,
+ "enabled": true,
+ "access_list_id": acl.UUID,
+ "security_header_profile_id": profile.UUID,
+ }
+ payload, err := json.Marshal(body)
+ require.NoError(t, err)
+
+ req := httptest.NewRequest(http.MethodPost, "/api/v1/proxy-hosts", bytes.NewReader(payload))
+ req.Header.Set("Content-Type", "application/json")
+ resp := httptest.NewRecorder()
+ router.ServeHTTP(resp, req)
+ require.Equal(t, http.StatusCreated, resp.Code)
+
+ var created models.ProxyHost
+ require.NoError(t, json.Unmarshal(resp.Body.Bytes(), &created))
+ require.NotNil(t, created.AccessListID)
+ require.Equal(t, acl.ID, *created.AccessListID)
+ require.NotNil(t, created.SecurityHeaderProfileID)
+ require.Equal(t, profile.ID, *created.SecurityHeaderProfileID)
+ })
+
+ t.Run("returns bad request for invalid access list reference type", func(t *testing.T) {
+ body := `{"name":"Create ACL Type Error","domain_names":"create-acl-type-error.example.com","forward_scheme":"http","forward_host":"localhost","forward_port":8080,"enabled":true,"access_list_id":true}`
+ req := httptest.NewRequest(http.MethodPost, "/api/v1/proxy-hosts", strings.NewReader(body))
+ req.Header.Set("Content-Type", "application/json")
+ resp := httptest.NewRecorder()
+ router.ServeHTTP(resp, req)
+ require.Equal(t, http.StatusBadRequest, resp.Code)
+ })
+
+ t.Run("returns bad request for missing security header profile", func(t *testing.T) {
+ body := map[string]any{
+ "name": "Create Security Missing",
+ "domain_names": "create-security-missing.example.com",
+ "forward_scheme": "http",
+ "forward_host": "localhost",
+ "forward_port": 8080,
+ "enabled": true,
+ "security_header_profile_id": uuid.NewString(),
+ }
+ payload, err := json.Marshal(body)
+ require.NoError(t, err)
+
+ req := httptest.NewRequest(http.MethodPost, "/api/v1/proxy-hosts", bytes.NewReader(payload))
+ req.Header.Set("Content-Type", "application/json")
+ resp := httptest.NewRecorder()
+ router.ServeHTTP(resp, req)
+ require.Equal(t, http.StatusBadRequest, resp.Code)
+ })
+}
+
+func TestProxyHostCreate_TriggersAsyncUptimeSyncWhenServiceConfigured(t *testing.T) {
+ t.Parallel()
+
+ router, db := setupTestRouterWithUptime(t)
+
+ upstream := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ }))
+ t.Cleanup(upstream.Close)
+
+ domain := strings.TrimPrefix(upstream.URL, "http://")
+ body := fmt.Sprintf(`{"name":"Uptime Hook","domain_names":"%s","forward_scheme":"http","forward_host":"app-service","forward_port":8080,"enabled":true}`, domain)
+ req := httptest.NewRequest(http.MethodPost, "/api/v1/proxy-hosts", strings.NewReader(body))
+ req.Header.Set("Content-Type", "application/json")
+
+ resp := httptest.NewRecorder()
+ router.ServeHTTP(resp, req)
+ require.Equal(t, http.StatusCreated, resp.Code)
+
+ var created models.ProxyHost
+ require.NoError(t, db.Where("domain_names = ?", domain).First(&created).Error)
+
+ var count int64
+ require.Eventually(t, func() bool {
+ db.Model(&models.UptimeMonitor{}).Where("proxy_host_id = ?", created.ID).Count(&count)
+ return count > 0
+ }, 3*time.Second, 50*time.Millisecond)
+}
+
func TestProxyHostLifecycle(t *testing.T) {
t.Parallel()
router, _ := setupTestRouter(t)
diff --git a/backend/internal/api/handlers/proxy_host_handler_update_test.go b/backend/internal/api/handlers/proxy_host_handler_update_test.go
index 698d8bd0..ced2f799 100644
--- a/backend/internal/api/handlers/proxy_host_handler_update_test.go
+++ b/backend/internal/api/handlers/proxy_host_handler_update_test.go
@@ -75,6 +75,203 @@ func createTestSecurityHeaderProfile(t *testing.T, db *gorm.DB, name string) mod
return profile
}
+// createTestAccessList creates an access list for testing.
+func createTestAccessList(t *testing.T, db *gorm.DB, name string) models.AccessList {
+ t.Helper()
+ acl := models.AccessList{
+ UUID: uuid.NewString(),
+ Name: name,
+ Type: "ip",
+ Enabled: true,
+ }
+ require.NoError(t, db.Create(&acl).Error)
+ return acl
+}
+
+func TestProxyHostUpdate_AccessListID_Transitions_NoUnrelatedMutation(t *testing.T) {
+ t.Parallel()
+ router, db := setupUpdateTestRouter(t)
+
+ aclOne := createTestAccessList(t, db, "ACL One")
+ aclTwo := createTestAccessList(t, db, "ACL Two")
+
+ host := models.ProxyHost{
+ UUID: uuid.NewString(),
+ Name: "Access List Transition Host",
+ DomainNames: "acl-transition.test.com",
+ ForwardScheme: "http",
+ ForwardHost: "localhost",
+ ForwardPort: 8080,
+ Enabled: true,
+ SSLForced: true,
+ Application: "none",
+ AccessListID: &aclOne.ID,
+ }
+ require.NoError(t, db.Create(&host).Error)
+
+ assertUnrelatedFields := func(t *testing.T, current models.ProxyHost) {
+ t.Helper()
+ assert.Equal(t, "Access List Transition Host", current.Name)
+ assert.Equal(t, "acl-transition.test.com", current.DomainNames)
+ assert.Equal(t, "localhost", current.ForwardHost)
+ assert.Equal(t, 8080, current.ForwardPort)
+ assert.True(t, current.SSLForced)
+ assert.Equal(t, "none", current.Application)
+ }
+
+ runUpdate := func(t *testing.T, update map[string]any) {
+ t.Helper()
+ body, _ := json.Marshal(update)
+ req := httptest.NewRequest(http.MethodPut, "/api/v1/proxy-hosts/"+host.UUID, bytes.NewReader(body))
+ req.Header.Set("Content-Type", "application/json")
+ resp := httptest.NewRecorder()
+ router.ServeHTTP(resp, req)
+ require.Equal(t, http.StatusOK, resp.Code)
+ }
+
+ // value -> value
+ runUpdate(t, map[string]any{"access_list_id": aclTwo.ID})
+ var updated models.ProxyHost
+ require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error)
+ require.NotNil(t, updated.AccessListID)
+ assert.Equal(t, aclTwo.ID, *updated.AccessListID)
+ assertUnrelatedFields(t, updated)
+
+ // value -> null
+ runUpdate(t, map[string]any{"access_list_id": nil})
+ require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error)
+ assert.Nil(t, updated.AccessListID)
+ assertUnrelatedFields(t, updated)
+
+ // null -> value
+ runUpdate(t, map[string]any{"access_list_id": aclOne.ID})
+ require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error)
+ require.NotNil(t, updated.AccessListID)
+ assert.Equal(t, aclOne.ID, *updated.AccessListID)
+ assertUnrelatedFields(t, updated)
+}
+
+func TestProxyHostUpdate_AccessListID_UUIDNotFound_ReturnsBadRequest(t *testing.T) {
+ t.Parallel()
+ router, db := setupUpdateTestRouter(t)
+
+ host := createTestProxyHost(t, db, "acl-uuid-not-found")
+
+ updateBody := map[string]any{
+ "name": "ACL UUID Not Found",
+ "domain_names": "acl-uuid-not-found.test.com",
+ "forward_scheme": "http",
+ "forward_host": "localhost",
+ "forward_port": 8080,
+ "access_list_id": uuid.NewString(),
+ }
+ body, _ := json.Marshal(updateBody)
+
+ req := httptest.NewRequest(http.MethodPut, "/api/v1/proxy-hosts/"+host.UUID, bytes.NewReader(body))
+ req.Header.Set("Content-Type", "application/json")
+ resp := httptest.NewRecorder()
+ router.ServeHTTP(resp, req)
+
+ require.Equal(t, http.StatusBadRequest, resp.Code)
+
+ var result map[string]any
+ require.NoError(t, json.Unmarshal(resp.Body.Bytes(), &result))
+ assert.Contains(t, result["error"], "access list not found")
+}
+
+func TestProxyHostUpdate_AccessListID_ResolveQueryFailure_ReturnsBadRequest(t *testing.T) {
+ t.Parallel()
+ router, db := setupUpdateTestRouter(t)
+
+ host := createTestProxyHost(t, db, "acl-resolve-query-failure")
+
+ require.NoError(t, db.Migrator().DropTable(&models.AccessList{}))
+
+ updateBody := map[string]any{
+ "name": "ACL Resolve Query Failure",
+ "domain_names": "acl-resolve-query-failure.test.com",
+ "forward_scheme": "http",
+ "forward_host": "localhost",
+ "forward_port": 8080,
+ "access_list_id": uuid.NewString(),
+ }
+ body, _ := json.Marshal(updateBody)
+
+ req := httptest.NewRequest(http.MethodPut, "/api/v1/proxy-hosts/"+host.UUID, bytes.NewReader(body))
+ req.Header.Set("Content-Type", "application/json")
+ resp := httptest.NewRecorder()
+ router.ServeHTTP(resp, req)
+
+ require.Equal(t, http.StatusBadRequest, resp.Code)
+
+ var result map[string]any
+ require.NoError(t, json.Unmarshal(resp.Body.Bytes(), &result))
+ assert.Contains(t, result["error"], "failed to resolve access list")
+}
+
+func TestProxyHostUpdate_SecurityHeaderProfileID_Transitions_NoUnrelatedMutation(t *testing.T) {
+ t.Parallel()
+ router, db := setupUpdateTestRouter(t)
+
+ profileOne := createTestSecurityHeaderProfile(t, db, "Security Profile One")
+ profileTwo := createTestSecurityHeaderProfile(t, db, "Security Profile Two")
+
+ host := models.ProxyHost{
+ UUID: uuid.NewString(),
+ Name: "Security Profile Transition Host",
+ DomainNames: "security-transition.test.com",
+ ForwardScheme: "http",
+ ForwardHost: "localhost",
+ ForwardPort: 9090,
+ Enabled: true,
+ SSLForced: true,
+ Application: "none",
+ SecurityHeaderProfileID: &profileOne.ID,
+ }
+ require.NoError(t, db.Create(&host).Error)
+
+ assertUnrelatedFields := func(t *testing.T, current models.ProxyHost) {
+ t.Helper()
+ assert.Equal(t, "Security Profile Transition Host", current.Name)
+ assert.Equal(t, "security-transition.test.com", current.DomainNames)
+ assert.Equal(t, "localhost", current.ForwardHost)
+ assert.Equal(t, 9090, current.ForwardPort)
+ assert.True(t, current.SSLForced)
+ assert.Equal(t, "none", current.Application)
+ }
+
+ runUpdate := func(t *testing.T, update map[string]any) {
+ t.Helper()
+ body, _ := json.Marshal(update)
+ req := httptest.NewRequest(http.MethodPut, "/api/v1/proxy-hosts/"+host.UUID, bytes.NewReader(body))
+ req.Header.Set("Content-Type", "application/json")
+ resp := httptest.NewRecorder()
+ router.ServeHTTP(resp, req)
+ require.Equal(t, http.StatusOK, resp.Code)
+ }
+
+ // value -> value
+ runUpdate(t, map[string]any{"security_header_profile_id": fmt.Sprintf("%d", profileTwo.ID)})
+ var updated models.ProxyHost
+ require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error)
+ require.NotNil(t, updated.SecurityHeaderProfileID)
+ assert.Equal(t, profileTwo.ID, *updated.SecurityHeaderProfileID)
+ assertUnrelatedFields(t, updated)
+
+ // value -> null
+ runUpdate(t, map[string]any{"security_header_profile_id": ""})
+ require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error)
+ assert.Nil(t, updated.SecurityHeaderProfileID)
+ assertUnrelatedFields(t, updated)
+
+ // null -> value
+ runUpdate(t, map[string]any{"security_header_profile_id": fmt.Sprintf("%d", profileOne.ID)})
+ require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error)
+ require.NotNil(t, updated.SecurityHeaderProfileID)
+ assert.Equal(t, profileOne.ID, *updated.SecurityHeaderProfileID)
+ assertUnrelatedFields(t, updated)
+}
+
// TestProxyHostUpdate_EnableStandardHeaders_Null tests updating enable_standard_headers to null.
func TestProxyHostUpdate_EnableStandardHeaders_Null(t *testing.T) {
t.Parallel()
diff --git a/backend/internal/api/handlers/security_geoip_endpoints_test.go b/backend/internal/api/handlers/security_geoip_endpoints_test.go
index 086fc5bb..7d79f2af 100644
--- a/backend/internal/api/handlers/security_geoip_endpoints_test.go
+++ b/backend/internal/api/handlers/security_geoip_endpoints_test.go
@@ -59,6 +59,10 @@ func TestSecurityHandler_ReloadGeoIP_NotInitialized(t *testing.T) {
h := NewSecurityHandler(config.SecurityConfig{}, nil, nil)
r := gin.New()
+ r.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
r.POST("/security/geoip/reload", h.ReloadGeoIP)
w := httptest.NewRecorder()
@@ -75,6 +79,10 @@ func TestSecurityHandler_ReloadGeoIP_LoadError(t *testing.T) {
h.SetGeoIPService(&services.GeoIPService{}) // dbPath empty => Load() will error
r := gin.New()
+ r.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
r.POST("/security/geoip/reload", h.ReloadGeoIP)
w := httptest.NewRecorder()
@@ -90,6 +98,10 @@ func TestSecurityHandler_LookupGeoIP_MissingIPAddress(t *testing.T) {
h := NewSecurityHandler(config.SecurityConfig{}, nil, nil)
r := gin.New()
+ r.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
r.POST("/security/geoip/lookup", h.LookupGeoIP)
payload := []byte(`{}`)
@@ -109,6 +121,10 @@ func TestSecurityHandler_LookupGeoIP_ServiceUnavailable(t *testing.T) {
h.SetGeoIPService(&services.GeoIPService{}) // present but not loaded
r := gin.New()
+ r.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
r.POST("/security/geoip/lookup", h.LookupGeoIP)
payload, _ := json.Marshal(map[string]string{"ip_address": "8.8.8.8"})
diff --git a/backend/internal/api/handlers/security_handler.go b/backend/internal/api/handlers/security_handler.go
index d8dee492..4468d4b2 100644
--- a/backend/internal/api/handlers/security_handler.go
+++ b/backend/internal/api/handlers/security_handler.go
@@ -261,6 +261,10 @@ func (h *SecurityHandler) GetConfig(c *gin.Context) {
// UpdateConfig creates or updates the SecurityConfig in DB
func (h *SecurityHandler) UpdateConfig(c *gin.Context) {
+ if !requireAdmin(c) {
+ return
+ }
+
var payload models.SecurityConfig
if err := c.ShouldBindJSON(&payload); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid payload"})
@@ -290,6 +294,10 @@ func (h *SecurityHandler) UpdateConfig(c *gin.Context) {
// GenerateBreakGlass generates a break-glass token and returns the plaintext token once
func (h *SecurityHandler) GenerateBreakGlass(c *gin.Context) {
+ if !requireAdmin(c) {
+ return
+ }
+
token, err := h.svc.GenerateBreakGlassToken("default")
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to generate break-glass token"})
@@ -316,6 +324,10 @@ func (h *SecurityHandler) ListDecisions(c *gin.Context) {
// CreateDecision creates a manual decision (override) - for now no checks besides payload
func (h *SecurityHandler) CreateDecision(c *gin.Context) {
+ if !requireAdmin(c) {
+ return
+ }
+
var payload models.SecurityDecision
if err := c.ShouldBindJSON(&payload); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid payload"})
@@ -371,6 +383,10 @@ func (h *SecurityHandler) ListRuleSets(c *gin.Context) {
// UpsertRuleSet uploads or updates a ruleset
func (h *SecurityHandler) UpsertRuleSet(c *gin.Context) {
+ if !requireAdmin(c) {
+ return
+ }
+
var payload models.SecurityRuleSet
if err := c.ShouldBindJSON(&payload); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid payload"})
@@ -401,6 +417,10 @@ func (h *SecurityHandler) UpsertRuleSet(c *gin.Context) {
// DeleteRuleSet removes a ruleset by id
func (h *SecurityHandler) DeleteRuleSet(c *gin.Context) {
+ if !requireAdmin(c) {
+ return
+ }
+
idParam := c.Param("id")
if idParam == "" {
c.JSON(http.StatusBadRequest, gin.H{"error": "id is required"})
@@ -610,6 +630,10 @@ func (h *SecurityHandler) GetGeoIPStatus(c *gin.Context) {
// ReloadGeoIP reloads the GeoIP database from disk.
func (h *SecurityHandler) ReloadGeoIP(c *gin.Context) {
+ if !requireAdmin(c) {
+ return
+ }
+
if h.geoipSvc == nil {
c.JSON(http.StatusServiceUnavailable, gin.H{
"error": "GeoIP service not initialized",
@@ -641,6 +665,10 @@ func (h *SecurityHandler) ReloadGeoIP(c *gin.Context) {
// LookupGeoIP performs a GeoIP lookup for a given IP address.
func (h *SecurityHandler) LookupGeoIP(c *gin.Context) {
+ if !requireAdmin(c) {
+ return
+ }
+
var req struct {
IPAddress string `json:"ip_address" binding:"required"`
}
@@ -707,6 +735,10 @@ func (h *SecurityHandler) GetWAFExclusions(c *gin.Context) {
// AddWAFExclusion adds a rule exclusion to the WAF configuration
func (h *SecurityHandler) AddWAFExclusion(c *gin.Context) {
+ if !requireAdmin(c) {
+ return
+ }
+
var req WAFExclusionRequest
if err := c.ShouldBindJSON(&req); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": "rule_id is required"})
@@ -786,6 +818,10 @@ func (h *SecurityHandler) AddWAFExclusion(c *gin.Context) {
// DeleteWAFExclusion removes a rule exclusion by rule_id
func (h *SecurityHandler) DeleteWAFExclusion(c *gin.Context) {
+ if !requireAdmin(c) {
+ return
+ }
+
ruleIDParam := c.Param("rule_id")
if ruleIDParam == "" {
c.JSON(http.StatusBadRequest, gin.H{"error": "rule_id is required"})
diff --git a/backend/internal/api/handlers/security_handler_audit_test.go b/backend/internal/api/handlers/security_handler_audit_test.go
index 5ba7251a..47d13c2f 100644
--- a/backend/internal/api/handlers/security_handler_audit_test.go
+++ b/backend/internal/api/handlers/security_handler_audit_test.go
@@ -100,6 +100,10 @@ func TestSecurityHandler_CreateDecision_SQLInjection(t *testing.T) {
h := NewSecurityHandler(cfg, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/api/v1/security/decisions", h.CreateDecision)
// Attempt SQL injection via payload fields
@@ -143,6 +147,10 @@ func TestSecurityHandler_UpsertRuleSet_MassivePayload(t *testing.T) {
h := NewSecurityHandler(cfg, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/api/v1/security/rulesets", h.UpsertRuleSet)
// Try to submit a 3MB payload (should be rejected by service)
@@ -175,6 +183,10 @@ func TestSecurityHandler_UpsertRuleSet_EmptyName(t *testing.T) {
h := NewSecurityHandler(cfg, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/api/v1/security/rulesets", h.UpsertRuleSet)
payload := map[string]any{
@@ -203,6 +215,10 @@ func TestSecurityHandler_CreateDecision_EmptyFields(t *testing.T) {
h := NewSecurityHandler(cfg, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/api/v1/security/decisions", h.CreateDecision)
testCases := []struct {
@@ -347,6 +363,10 @@ func TestSecurityAudit_DeleteRuleSet_InvalidID(t *testing.T) {
h := NewSecurityHandler(cfg, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.DELETE("/api/v1/security/rulesets/:id", h.DeleteRuleSet)
testCases := []struct {
@@ -388,6 +408,10 @@ func TestSecurityHandler_UpsertRuleSet_XSSInContent(t *testing.T) {
h := NewSecurityHandler(cfg, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/api/v1/security/rulesets", h.UpsertRuleSet)
router.GET("/api/v1/security/rulesets", h.ListRuleSets)
@@ -433,6 +457,10 @@ func TestSecurityHandler_UpdateConfig_RateLimitBounds(t *testing.T) {
h := NewSecurityHandler(cfg, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.PUT("/api/v1/security/config", h.UpdateConfig)
testCases := []struct {
diff --git a/backend/internal/api/handlers/security_handler_authz_test.go b/backend/internal/api/handlers/security_handler_authz_test.go
new file mode 100644
index 00000000..32c6bf8a
--- /dev/null
+++ b/backend/internal/api/handlers/security_handler_authz_test.go
@@ -0,0 +1,58 @@
+package handlers
+
+import (
+ "bytes"
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "github.com/gin-gonic/gin"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+
+ "github.com/Wikid82/charon/backend/internal/config"
+ "github.com/Wikid82/charon/backend/internal/models"
+)
+
+func TestSecurityHandler_MutatorsRequireAdmin(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupTestDB(t)
+ require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.SecurityRuleSet{}, &models.SecurityDecision{}, &models.SecurityAudit{}))
+
+ handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
+ router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("userID", uint(123))
+ c.Set("role", "user")
+ c.Next()
+ })
+
+ router.POST("/security/config", handler.UpdateConfig)
+ router.POST("/security/breakglass/generate", handler.GenerateBreakGlass)
+ router.POST("/security/decisions", handler.CreateDecision)
+ router.POST("/security/rulesets", handler.UpsertRuleSet)
+ router.DELETE("/security/rulesets/:id", handler.DeleteRuleSet)
+
+ testCases := []struct {
+ name string
+ method string
+ url string
+ body string
+ }{
+ {name: "update-config", method: http.MethodPost, url: "/security/config", body: `{"name":"default"}`},
+ {name: "generate-breakglass", method: http.MethodPost, url: "/security/breakglass/generate", body: `{}`},
+ {name: "create-decision", method: http.MethodPost, url: "/security/decisions", body: `{"ip":"1.2.3.4","action":"block"}`},
+ {name: "upsert-ruleset", method: http.MethodPost, url: "/security/rulesets", body: `{"name":"owasp-crs","mode":"block","content":"x"}`},
+ {name: "delete-ruleset", method: http.MethodDelete, url: "/security/rulesets/1", body: ""},
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.name, func(t *testing.T) {
+ req := httptest.NewRequest(tc.method, tc.url, bytes.NewBufferString(tc.body))
+ req.Header.Set("Content-Type", "application/json")
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ assert.Equal(t, http.StatusForbidden, w.Code)
+ })
+ }
+}
diff --git a/backend/internal/api/handlers/security_handler_clean_test.go b/backend/internal/api/handlers/security_handler_clean_test.go
index 31ab8c2e..5019a34b 100644
--- a/backend/internal/api/handlers/security_handler_clean_test.go
+++ b/backend/internal/api/handlers/security_handler_clean_test.go
@@ -120,6 +120,10 @@ func TestSecurityHandler_GenerateBreakGlass_ReturnsToken(t *testing.T) {
db := setupTestDB(t)
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/breakglass/generate", handler.GenerateBreakGlass)
w := httptest.NewRecorder()
@@ -251,6 +255,10 @@ func TestSecurityHandler_Enable_Disable_WithAdminWhitelistAndToken(t *testing.T)
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
api := router.Group("/api/v1")
api.POST("/security/enable", handler.Enable)
api.POST("/security/disable", handler.Disable)
diff --git a/backend/internal/api/handlers/security_handler_coverage_test.go b/backend/internal/api/handlers/security_handler_coverage_test.go
index 49b83837..7ab25de7 100644
--- a/backend/internal/api/handlers/security_handler_coverage_test.go
+++ b/backend/internal/api/handlers/security_handler_coverage_test.go
@@ -27,6 +27,10 @@ func TestSecurityHandler_UpdateConfig_Success(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/config", handler.UpdateConfig)
payload := map[string]any{
@@ -55,6 +59,10 @@ func TestSecurityHandler_UpdateConfig_DefaultName(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/config", handler.UpdateConfig)
// Payload without name - should default to "default"
@@ -78,6 +86,10 @@ func TestSecurityHandler_UpdateConfig_InvalidPayload(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/config", handler.UpdateConfig)
w := httptest.NewRecorder()
@@ -193,6 +205,10 @@ func TestSecurityHandler_CreateDecision_Success(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/decisions", handler.CreateDecision)
payload := map[string]any{
@@ -218,6 +234,10 @@ func TestSecurityHandler_CreateDecision_MissingIP(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/decisions", handler.CreateDecision)
payload := map[string]any{
@@ -240,6 +260,10 @@ func TestSecurityHandler_CreateDecision_MissingAction(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/decisions", handler.CreateDecision)
payload := map[string]any{
@@ -262,6 +286,10 @@ func TestSecurityHandler_CreateDecision_InvalidPayload(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/decisions", handler.CreateDecision)
w := httptest.NewRecorder()
@@ -306,6 +334,10 @@ func TestSecurityHandler_UpsertRuleSet_Success(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/rulesets", handler.UpsertRuleSet)
payload := map[string]any{
@@ -330,6 +362,10 @@ func TestSecurityHandler_UpsertRuleSet_MissingName(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/rulesets", handler.UpsertRuleSet)
payload := map[string]any{
@@ -353,6 +389,10 @@ func TestSecurityHandler_UpsertRuleSet_InvalidPayload(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/rulesets", handler.UpsertRuleSet)
w := httptest.NewRecorder()
@@ -375,6 +415,10 @@ func TestSecurityHandler_DeleteRuleSet_Success(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.DELETE("/security/rulesets/:id", handler.DeleteRuleSet)
w := httptest.NewRecorder()
@@ -395,6 +439,10 @@ func TestSecurityHandler_DeleteRuleSet_NotFound(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.DELETE("/security/rulesets/:id", handler.DeleteRuleSet)
w := httptest.NewRecorder()
@@ -411,6 +459,10 @@ func TestSecurityHandler_DeleteRuleSet_InvalidID(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.DELETE("/security/rulesets/:id", handler.DeleteRuleSet)
w := httptest.NewRecorder()
@@ -427,6 +479,10 @@ func TestSecurityHandler_DeleteRuleSet_EmptyID(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
// Note: This route pattern won't match empty ID, but testing the handler directly
router.DELETE("/security/rulesets/:id", handler.DeleteRuleSet)
@@ -509,6 +565,10 @@ func TestSecurityHandler_Enable_WithValidBreakGlassToken(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/breakglass/generate", handler.GenerateBreakGlass)
router.POST("/security/enable", handler.Enable)
@@ -600,6 +660,10 @@ func TestSecurityHandler_Disable_FromRemoteWithToken(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/breakglass/generate", handler.GenerateBreakGlass)
router.POST("/security/disable", func(c *gin.Context) {
c.Request.RemoteAddr = "192.168.1.100:12345" // Remote IP
@@ -689,6 +753,10 @@ func TestSecurityHandler_GenerateBreakGlass_NoConfig(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/breakglass/generate", handler.GenerateBreakGlass)
w := httptest.NewRecorder()
diff --git a/backend/internal/api/handlers/security_handler_rules_decisions_test.go b/backend/internal/api/handlers/security_handler_rules_decisions_test.go
index 7dcc17b2..b8de1568 100644
--- a/backend/internal/api/handlers/security_handler_rules_decisions_test.go
+++ b/backend/internal/api/handlers/security_handler_rules_decisions_test.go
@@ -30,6 +30,10 @@ func setupSecurityTestRouterWithExtras(t *testing.T) (*gin.Engine, *gorm.DB) {
require.NoError(t, db.AutoMigrate(&models.ProxyHost{}, &models.Location{}, &models.Setting{}, &models.CaddyConfig{}, &models.SSLCertificate{}, &models.AccessList{}, &models.SecurityConfig{}, &models.SecurityDecision{}, &models.SecurityAudit{}, &models.SecurityRuleSet{}))
r := gin.New()
+ r.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
api := r.Group("/api/v1")
cfg := config.SecurityConfig{}
h := NewSecurityHandler(cfg, db, nil)
@@ -148,6 +152,10 @@ func TestSecurityHandler_UpsertDeleteTriggersApplyConfig(t *testing.T) {
m := caddy.NewManager(client, db, tmp, "", false, config.SecurityConfig{CerberusEnabled: true, WAFMode: "block"})
r := gin.New()
+ r.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
api := r.Group("/api/v1")
cfg := config.SecurityConfig{}
h := NewSecurityHandler(cfg, db, m)
diff --git a/backend/internal/api/handlers/security_handler_waf_test.go b/backend/internal/api/handlers/security_handler_waf_test.go
index 26eb3ee9..9f338b06 100644
--- a/backend/internal/api/handlers/security_handler_waf_test.go
+++ b/backend/internal/api/handlers/security_handler_waf_test.go
@@ -110,6 +110,10 @@ func TestSecurityHandler_AddWAFExclusion_Success(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/waf/exclusions", handler.AddWAFExclusion)
payload := map[string]any{
@@ -140,6 +144,10 @@ func TestSecurityHandler_AddWAFExclusion_WithTarget(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/waf/exclusions", handler.AddWAFExclusion)
payload := map[string]any{
@@ -175,6 +183,10 @@ func TestSecurityHandler_AddWAFExclusion_ToExistingConfig(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/waf/exclusions", handler.AddWAFExclusion)
router.GET("/security/waf/exclusions", handler.GetWAFExclusions)
@@ -215,6 +227,10 @@ func TestSecurityHandler_AddWAFExclusion_Duplicate(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/waf/exclusions", handler.AddWAFExclusion)
// Try to add duplicate
@@ -244,6 +260,10 @@ func TestSecurityHandler_AddWAFExclusion_DuplicateWithDifferentTarget(t *testing
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/waf/exclusions", handler.AddWAFExclusion)
// Add same rule_id with different target - should succeed
@@ -268,6 +288,10 @@ func TestSecurityHandler_AddWAFExclusion_MissingRuleID(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/waf/exclusions", handler.AddWAFExclusion)
payload := map[string]any{
@@ -290,6 +314,10 @@ func TestSecurityHandler_AddWAFExclusion_InvalidRuleID(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/waf/exclusions", handler.AddWAFExclusion)
// Zero rule_id
@@ -313,6 +341,10 @@ func TestSecurityHandler_AddWAFExclusion_NegativeRuleID(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/waf/exclusions", handler.AddWAFExclusion)
payload := map[string]any{
@@ -335,6 +367,10 @@ func TestSecurityHandler_AddWAFExclusion_InvalidPayload(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.POST("/security/waf/exclusions", handler.AddWAFExclusion)
w := httptest.NewRecorder()
@@ -358,6 +394,10 @@ func TestSecurityHandler_DeleteWAFExclusion_Success(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.DELETE("/security/waf/exclusions/:rule_id", handler.DeleteWAFExclusion)
router.GET("/security/waf/exclusions", handler.GetWAFExclusions)
@@ -394,6 +434,10 @@ func TestSecurityHandler_DeleteWAFExclusion_WithTarget(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.DELETE("/security/waf/exclusions/:rule_id", handler.DeleteWAFExclusion)
router.GET("/security/waf/exclusions", handler.GetWAFExclusions)
@@ -430,6 +474,10 @@ func TestSecurityHandler_DeleteWAFExclusion_NotFound(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.DELETE("/security/waf/exclusions/:rule_id", handler.DeleteWAFExclusion)
w := httptest.NewRecorder()
@@ -446,6 +494,10 @@ func TestSecurityHandler_DeleteWAFExclusion_NoConfig(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.DELETE("/security/waf/exclusions/:rule_id", handler.DeleteWAFExclusion)
w := httptest.NewRecorder()
@@ -462,6 +514,10 @@ func TestSecurityHandler_DeleteWAFExclusion_InvalidRuleID(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.DELETE("/security/waf/exclusions/:rule_id", handler.DeleteWAFExclusion)
w := httptest.NewRecorder()
@@ -478,6 +534,10 @@ func TestSecurityHandler_DeleteWAFExclusion_ZeroRuleID(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.DELETE("/security/waf/exclusions/:rule_id", handler.DeleteWAFExclusion)
w := httptest.NewRecorder()
@@ -494,6 +554,10 @@ func TestSecurityHandler_DeleteWAFExclusion_NegativeRuleID(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.DELETE("/security/waf/exclusions/:rule_id", handler.DeleteWAFExclusion)
w := httptest.NewRecorder()
@@ -533,6 +597,10 @@ func TestSecurityHandler_WAFExclusion_FullWorkflow(t *testing.T) {
handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
router.GET("/security/waf/exclusions", handler.GetWAFExclusions)
router.POST("/security/waf/exclusions", handler.AddWAFExclusion)
router.DELETE("/security/waf/exclusions/:rule_id", handler.DeleteWAFExclusion)
diff --git a/backend/internal/api/handlers/settings_handler.go b/backend/internal/api/handlers/settings_handler.go
index 7d6603fd..935cd9d8 100644
--- a/backend/internal/api/handlers/settings_handler.go
+++ b/backend/internal/api/handlers/settings_handler.go
@@ -5,6 +5,7 @@ import (
"errors"
"fmt"
"net/http"
+ "strconv"
"strings"
"time"
@@ -37,6 +38,15 @@ type SettingsHandler struct {
DataRoot string
}
+const (
+ settingCaddyKeepaliveIdle = "caddy.keepalive_idle"
+ settingCaddyKeepaliveCount = "caddy.keepalive_count"
+ minCaddyKeepaliveIdleDuration = time.Second
+ maxCaddyKeepaliveIdleDuration = 24 * time.Hour
+ minCaddyKeepaliveCount = 1
+ maxCaddyKeepaliveCount = 100
+)
+
func NewSettingsHandler(db *gorm.DB) *SettingsHandler {
return &SettingsHandler{
DB: db,
@@ -65,14 +75,43 @@ func (h *SettingsHandler) GetSettings(c *gin.Context) {
}
// Convert to map for easier frontend consumption
- settingsMap := make(map[string]string)
+ settingsMap := make(map[string]any)
for _, s := range settings {
+ if isSensitiveSettingKey(s.Key) {
+ hasSecret := strings.TrimSpace(s.Value) != ""
+ settingsMap[s.Key] = "********"
+ settingsMap[s.Key+".has_secret"] = hasSecret
+ settingsMap[s.Key+".last_updated"] = s.UpdatedAt.UTC().Format(time.RFC3339)
+ continue
+ }
+
settingsMap[s.Key] = s.Value
}
c.JSON(http.StatusOK, settingsMap)
}
+func isSensitiveSettingKey(key string) bool {
+ normalizedKey := strings.ToLower(strings.TrimSpace(key))
+
+ sensitiveFragments := []string{
+ "password",
+ "secret",
+ "token",
+ "api_key",
+ "apikey",
+ "webhook",
+ }
+
+ for _, fragment := range sensitiveFragments {
+ if strings.Contains(normalizedKey, fragment) {
+ return true
+ }
+ }
+
+ return false
+}
+
type UpdateSettingRequest struct {
Key string `json:"key" binding:"required"`
Value string `json:"value" binding:"required"`
@@ -109,6 +148,11 @@ func (h *SettingsHandler) UpdateSetting(c *gin.Context) {
}
}
+ if err := validateOptionalKeepaliveSetting(req.Key, req.Value); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
+ return
+ }
+
setting := models.Setting{
Key: req.Key,
Value: req.Value,
@@ -247,6 +291,10 @@ func (h *SettingsHandler) PatchConfig(c *gin.Context) {
}
}
+ if err := validateOptionalKeepaliveSetting(key, value); err != nil {
+ return err
+ }
+
setting := models.Setting{
Key: key,
Value: value,
@@ -284,6 +332,10 @@ func (h *SettingsHandler) PatchConfig(c *gin.Context) {
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid admin_whitelist"})
return
}
+ if strings.Contains(err.Error(), "invalid caddy.keepalive_idle") || strings.Contains(err.Error(), "invalid caddy.keepalive_count") {
+ c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
+ return
+ }
if respondPermissionError(c, h.SecuritySvc, "settings_save_failed", err, h.DataRoot) {
return
}
@@ -401,6 +453,53 @@ func validateAdminWhitelist(whitelist string) error {
return nil
}
+func validateOptionalKeepaliveSetting(key, value string) error {
+ switch key {
+ case settingCaddyKeepaliveIdle:
+ return validateKeepaliveIdleValue(value)
+ case settingCaddyKeepaliveCount:
+ return validateKeepaliveCountValue(value)
+ default:
+ return nil
+ }
+}
+
+func validateKeepaliveIdleValue(value string) error {
+ idle := strings.TrimSpace(value)
+ if idle == "" {
+ return nil
+ }
+
+ d, err := time.ParseDuration(idle)
+ if err != nil {
+ return fmt.Errorf("invalid caddy.keepalive_idle")
+ }
+
+ if d < minCaddyKeepaliveIdleDuration || d > maxCaddyKeepaliveIdleDuration {
+ return fmt.Errorf("invalid caddy.keepalive_idle")
+ }
+
+ return nil
+}
+
+func validateKeepaliveCountValue(value string) error {
+ raw := strings.TrimSpace(value)
+ if raw == "" {
+ return nil
+ }
+
+ count, err := strconv.Atoi(raw)
+ if err != nil {
+ return fmt.Errorf("invalid caddy.keepalive_count")
+ }
+
+ if count < minCaddyKeepaliveCount || count > maxCaddyKeepaliveCount {
+ return fmt.Errorf("invalid caddy.keepalive_count")
+ }
+
+ return nil
+}
+
func (h *SettingsHandler) syncAdminWhitelist(whitelist string) error {
return h.syncAdminWhitelistWithDB(h.DB, whitelist)
}
@@ -433,6 +532,10 @@ type SMTPConfigRequest struct {
// GetSMTPConfig returns the current SMTP configuration.
func (h *SettingsHandler) GetSMTPConfig(c *gin.Context) {
+ if !requireAdmin(c) {
+ return
+ }
+
config, err := h.MailService.GetSMTPConfig()
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to fetch SMTP configuration"})
diff --git a/backend/internal/api/handlers/settings_handler_test.go b/backend/internal/api/handlers/settings_handler_test.go
index fdc1097d..f36a28d3 100644
--- a/backend/internal/api/handlers/settings_handler_test.go
+++ b/backend/internal/api/handlers/settings_handler_test.go
@@ -182,6 +182,31 @@ func TestSettingsHandler_GetSettings(t *testing.T) {
assert.Equal(t, "test_value", response["test_key"])
}
+func TestSettingsHandler_GetSettings_MasksSensitiveValues(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupSettingsTestDB(t)
+
+ db.Create(&models.Setting{Key: "smtp_password", Value: "super-secret-password", Category: "smtp", Type: "string"})
+
+ handler := handlers.NewSettingsHandler(db)
+ router := newAdminRouter()
+ router.GET("/settings", handler.GetSettings)
+
+ w := httptest.NewRecorder()
+ req, _ := http.NewRequest("GET", "/settings", http.NoBody)
+ router.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusOK, w.Code)
+
+ var response map[string]any
+ err := json.Unmarshal(w.Body.Bytes(), &response)
+ assert.NoError(t, err)
+ assert.Equal(t, "********", response["smtp_password"])
+ assert.Equal(t, true, response["smtp_password.has_secret"])
+ _, hasRaw := response["super-secret-password"]
+ assert.False(t, hasRaw)
+}
+
func TestSettingsHandler_GetSettings_DatabaseError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSettingsTestDB(t)
@@ -413,6 +438,58 @@ func TestSettingsHandler_UpdateSetting_InvalidAdminWhitelist(t *testing.T) {
assert.Contains(t, w.Body.String(), "Invalid admin_whitelist")
}
+func TestSettingsHandler_UpdateSetting_InvalidKeepaliveIdle(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupSettingsTestDB(t)
+
+ handler := handlers.NewSettingsHandler(db)
+ router := newAdminRouter()
+ router.POST("/settings", handler.UpdateSetting)
+
+ payload := map[string]string{
+ "key": "caddy.keepalive_idle",
+ "value": "bad-duration",
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ req, _ := http.NewRequest("POST", "/settings", bytes.NewBuffer(body))
+ req.Header.Set("Content-Type", "application/json")
+ router.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusBadRequest, w.Code)
+ assert.Contains(t, w.Body.String(), "invalid caddy.keepalive_idle")
+}
+
+func TestSettingsHandler_UpdateSetting_ValidKeepaliveCount(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupSettingsTestDB(t)
+
+ handler := handlers.NewSettingsHandler(db)
+ router := newAdminRouter()
+ router.POST("/settings", handler.UpdateSetting)
+
+ payload := map[string]string{
+ "key": "caddy.keepalive_count",
+ "value": "9",
+ "category": "caddy",
+ "type": "number",
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ req, _ := http.NewRequest("POST", "/settings", bytes.NewBuffer(body))
+ req.Header.Set("Content-Type", "application/json")
+ router.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusOK, w.Code)
+
+ var setting models.Setting
+ err := db.Where("key = ?", "caddy.keepalive_count").First(&setting).Error
+ assert.NoError(t, err)
+ assert.Equal(t, "9", setting.Value)
+}
+
func TestSettingsHandler_UpdateSetting_SecurityKeyInvalidatesCache(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSettingsTestDB(t)
@@ -538,6 +615,64 @@ func TestSettingsHandler_PatchConfig_InvalidAdminWhitelist(t *testing.T) {
assert.Contains(t, w.Body.String(), "Invalid admin_whitelist")
}
+func TestSettingsHandler_PatchConfig_InvalidKeepaliveCount(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupSettingsTestDB(t)
+
+ handler := handlers.NewSettingsHandler(db)
+ router := newAdminRouter()
+ router.PATCH("/config", handler.PatchConfig)
+
+ payload := map[string]any{
+ "caddy": map[string]any{
+ "keepalive_count": 0,
+ },
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ req, _ := http.NewRequest(http.MethodPatch, "/config", bytes.NewBuffer(body))
+ req.Header.Set("Content-Type", "application/json")
+ router.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusBadRequest, w.Code)
+ assert.Contains(t, w.Body.String(), "invalid caddy.keepalive_count")
+}
+
+func TestSettingsHandler_PatchConfig_ValidKeepaliveSettings(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupSettingsTestDB(t)
+
+ handler := handlers.NewSettingsHandler(db)
+ router := newAdminRouter()
+ router.PATCH("/config", handler.PatchConfig)
+
+ payload := map[string]any{
+ "caddy": map[string]any{
+ "keepalive_idle": "30s",
+ "keepalive_count": 12,
+ },
+ }
+ body, _ := json.Marshal(payload)
+
+ w := httptest.NewRecorder()
+ req, _ := http.NewRequest(http.MethodPatch, "/config", bytes.NewBuffer(body))
+ req.Header.Set("Content-Type", "application/json")
+ router.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusOK, w.Code)
+
+ var idle models.Setting
+ err := db.Where("key = ?", "caddy.keepalive_idle").First(&idle).Error
+ assert.NoError(t, err)
+ assert.Equal(t, "30s", idle.Value)
+
+ var count models.Setting
+ err = db.Where("key = ?", "caddy.keepalive_count").First(&count).Error
+ assert.NoError(t, err)
+ assert.Equal(t, "12", count.Value)
+}
+
func TestSettingsHandler_PatchConfig_ReloadFailureReturns500(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSettingsTestDB(t)
@@ -864,6 +999,25 @@ func TestSettingsHandler_GetSMTPConfig_DatabaseError(t *testing.T) {
assert.Equal(t, http.StatusInternalServerError, w.Code)
}
+func TestSettingsHandler_GetSMTPConfig_NonAdminForbidden(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ handler, _ := setupSettingsHandlerWithMail(t)
+
+ router := gin.New()
+ router.Use(func(c *gin.Context) {
+ c.Set("role", "user")
+ c.Set("userID", uint(2))
+ c.Next()
+ })
+ router.GET("/api/v1/settings/smtp", handler.GetSMTPConfig)
+
+ w := httptest.NewRecorder()
+ req, _ := http.NewRequest("GET", "/api/v1/settings/smtp", http.NoBody)
+ router.ServeHTTP(w, req)
+
+ assert.Equal(t, http.StatusForbidden, w.Code)
+}
+
func TestSettingsHandler_UpdateSMTPConfig_NonAdmin(t *testing.T) {
gin.SetMode(gin.TestMode)
handler, _ := setupSettingsHandlerWithMail(t)
diff --git a/backend/internal/api/handlers/user_handler.go b/backend/internal/api/handlers/user_handler.go
index 18fc2726..6b1d884a 100644
--- a/backend/internal/api/handlers/user_handler.go
+++ b/backend/internal/api/handlers/user_handler.go
@@ -103,6 +103,18 @@ type SetupRequest struct {
Password string `json:"password" binding:"required,min=8"`
}
+func isSetupConflictError(err error) bool {
+ if err == nil {
+ return false
+ }
+
+ errText := strings.ToLower(err.Error())
+ return strings.Contains(errText, "unique constraint failed") ||
+ strings.Contains(errText, "duplicate key") ||
+ strings.Contains(errText, "database is locked") ||
+ strings.Contains(errText, "database table is locked")
+}
+
// Setup creates the initial admin user and configures the ACME email.
func (h *UserHandler) Setup(c *gin.Context) {
// 1. Check if setup is allowed
@@ -160,6 +172,17 @@ func (h *UserHandler) Setup(c *gin.Context) {
})
if err != nil {
+ var postTxCount int64
+ if countErr := h.DB.Model(&models.User{}).Count(&postTxCount).Error; countErr == nil && postTxCount > 0 {
+ c.JSON(http.StatusForbidden, gin.H{"error": "Setup already completed"})
+ return
+ }
+
+ if isSetupConflictError(err) {
+ c.JSON(http.StatusConflict, gin.H{"error": "Setup conflict: setup already in progress or completed"})
+ return
+ }
+
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to complete setup: " + err.Error()})
return
}
@@ -189,7 +212,12 @@ func (h *UserHandler) RegenerateAPIKey(c *gin.Context) {
return
}
- c.JSON(http.StatusOK, gin.H{"api_key": apiKey})
+ c.JSON(http.StatusOK, gin.H{
+ "message": "API key regenerated successfully",
+ "has_api_key": true,
+ "api_key_masked": maskSecretForResponse(apiKey),
+ "api_key_updated": time.Now().UTC().Format(time.RFC3339),
+ })
}
// GetProfile returns the current user's profile including API key.
@@ -207,11 +235,12 @@ func (h *UserHandler) GetProfile(c *gin.Context) {
}
c.JSON(http.StatusOK, gin.H{
- "id": user.ID,
- "email": user.Email,
- "name": user.Name,
- "role": user.Role,
- "api_key": user.APIKey,
+ "id": user.ID,
+ "email": user.Email,
+ "name": user.Name,
+ "role": user.Role,
+ "has_api_key": strings.TrimSpace(user.APIKey) != "",
+ "api_key_masked": maskSecretForResponse(user.APIKey),
})
}
@@ -548,14 +577,14 @@ func (h *UserHandler) InviteUser(c *gin.Context) {
}
c.JSON(http.StatusCreated, gin.H{
- "id": user.ID,
- "uuid": user.UUID,
- "email": user.Email,
- "role": user.Role,
- "invite_token": inviteToken, // Return token in case email fails
- "invite_url": inviteURL,
- "email_sent": emailSent,
- "expires_at": inviteExpires,
+ "id": user.ID,
+ "uuid": user.UUID,
+ "email": user.Email,
+ "role": user.Role,
+ "invite_token_masked": maskSecretForResponse(inviteToken),
+ "invite_url": redactInviteURL(inviteURL),
+ "email_sent": emailSent,
+ "expires_at": inviteExpires,
})
}
@@ -862,16 +891,32 @@ func (h *UserHandler) ResendInvite(c *gin.Context) {
}
c.JSON(http.StatusOK, gin.H{
- "id": user.ID,
- "uuid": user.UUID,
- "email": user.Email,
- "role": user.Role,
- "invite_token": inviteToken,
- "email_sent": emailSent,
- "expires_at": inviteExpires,
+ "id": user.ID,
+ "uuid": user.UUID,
+ "email": user.Email,
+ "role": user.Role,
+ "invite_token_masked": maskSecretForResponse(inviteToken),
+ "email_sent": emailSent,
+ "expires_at": inviteExpires,
})
}
+func maskSecretForResponse(value string) string {
+ if strings.TrimSpace(value) == "" {
+ return ""
+ }
+
+ return "********"
+}
+
+func redactInviteURL(inviteURL string) string {
+ if strings.TrimSpace(inviteURL) == "" {
+ return ""
+ }
+
+ return "[REDACTED]"
+}
+
// UpdateUserPermissions updates a user's permission mode and host exceptions (admin only).
func (h *UserHandler) UpdateUserPermissions(c *gin.Context) {
role, _ := c.Get("role")
diff --git a/backend/internal/api/handlers/user_handler_test.go b/backend/internal/api/handlers/user_handler_test.go
index 49b53995..bdcb24b7 100644
--- a/backend/internal/api/handlers/user_handler_test.go
+++ b/backend/internal/api/handlers/user_handler_test.go
@@ -3,9 +3,11 @@ package handlers
import (
"bytes"
"encoding/json"
+ "errors"
"net/http"
"net/http/httptest"
"strconv"
+ "sync"
"testing"
"time"
@@ -15,15 +17,11 @@ import (
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "gorm.io/driver/sqlite"
"gorm.io/gorm"
)
func setupUserHandler(t *testing.T) (*UserHandler, *gorm.DB) {
- // Use unique DB for each test to avoid pollution
- dbName := "file:" + t.Name() + "?mode=memory&cache=shared"
- db, err := gorm.Open(sqlite.Open(dbName), &gorm.Config{})
- require.NoError(t, err)
+ db := OpenTestDB(t)
_ = db.AutoMigrate(&models.User{}, &models.Setting{}, &models.SecurityAudit{})
return NewUserHandler(db), db
}
@@ -131,6 +129,224 @@ func TestUserHandler_Setup(t *testing.T) {
assert.Equal(t, http.StatusForbidden, w.Code)
}
+func TestUserHandler_Setup_OneWayInvariant_ReentryRejectedAndSingleUser(t *testing.T) {
+ handler, db := setupUserHandler(t)
+ gin.SetMode(gin.TestMode)
+ r := gin.New()
+ r.POST("/setup", handler.Setup)
+
+ initialBody := map[string]string{
+ "name": "Admin",
+ "email": "admin@example.com",
+ "password": "password123",
+ }
+ initialJSON, _ := json.Marshal(initialBody)
+
+ firstReq := httptest.NewRequest(http.MethodPost, "/setup", bytes.NewBuffer(initialJSON))
+ firstReq.Header.Set("Content-Type", "application/json")
+ firstResp := httptest.NewRecorder()
+ r.ServeHTTP(firstResp, firstReq)
+ require.Equal(t, http.StatusCreated, firstResp.Code)
+
+ secondBody := map[string]string{
+ "name": "Different Admin",
+ "email": "different@example.com",
+ "password": "password123",
+ }
+ secondJSON, _ := json.Marshal(secondBody)
+ secondReq := httptest.NewRequest(http.MethodPost, "/setup", bytes.NewBuffer(secondJSON))
+ secondReq.Header.Set("Content-Type", "application/json")
+ secondResp := httptest.NewRecorder()
+ r.ServeHTTP(secondResp, secondReq)
+
+ require.Equal(t, http.StatusForbidden, secondResp.Code)
+
+ var userCount int64
+ require.NoError(t, db.Model(&models.User{}).Count(&userCount).Error)
+ assert.Equal(t, int64(1), userCount)
+}
+
+func TestUserHandler_Setup_ConcurrentAttemptInvariant(t *testing.T) {
+ handler, db := setupUserHandler(t)
+ gin.SetMode(gin.TestMode)
+ r := gin.New()
+ r.POST("/setup", handler.Setup)
+
+ concurrency := 6
+ start := make(chan struct{})
+ statuses := make(chan int, concurrency)
+
+ var wg sync.WaitGroup
+ for i := 0; i < concurrency; i++ {
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ <-start
+
+ body := map[string]string{
+ "name": "Admin",
+ "email": "admin@example.com",
+ "password": "password123",
+ }
+ jsonBody, _ := json.Marshal(body)
+
+ req := httptest.NewRequest(http.MethodPost, "/setup", bytes.NewBuffer(jsonBody))
+ req.Header.Set("Content-Type", "application/json")
+ resp := httptest.NewRecorder()
+ r.ServeHTTP(resp, req)
+ statuses <- resp.Code
+ }()
+ }
+
+ close(start)
+ wg.Wait()
+ close(statuses)
+
+ createdCount := 0
+ forbiddenOrConflictCount := 0
+ for status := range statuses {
+ if status == http.StatusCreated {
+ createdCount++
+ continue
+ }
+
+ if status == http.StatusForbidden || status == http.StatusConflict {
+ forbiddenOrConflictCount++
+ continue
+ }
+
+ t.Fatalf("unexpected setup concurrency status: %d", status)
+ }
+
+ assert.Equal(t, 1, createdCount)
+ assert.Equal(t, concurrency-1, forbiddenOrConflictCount)
+
+ var userCount int64
+ require.NoError(t, db.Model(&models.User{}).Count(&userCount).Error)
+ assert.Equal(t, int64(1), userCount)
+}
+
+func TestUserHandler_Setup_ResponseSecretEchoContract(t *testing.T) {
+ handler, _ := setupUserHandler(t)
+ gin.SetMode(gin.TestMode)
+ r := gin.New()
+ r.POST("/setup", handler.Setup)
+
+ body := map[string]string{
+ "name": "Admin",
+ "email": "admin@example.com",
+ "password": "password123",
+ }
+ jsonBody, _ := json.Marshal(body)
+
+ req := httptest.NewRequest(http.MethodPost, "/setup", bytes.NewBuffer(jsonBody))
+ req.Header.Set("Content-Type", "application/json")
+ resp := httptest.NewRecorder()
+ r.ServeHTTP(resp, req)
+ require.Equal(t, http.StatusCreated, resp.Code)
+
+ var payload map[string]any
+ require.NoError(t, json.Unmarshal(resp.Body.Bytes(), &payload))
+
+ userValue, ok := payload["user"]
+ require.True(t, ok)
+ userMap, ok := userValue.(map[string]any)
+ require.True(t, ok)
+
+ _, hasAPIKey := userMap["api_key"]
+ _, hasPassword := userMap["password"]
+ _, hasPasswordHash := userMap["password_hash"]
+ _, hasInviteToken := userMap["invite_token"]
+
+ assert.False(t, hasAPIKey)
+ assert.False(t, hasPassword)
+ assert.False(t, hasPasswordHash)
+ assert.False(t, hasInviteToken)
+}
+
+func TestUserHandler_GetProfile_SecretEchoContract(t *testing.T) {
+ handler, db := setupUserHandler(t)
+
+ user := &models.User{
+ UUID: uuid.NewString(),
+ Email: "profile@example.com",
+ Name: "Profile User",
+ APIKey: "real-secret-api-key",
+ InviteToken: "invite-secret-token",
+ PasswordHash: "hashed-password-value",
+ }
+ require.NoError(t, db.Create(user).Error)
+
+ gin.SetMode(gin.TestMode)
+ r := gin.New()
+ r.Use(func(c *gin.Context) {
+ c.Set("userID", user.ID)
+ c.Next()
+ })
+ r.GET("/profile", handler.GetProfile)
+
+ req := httptest.NewRequest(http.MethodGet, "/profile", http.NoBody)
+ resp := httptest.NewRecorder()
+ r.ServeHTTP(resp, req)
+
+ require.Equal(t, http.StatusOK, resp.Code)
+ var payload map[string]any
+ require.NoError(t, json.Unmarshal(resp.Body.Bytes(), &payload))
+
+ _, hasAPIKey := payload["api_key"]
+ _, hasPassword := payload["password"]
+ _, hasPasswordHash := payload["password_hash"]
+ _, hasInviteToken := payload["invite_token"]
+
+ assert.False(t, hasAPIKey)
+ assert.False(t, hasPassword)
+ assert.False(t, hasPasswordHash)
+ assert.False(t, hasInviteToken)
+ assert.Equal(t, "********", payload["api_key_masked"])
+}
+
+func TestUserHandler_ListUsers_SecretEchoContract(t *testing.T) {
+ handler, db := setupUserHandlerWithProxyHosts(t)
+
+ user := &models.User{
+ UUID: uuid.NewString(),
+ Email: "user@example.com",
+ Name: "User",
+ Role: "user",
+ APIKey: "raw-api-key",
+ InviteToken: "raw-invite-token",
+ PasswordHash: "raw-password-hash",
+ }
+ require.NoError(t, db.Create(user).Error)
+
+ gin.SetMode(gin.TestMode)
+ r := gin.New()
+ r.Use(func(c *gin.Context) {
+ c.Set("role", "admin")
+ c.Next()
+ })
+ r.GET("/users", handler.ListUsers)
+
+ req := httptest.NewRequest(http.MethodGet, "/users", http.NoBody)
+ resp := httptest.NewRecorder()
+ r.ServeHTTP(resp, req)
+
+ require.Equal(t, http.StatusOK, resp.Code)
+ var users []map[string]any
+ require.NoError(t, json.Unmarshal(resp.Body.Bytes(), &users))
+ require.Len(t, users, 1)
+
+ _, hasAPIKey := users[0]["api_key"]
+ _, hasPassword := users[0]["password"]
+ _, hasPasswordHash := users[0]["password_hash"]
+ _, hasInviteToken := users[0]["invite_token"]
+
+ assert.False(t, hasAPIKey)
+ assert.False(t, hasPassword)
+ assert.False(t, hasPasswordHash)
+ assert.False(t, hasInviteToken)
+}
+
func TestUserHandler_Setup_DBError(t *testing.T) {
// Can't easily mock DB error with sqlite memory unless we close it or something.
// But we can try to insert duplicate email if we had a unique constraint and pre-seeded data,
@@ -162,15 +378,16 @@ func TestUserHandler_RegenerateAPIKey(t *testing.T) {
r.ServeHTTP(w, req)
assert.Equal(t, http.StatusOK, w.Code)
- var resp map[string]string
+ var resp map[string]any
err := json.Unmarshal(w.Body.Bytes(), &resp)
require.NoError(t, err, "Failed to unmarshal response")
- assert.NotEmpty(t, resp["api_key"])
+ assert.Equal(t, "API key regenerated successfully", resp["message"])
+ assert.Equal(t, "********", resp["api_key_masked"])
// Verify DB
var updatedUser models.User
db.First(&updatedUser, user.ID)
- assert.Equal(t, resp["api_key"], updatedUser.APIKey)
+ assert.NotEmpty(t, updatedUser.APIKey)
}
func TestUserHandler_GetProfile(t *testing.T) {
@@ -442,9 +659,7 @@ func TestUserHandler_UpdateProfile_Errors(t *testing.T) {
// ============= User Management Tests (Admin functions) =============
func setupUserHandlerWithProxyHosts(t *testing.T) (*UserHandler, *gorm.DB) {
- dbName := "file:" + t.Name() + "?mode=memory&cache=shared"
- db, err := gorm.Open(sqlite.Open(dbName), &gorm.Config{})
- require.NoError(t, err)
+ db := OpenTestDB(t)
_ = db.AutoMigrate(&models.User{}, &models.Setting{}, &models.ProxyHost{}, &models.SecurityAudit{})
return NewUserHandler(db), db
}
@@ -1376,7 +1591,7 @@ func TestUserHandler_InviteUser_Success(t *testing.T) {
var resp map[string]any
err := json.Unmarshal(w.Body.Bytes(), &resp)
require.NoError(t, err, "Failed to unmarshal response")
- assert.NotEmpty(t, resp["invite_token"])
+ assert.Equal(t, "********", resp["invite_token_masked"])
assert.Equal(t, "", resp["invite_url"])
// email_sent is false because no SMTP is configured
assert.Equal(t, false, resp["email_sent"].(bool))
@@ -1500,7 +1715,7 @@ func TestUserHandler_InviteUser_WithSMTPConfigured(t *testing.T) {
var resp map[string]any
err := json.Unmarshal(w.Body.Bytes(), &resp)
require.NoError(t, err, "Failed to unmarshal response")
- assert.NotEmpty(t, resp["invite_token"])
+ assert.Equal(t, "********", resp["invite_token_masked"])
assert.Equal(t, "", resp["invite_url"])
assert.Equal(t, false, resp["email_sent"].(bool))
}
@@ -1553,8 +1768,8 @@ func TestUserHandler_InviteUser_WithSMTPAndConfiguredPublicURL_IncludesInviteURL
var resp map[string]any
err := json.Unmarshal(w.Body.Bytes(), &resp)
require.NoError(t, err, "Failed to unmarshal response")
- token := resp["invite_token"].(string)
- assert.Equal(t, "https://charon.example.com/accept-invite?token="+token, resp["invite_url"])
+ assert.Equal(t, "********", resp["invite_token_masked"])
+ assert.Equal(t, "[REDACTED]", resp["invite_url"])
assert.Equal(t, true, resp["email_sent"].(bool))
}
@@ -1606,7 +1821,7 @@ func TestUserHandler_InviteUser_WithSMTPAndMalformedPublicURL_DoesNotExposeInvit
var resp map[string]any
err := json.Unmarshal(w.Body.Bytes(), &resp)
require.NoError(t, err, "Failed to unmarshal response")
- assert.NotEmpty(t, resp["invite_token"])
+ assert.Equal(t, "********", resp["invite_token_masked"])
assert.Equal(t, "", resp["invite_url"])
assert.Equal(t, false, resp["email_sent"].(bool))
}
@@ -1668,7 +1883,7 @@ func TestUserHandler_InviteUser_WithSMTPConfigured_DefaultAppName(t *testing.T)
var resp map[string]any
err := json.Unmarshal(w.Body.Bytes(), &resp)
require.NoError(t, err, "Failed to unmarshal response")
- assert.NotEmpty(t, resp["invite_token"])
+ assert.Equal(t, "********", resp["invite_token_masked"])
}
// Note: TestGetBaseURL and TestGetAppName have been removed as these internal helper
@@ -2372,8 +2587,7 @@ func TestResendInvite_Success(t *testing.T) {
var resp map[string]any
err := json.Unmarshal(w.Body.Bytes(), &resp)
require.NoError(t, err, "Failed to unmarshal response")
- assert.NotEmpty(t, resp["invite_token"])
- assert.NotEqual(t, "oldtoken123", resp["invite_token"])
+ assert.Equal(t, "********", resp["invite_token_masked"])
assert.Equal(t, "pending-user@example.com", resp["email"])
assert.Equal(t, false, resp["email_sent"].(bool)) // No SMTP configured
@@ -2381,7 +2595,7 @@ func TestResendInvite_Success(t *testing.T) {
var updatedUser models.User
db.First(&updatedUser, user.ID)
assert.NotEqual(t, "oldtoken123", updatedUser.InviteToken)
- assert.Equal(t, resp["invite_token"], updatedUser.InviteToken)
+ assert.NotEmpty(t, updatedUser.InviteToken)
}
func TestResendInvite_WithExpiredInvite(t *testing.T) {
@@ -2419,11 +2633,75 @@ func TestResendInvite_WithExpiredInvite(t *testing.T) {
var resp map[string]any
err := json.Unmarshal(w.Body.Bytes(), &resp)
require.NoError(t, err, "Failed to unmarshal response")
- assert.NotEmpty(t, resp["invite_token"])
- assert.NotEqual(t, "expiredtoken", resp["invite_token"])
+ assert.Equal(t, "********", resp["invite_token_masked"])
// Verify new expiration is in the future
var updatedUser models.User
db.First(&updatedUser, user.ID)
assert.True(t, updatedUser.InviteExpires.After(time.Now()))
}
+
+// ===== Additional coverage for uncovered utility functions =====
+
+func TestIsSetupConflictError(t *testing.T) {
+ tests := []struct {
+ name string
+ err error
+ expected bool
+ }{
+ {"nil error", nil, false},
+ {"unique constraint failed", errors.New("UNIQUE constraint failed: users.email"), true},
+ {"duplicate key", errors.New("duplicate key value violates unique constraint"), true},
+ {"database is locked", errors.New("database is locked"), true},
+ {"database table is locked", errors.New("database table is locked"), true},
+ {"case insensitive", errors.New("UNIQUE CONSTRAINT FAILED"), true},
+ {"unrelated error", errors.New("connection refused"), false},
+ {"empty error", errors.New(""), false},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := isSetupConflictError(tt.err)
+ assert.Equal(t, tt.expected, result)
+ })
+ }
+}
+
+func TestMaskSecretForResponse(t *testing.T) {
+ tests := []struct {
+ name string
+ input string
+ expected string
+ }{
+ {"non-empty secret", "my-secret-key", "********"},
+ {"empty string", "", ""},
+ {"whitespace only", " ", ""},
+ {"single char", "x", "********"},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := maskSecretForResponse(tt.input)
+ assert.Equal(t, tt.expected, result)
+ })
+ }
+}
+
+func TestRedactInviteURL(t *testing.T) {
+ tests := []struct {
+ name string
+ input string
+ expected string
+ }{
+ {"non-empty url", "https://example.com/invite/abc123", "[REDACTED]"},
+ {"empty string", "", ""},
+ {"whitespace only", " ", ""},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := redactInviteURL(tt.input)
+ assert.Equal(t, tt.expected, result)
+ })
+ }
+}
diff --git a/backend/internal/api/routes/routes.go b/backend/internal/api/routes/routes.go
index 3cb79109..2533036d 100644
--- a/backend/internal/api/routes/routes.go
+++ b/backend/internal/api/routes/routes.go
@@ -29,6 +29,29 @@ import (
_ "github.com/Wikid82/charon/backend/pkg/dnsprovider/custom"
)
+type uptimeBootstrapService interface {
+ CleanupStaleFailureCounts() error
+ SyncMonitors() error
+ CheckAll()
+}
+
+func runInitialUptimeBootstrap(enabled bool, uptimeService uptimeBootstrapService, logWarn func(error, string), logError func(error, string)) {
+ if !enabled {
+ return
+ }
+
+ if err := uptimeService.CleanupStaleFailureCounts(); err != nil && logWarn != nil {
+ logWarn(err, "Failed to cleanup stale failure counts")
+ }
+
+ if err := uptimeService.SyncMonitors(); err != nil && logError != nil {
+ logError(err, "Failed to sync monitors")
+ }
+
+ // Run initial check immediately after sync to avoid the 90s blind window.
+ uptimeService.CheckAll()
+}
+
// Register wires up API routes and performs automatic migrations.
func Register(router *gin.Engine, db *gorm.DB, cfg config.Config) error {
// Caddy Manager - created early so it can be used by settings handlers for config reload
@@ -277,7 +300,7 @@ func RegisterWithDeps(router *gin.Engine, db *gorm.DB, cfg config.Config, caddyM
protected.PATCH("/config", settingsHandler.PatchConfig) // Bulk configuration update
// SMTP Configuration
- protected.GET("/settings/smtp", settingsHandler.GetSMTPConfig)
+ protected.GET("/settings/smtp", middleware.RequireRole("admin"), settingsHandler.GetSMTPConfig)
protected.POST("/settings/smtp", settingsHandler.UpdateSMTPConfig)
protected.POST("/settings/smtp/test", settingsHandler.TestSMTPConfig)
protected.POST("/settings/smtp/test-email", settingsHandler.SendTestEmail)
@@ -410,9 +433,10 @@ func RegisterWithDeps(router *gin.Engine, db *gorm.DB, cfg config.Config, caddyM
dockerHandler := handlers.NewDockerHandler(dockerService, remoteServerService)
dockerHandler.RegisterRoutes(protected)
- // Uptime Service
- uptimeSvc := services.NewUptimeService(db, notificationService)
- uptimeHandler := handlers.NewUptimeHandler(uptimeSvc)
+ // Uptime Service — reuse the single uptimeService instance (defined above)
+ // to share in-memory state (mutexes, notification batching) between
+ // background checker, ProxyHostHandler, and API handlers.
+ uptimeHandler := handlers.NewUptimeHandler(uptimeService)
protected.GET("/uptime/monitors", uptimeHandler.List)
protected.POST("/uptime/monitors", uptimeHandler.Create)
protected.GET("/uptime/monitors/:id/history", uptimeHandler.GetHistory)
@@ -463,11 +487,12 @@ func RegisterWithDeps(router *gin.Engine, db *gorm.DB, cfg config.Config, caddyM
enabled = s.Value == "true"
}
- if enabled {
- if err := uptimeService.SyncMonitors(); err != nil {
- logger.Log().WithError(err).Error("Failed to sync monitors")
- }
- }
+ runInitialUptimeBootstrap(
+ enabled,
+ uptimeService,
+ func(err error, msg string) { logger.Log().WithError(err).Warn(msg) },
+ func(err error, msg string) { logger.Log().WithError(err).Error(msg) },
+ )
ticker := time.NewTicker(1 * time.Minute)
for range ticker.C {
@@ -520,40 +545,43 @@ func RegisterWithDeps(router *gin.Engine, db *gorm.DB, cfg config.Config, caddyM
protected.GET("/security/status", securityHandler.GetStatus)
// Security Config management
protected.GET("/security/config", securityHandler.GetConfig)
- protected.POST("/security/config", securityHandler.UpdateConfig)
- protected.POST("/security/enable", securityHandler.Enable)
- protected.POST("/security/disable", securityHandler.Disable)
- protected.POST("/security/breakglass/generate", securityHandler.GenerateBreakGlass)
protected.GET("/security/decisions", securityHandler.ListDecisions)
- protected.POST("/security/decisions", securityHandler.CreateDecision)
protected.GET("/security/rulesets", securityHandler.ListRuleSets)
- protected.POST("/security/rulesets", securityHandler.UpsertRuleSet)
- protected.DELETE("/security/rulesets/:id", securityHandler.DeleteRuleSet)
protected.GET("/security/rate-limit/presets", securityHandler.GetRateLimitPresets)
// GeoIP endpoints
protected.GET("/security/geoip/status", securityHandler.GetGeoIPStatus)
- protected.POST("/security/geoip/reload", securityHandler.ReloadGeoIP)
- protected.POST("/security/geoip/lookup", securityHandler.LookupGeoIP)
// WAF exclusion endpoints
protected.GET("/security/waf/exclusions", securityHandler.GetWAFExclusions)
- protected.POST("/security/waf/exclusions", securityHandler.AddWAFExclusion)
- protected.DELETE("/security/waf/exclusions/:rule_id", securityHandler.DeleteWAFExclusion)
+
+ securityAdmin := protected.Group("/security")
+ securityAdmin.Use(middleware.RequireRole("admin"))
+ securityAdmin.POST("/config", securityHandler.UpdateConfig)
+ securityAdmin.POST("/enable", securityHandler.Enable)
+ securityAdmin.POST("/disable", securityHandler.Disable)
+ securityAdmin.POST("/breakglass/generate", securityHandler.GenerateBreakGlass)
+ securityAdmin.POST("/decisions", securityHandler.CreateDecision)
+ securityAdmin.POST("/rulesets", securityHandler.UpsertRuleSet)
+ securityAdmin.DELETE("/rulesets/:id", securityHandler.DeleteRuleSet)
+ securityAdmin.POST("/geoip/reload", securityHandler.ReloadGeoIP)
+ securityAdmin.POST("/geoip/lookup", securityHandler.LookupGeoIP)
+ securityAdmin.POST("/waf/exclusions", securityHandler.AddWAFExclusion)
+ securityAdmin.DELETE("/waf/exclusions/:rule_id", securityHandler.DeleteWAFExclusion)
// Security module enable/disable endpoints (granular control)
- protected.POST("/security/acl/enable", securityHandler.EnableACL)
- protected.POST("/security/acl/disable", securityHandler.DisableACL)
- protected.PATCH("/security/acl", securityHandler.PatchACL) // E2E tests use PATCH
- protected.POST("/security/waf/enable", securityHandler.EnableWAF)
- protected.POST("/security/waf/disable", securityHandler.DisableWAF)
- protected.PATCH("/security/waf", securityHandler.PatchWAF) // E2E tests use PATCH
- protected.POST("/security/cerberus/enable", securityHandler.EnableCerberus)
- protected.POST("/security/cerberus/disable", securityHandler.DisableCerberus)
- protected.POST("/security/crowdsec/enable", securityHandler.EnableCrowdSec)
- protected.POST("/security/crowdsec/disable", securityHandler.DisableCrowdSec)
- protected.PATCH("/security/crowdsec", securityHandler.PatchCrowdSec) // E2E tests use PATCH
- protected.POST("/security/rate-limit/enable", securityHandler.EnableRateLimit)
- protected.POST("/security/rate-limit/disable", securityHandler.DisableRateLimit)
- protected.PATCH("/security/rate-limit", securityHandler.PatchRateLimit) // E2E tests use PATCH
+ securityAdmin.POST("/acl/enable", securityHandler.EnableACL)
+ securityAdmin.POST("/acl/disable", securityHandler.DisableACL)
+ securityAdmin.PATCH("/acl", securityHandler.PatchACL) // E2E tests use PATCH
+ securityAdmin.POST("/waf/enable", securityHandler.EnableWAF)
+ securityAdmin.POST("/waf/disable", securityHandler.DisableWAF)
+ securityAdmin.PATCH("/waf", securityHandler.PatchWAF) // E2E tests use PATCH
+ securityAdmin.POST("/cerberus/enable", securityHandler.EnableCerberus)
+ securityAdmin.POST("/cerberus/disable", securityHandler.DisableCerberus)
+ securityAdmin.POST("/crowdsec/enable", securityHandler.EnableCrowdSec)
+ securityAdmin.POST("/crowdsec/disable", securityHandler.DisableCrowdSec)
+ securityAdmin.PATCH("/crowdsec", securityHandler.PatchCrowdSec) // E2E tests use PATCH
+ securityAdmin.POST("/rate-limit/enable", securityHandler.EnableRateLimit)
+ securityAdmin.POST("/rate-limit/disable", securityHandler.DisableRateLimit)
+ securityAdmin.PATCH("/rate-limit", securityHandler.PatchRateLimit) // E2E tests use PATCH
// CrowdSec process management and import
// Data dir for crowdsec (persisted on host via volumes)
@@ -635,7 +663,7 @@ func RegisterWithDeps(router *gin.Engine, db *gorm.DB, cfg config.Config, caddyM
proxyHostHandler.RegisterRoutes(protected)
remoteServerHandler := handlers.NewRemoteServerHandler(remoteServerService, notificationService)
- remoteServerHandler.RegisterRoutes(api)
+ remoteServerHandler.RegisterRoutes(protected)
// Initial Caddy Config Sync
go func() {
@@ -674,17 +702,20 @@ func RegisterWithDeps(router *gin.Engine, db *gorm.DB, cfg config.Config, caddyM
}
// RegisterImportHandler wires up import routes with config dependencies.
-func RegisterImportHandler(router *gin.Engine, db *gorm.DB, caddyBinary, importDir, mountPath string) {
+func RegisterImportHandler(router *gin.Engine, db *gorm.DB, cfg config.Config, caddyBinary, importDir, mountPath string) {
securityService := services.NewSecurityService(db)
importHandler := handlers.NewImportHandlerWithDeps(db, caddyBinary, importDir, mountPath, securityService)
api := router.Group("/api/v1")
- importHandler.RegisterRoutes(api)
+ authService := services.NewAuthService(db, cfg)
+ authenticatedAdmin := api.Group("/")
+ authenticatedAdmin.Use(middleware.AuthMiddleware(authService), middleware.RequireRole("admin"))
+ importHandler.RegisterRoutes(authenticatedAdmin)
// NPM Import Handler - supports Nginx Proxy Manager export format
npmImportHandler := handlers.NewNPMImportHandler(db)
- npmImportHandler.RegisterRoutes(api)
+ npmImportHandler.RegisterRoutes(authenticatedAdmin)
// JSON Import Handler - supports both Charon and NPM export formats
jsonImportHandler := handlers.NewJSONImportHandler(db)
- jsonImportHandler.RegisterRoutes(api)
+ jsonImportHandler.RegisterRoutes(authenticatedAdmin)
}
diff --git a/backend/internal/api/routes/routes_coverage_test.go b/backend/internal/api/routes/routes_coverage_test.go
index e5e11d82..57939ce7 100644
--- a/backend/internal/api/routes/routes_coverage_test.go
+++ b/backend/internal/api/routes/routes_coverage_test.go
@@ -73,3 +73,55 @@ func TestRegister_LegacyMigrationErrorIsNonFatal(t *testing.T) {
}
require.True(t, hasHealth)
}
+
+func TestRegister_UptimeFeatureFlagDefaultErrorIsNonFatal(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ router := gin.New()
+
+ db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared&_test_uptime_flag_warn"), &gorm.Config{
+ Logger: logger.Default.LogMode(logger.Silent),
+ })
+ require.NoError(t, err)
+
+ const cbName = "routes:test_force_settings_query_error"
+ err = db.Callback().Query().Before("gorm:query").Register(cbName, func(tx *gorm.DB) {
+ if tx.Statement != nil && tx.Statement.Table == "settings" {
+ _ = tx.AddError(errors.New("forced settings query failure"))
+ }
+ })
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ _ = db.Callback().Query().Remove(cbName)
+ })
+
+ cfg := config.Config{JWTSecret: "test-secret"}
+
+ err = Register(router, db, cfg)
+ require.NoError(t, err)
+}
+
+func TestRegister_SecurityHeaderPresetInitErrorIsNonFatal(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ router := gin.New()
+
+ db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared&_test_sec_header_presets_warn"), &gorm.Config{
+ Logger: logger.Default.LogMode(logger.Silent),
+ })
+ require.NoError(t, err)
+
+ const cbName = "routes:test_force_security_header_profile_query_error"
+ err = db.Callback().Query().Before("gorm:query").Register(cbName, func(tx *gorm.DB) {
+ if tx.Statement != nil && tx.Statement.Table == "security_header_profiles" {
+ _ = tx.AddError(errors.New("forced security_header_profiles query failure"))
+ }
+ })
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ _ = db.Callback().Query().Remove(cbName)
+ })
+
+ cfg := config.Config{JWTSecret: "test-secret"}
+
+ err = Register(router, db, cfg)
+ require.NoError(t, err)
+}
diff --git a/backend/internal/api/routes/routes_import_test.go b/backend/internal/api/routes/routes_import_test.go
index 0e8707b1..84a0010f 100644
--- a/backend/internal/api/routes/routes_import_test.go
+++ b/backend/internal/api/routes/routes_import_test.go
@@ -1,15 +1,20 @@
package routes_test
import (
+ "net/http"
+ "net/http/httptest"
"testing"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"github.com/Wikid82/charon/backend/internal/api/routes"
+ "github.com/Wikid82/charon/backend/internal/config"
"github.com/Wikid82/charon/backend/internal/models"
+ "github.com/Wikid82/charon/backend/internal/services"
)
func setupTestImportDB(t *testing.T) *gorm.DB {
@@ -27,7 +32,7 @@ func TestRegisterImportHandler(t *testing.T) {
db := setupTestImportDB(t)
router := gin.New()
- routes.RegisterImportHandler(router, db, "echo", "/tmp", "/import/Caddyfile")
+ routes.RegisterImportHandler(router, db, config.Config{JWTSecret: "test-secret"}, "echo", "/tmp", "/import/Caddyfile")
// Verify routes are registered by checking the routes list
routeInfo := router.Routes()
@@ -53,3 +58,30 @@ func TestRegisterImportHandler(t *testing.T) {
assert.True(t, found, "route %s should be registered", route)
}
}
+
+func TestRegisterImportHandler_AuthzGuards(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ db := setupTestImportDB(t)
+ require.NoError(t, db.AutoMigrate(&models.User{}))
+
+ cfg := config.Config{JWTSecret: "test-secret"}
+ router := gin.New()
+ routes.RegisterImportHandler(router, db, cfg, "echo", "/tmp", "/import/Caddyfile")
+
+ unauthReq := httptest.NewRequest(http.MethodGet, "/api/v1/import/status", http.NoBody)
+ unauthW := httptest.NewRecorder()
+ router.ServeHTTP(unauthW, unauthReq)
+ assert.Equal(t, http.StatusUnauthorized, unauthW.Code)
+
+ nonAdmin := &models.User{Email: "user@example.com", Role: "user", Enabled: true}
+ require.NoError(t, db.Create(nonAdmin).Error)
+ authSvc := services.NewAuthService(db, cfg)
+ token, err := authSvc.GenerateToken(nonAdmin)
+ require.NoError(t, err)
+
+ nonAdminReq := httptest.NewRequest(http.MethodGet, "/api/v1/import/preview", http.NoBody)
+ nonAdminReq.Header.Set("Authorization", "Bearer "+token)
+ nonAdminW := httptest.NewRecorder()
+ router.ServeHTTP(nonAdminW, nonAdminReq)
+ assert.Equal(t, http.StatusForbidden, nonAdminW.Code)
+}
diff --git a/backend/internal/api/routes/routes_test.go b/backend/internal/api/routes/routes_test.go
index ebcd8769..d5fcf600 100644
--- a/backend/internal/api/routes/routes_test.go
+++ b/backend/internal/api/routes/routes_test.go
@@ -1,6 +1,7 @@
package routes
import (
+ "io"
"net/http"
"net/http/httptest"
"os"
@@ -16,6 +17,16 @@ import (
"gorm.io/gorm"
)
+func materializeRoutePath(path string) string {
+ segments := strings.Split(path, "/")
+ for i, segment := range segments {
+ if strings.HasPrefix(segment, ":") {
+ segments[i] = "1"
+ }
+ }
+ return strings.Join(segments, "/")
+}
+
func TestRegister(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
@@ -103,11 +114,13 @@ func TestRegisterImportHandler(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
+ cfg := config.Config{JWTSecret: "test-secret"}
+
db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared&_test_import"), &gorm.Config{})
require.NoError(t, err)
// RegisterImportHandler should not panic
- RegisterImportHandler(router, db, "/usr/bin/caddy", "/tmp/imports", "/tmp/mount")
+ RegisterImportHandler(router, db, cfg, "/usr/bin/caddy", "/tmp/imports", "/tmp/mount")
// Verify import routes exist
routes := router.Routes()
@@ -177,6 +190,70 @@ func TestRegister_ProxyHostsRequireAuth(t *testing.T) {
assert.Contains(t, w.Body.String(), "Authorization header required")
}
+func TestRegister_StateChangingRoutesDenyByDefaultWithExplicitAllowlist(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ router := gin.New()
+
+ db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared&_test_mutation_auth_guard"), &gorm.Config{})
+ require.NoError(t, err)
+
+ cfg := config.Config{JWTSecret: "test-secret"}
+ require.NoError(t, Register(router, db, cfg))
+
+ mutatingMethods := map[string]bool{
+ http.MethodPost: true,
+ http.MethodPut: true,
+ http.MethodPatch: true,
+ http.MethodDelete: true,
+ }
+
+ publicMutationAllowlist := map[string]bool{
+ http.MethodPost + " /api/v1/auth/login": true,
+ http.MethodPost + " /api/v1/auth/register": true,
+ http.MethodPost + " /api/v1/setup": true,
+ http.MethodPost + " /api/v1/invite/accept": true,
+ http.MethodPost + " /api/v1/security/events": true,
+ http.MethodPost + " /api/v1/emergency/security-reset": true,
+ }
+
+ for _, route := range router.Routes() {
+ if !strings.HasPrefix(route.Path, "/api/v1/") {
+ continue
+ }
+ if !mutatingMethods[route.Method] {
+ continue
+ }
+
+ key := route.Method + " " + route.Path
+ if publicMutationAllowlist[key] {
+ continue
+ }
+
+ requestPath := materializeRoutePath(route.Path)
+ var body io.Reader = http.NoBody
+ if route.Method == http.MethodPost || route.Method == http.MethodPut || route.Method == http.MethodPatch {
+ body = strings.NewReader("{}")
+ }
+
+ req := httptest.NewRequest(route.Method, requestPath, body)
+ if route.Method == http.MethodPost || route.Method == http.MethodPut || route.Method == http.MethodPatch {
+ req.Header.Set("Content-Type", "application/json")
+ }
+
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+
+ assert.Contains(
+ t,
+ []int{http.StatusUnauthorized, http.StatusForbidden},
+ w.Code,
+ "state-changing endpoint must deny unauthenticated access unless explicitly allowlisted: %s (materialized path: %s)",
+ key,
+ requestPath,
+ )
+ }
+}
+
func TestRegister_DNSProviders_NotRegisteredWhenEncryptionKeyMissing(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
@@ -362,6 +439,42 @@ func TestRegister_AuthenticatedRoutes(t *testing.T) {
}
}
+func TestRegister_StateChangingRoutesRequireAuthentication(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ router := gin.New()
+
+ db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared&_test_mutating_auth_routes"), &gorm.Config{})
+ require.NoError(t, err)
+
+ cfg := config.Config{JWTSecret: "test-secret"}
+ require.NoError(t, Register(router, db, cfg))
+
+ stateChangingPaths := []struct {
+ method string
+ path string
+ }{
+ {http.MethodPost, "/api/v1/backups"},
+ {http.MethodPost, "/api/v1/settings"},
+ {http.MethodPatch, "/api/v1/settings"},
+ {http.MethodPatch, "/api/v1/config"},
+ {http.MethodPost, "/api/v1/user/profile"},
+ {http.MethodPost, "/api/v1/remote-servers"},
+ {http.MethodPost, "/api/v1/remote-servers/test"},
+ {http.MethodPut, "/api/v1/remote-servers/1"},
+ {http.MethodDelete, "/api/v1/remote-servers/1"},
+ {http.MethodPost, "/api/v1/remote-servers/1/test"},
+ }
+
+ for _, tc := range stateChangingPaths {
+ t.Run(tc.method+"_"+tc.path, func(t *testing.T) {
+ w := httptest.NewRecorder()
+ req := httptest.NewRequest(tc.method, tc.path, nil)
+ router.ServeHTTP(w, req)
+ assert.Equal(t, http.StatusUnauthorized, w.Code, "State-changing route %s %s should require auth", tc.method, tc.path)
+ })
+ }
+}
+
func TestRegister_AdminRoutes(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
@@ -915,10 +1028,12 @@ func TestRegisterImportHandler_RoutesExist(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
+ cfg := config.Config{JWTSecret: "test-secret"}
+
db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared&_test_import_routes"), &gorm.Config{})
require.NoError(t, err)
- RegisterImportHandler(router, db, "/usr/bin/caddy", "/tmp/imports", "/tmp/mount")
+ RegisterImportHandler(router, db, cfg, "/usr/bin/caddy", "/tmp/imports", "/tmp/mount")
routes := router.Routes()
routeMap := make(map[string]bool)
diff --git a/backend/internal/api/routes/routes_uptime_bootstrap_test.go b/backend/internal/api/routes/routes_uptime_bootstrap_test.go
new file mode 100644
index 00000000..ac03c221
--- /dev/null
+++ b/backend/internal/api/routes/routes_uptime_bootstrap_test.go
@@ -0,0 +1,107 @@
+package routes
+
+import (
+ "errors"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+type testUptimeBootstrapService struct {
+ cleanupErr error
+ syncErr error
+
+ cleanupCalls int
+ syncCalls int
+ checkAllCalls int
+}
+
+func (s *testUptimeBootstrapService) CleanupStaleFailureCounts() error {
+ s.cleanupCalls++
+ return s.cleanupErr
+}
+
+func (s *testUptimeBootstrapService) SyncMonitors() error {
+ s.syncCalls++
+ return s.syncErr
+}
+
+func (s *testUptimeBootstrapService) CheckAll() {
+ s.checkAllCalls++
+}
+
+func TestRunInitialUptimeBootstrap_Disabled_DoesNothing(t *testing.T) {
+ svc := &testUptimeBootstrapService{}
+
+ warnLogs := 0
+ errorLogs := 0
+ runInitialUptimeBootstrap(
+ false,
+ svc,
+ func(err error, msg string) { warnLogs++ },
+ func(err error, msg string) { errorLogs++ },
+ )
+
+ assert.Equal(t, 0, svc.cleanupCalls)
+ assert.Equal(t, 0, svc.syncCalls)
+ assert.Equal(t, 0, svc.checkAllCalls)
+ assert.Equal(t, 0, warnLogs)
+ assert.Equal(t, 0, errorLogs)
+}
+
+func TestRunInitialUptimeBootstrap_Enabled_HappyPath(t *testing.T) {
+ svc := &testUptimeBootstrapService{}
+
+ warnLogs := 0
+ errorLogs := 0
+ runInitialUptimeBootstrap(
+ true,
+ svc,
+ func(err error, msg string) { warnLogs++ },
+ func(err error, msg string) { errorLogs++ },
+ )
+
+ assert.Equal(t, 1, svc.cleanupCalls)
+ assert.Equal(t, 1, svc.syncCalls)
+ assert.Equal(t, 1, svc.checkAllCalls)
+ assert.Equal(t, 0, warnLogs)
+ assert.Equal(t, 0, errorLogs)
+}
+
+func TestRunInitialUptimeBootstrap_Enabled_CleanupError_StillProceeds(t *testing.T) {
+ svc := &testUptimeBootstrapService{cleanupErr: errors.New("cleanup failed")}
+
+ warnLogs := 0
+ errorLogs := 0
+ runInitialUptimeBootstrap(
+ true,
+ svc,
+ func(err error, msg string) { warnLogs++ },
+ func(err error, msg string) { errorLogs++ },
+ )
+
+ assert.Equal(t, 1, svc.cleanupCalls)
+ assert.Equal(t, 1, svc.syncCalls)
+ assert.Equal(t, 1, svc.checkAllCalls)
+ assert.Equal(t, 1, warnLogs)
+ assert.Equal(t, 0, errorLogs)
+}
+
+func TestRunInitialUptimeBootstrap_Enabled_SyncError_StillChecksAll(t *testing.T) {
+ svc := &testUptimeBootstrapService{syncErr: errors.New("sync failed")}
+
+ warnLogs := 0
+ errorLogs := 0
+ runInitialUptimeBootstrap(
+ true,
+ svc,
+ func(err error, msg string) { warnLogs++ },
+ func(err error, msg string) { errorLogs++ },
+ )
+
+ assert.Equal(t, 1, svc.cleanupCalls)
+ assert.Equal(t, 1, svc.syncCalls)
+ assert.Equal(t, 1, svc.checkAllCalls)
+ assert.Equal(t, 0, warnLogs)
+ assert.Equal(t, 1, errorLogs)
+}
diff --git a/backend/internal/api/tests/user_smtp_audit_test.go b/backend/internal/api/tests/user_smtp_audit_test.go
index 381b4c66..f27b74a9 100644
--- a/backend/internal/api/tests/user_smtp_audit_test.go
+++ b/backend/internal/api/tests/user_smtp_audit_test.go
@@ -100,7 +100,10 @@ func TestInviteToken_MustBeUnguessable(t *testing.T) {
var resp map[string]any
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
- token := resp["invite_token"].(string)
+ var invitedUser models.User
+ require.NoError(t, db.Where("email = ?", "user@test.com").First(&invitedUser).Error)
+ token := invitedUser.InviteToken
+ require.NotEmpty(t, token)
// Token MUST be at least 32 chars (64 hex = 32 bytes = 256 bits)
assert.GreaterOrEqual(t, len(token), 64, "Invite token must be at least 64 hex chars (256 bits)")
diff --git a/backend/internal/caddy/config.go b/backend/internal/caddy/config.go
index 60008607..63a8b893 100644
--- a/backend/internal/caddy/config.go
+++ b/backend/internal/caddy/config.go
@@ -857,6 +857,27 @@ func normalizeHeaderOps(headerOps map[string]any) {
}
}
+func applyOptionalServerKeepalive(conf *Config, keepaliveIdle string, keepaliveCount int) {
+ if conf == nil || conf.Apps.HTTP == nil || conf.Apps.HTTP.Servers == nil {
+ return
+ }
+
+ server, ok := conf.Apps.HTTP.Servers["charon_server"]
+ if !ok || server == nil {
+ return
+ }
+
+ idle := strings.TrimSpace(keepaliveIdle)
+ if idle != "" {
+ server.KeepaliveIdle = &idle
+ }
+
+ if keepaliveCount > 0 {
+ count := keepaliveCount
+ server.KeepaliveCount = &count
+ }
+}
+
// NormalizeAdvancedConfig traverses a parsed JSON advanced config (map or array)
// and normalizes any headers blocks so that header values are arrays of strings.
// It returns the modified config object which can be JSON marshaled again.
diff --git a/backend/internal/caddy/config_generate_test.go b/backend/internal/caddy/config_generate_test.go
index d913f669..c3242f65 100644
--- a/backend/internal/caddy/config_generate_test.go
+++ b/backend/internal/caddy/config_generate_test.go
@@ -103,3 +103,43 @@ func TestGenerateConfig_EmergencyRoutesBypassSecurity(t *testing.T) {
require.NotEqual(t, "crowdsec", name)
}
}
+
+func TestApplyOptionalServerKeepalive_OmitsWhenUnset(t *testing.T) {
+ cfg := &Config{
+ Apps: Apps{
+ HTTP: &HTTPApp{Servers: map[string]*Server{
+ "charon_server": {
+ Listen: []string{":80", ":443"},
+ Routes: []*Route{},
+ },
+ }},
+ },
+ }
+
+ applyOptionalServerKeepalive(cfg, "", 0)
+
+ server := cfg.Apps.HTTP.Servers["charon_server"]
+ require.Nil(t, server.KeepaliveIdle)
+ require.Nil(t, server.KeepaliveCount)
+}
+
+func TestApplyOptionalServerKeepalive_AppliesValidValues(t *testing.T) {
+ cfg := &Config{
+ Apps: Apps{
+ HTTP: &HTTPApp{Servers: map[string]*Server{
+ "charon_server": {
+ Listen: []string{":80", ":443"},
+ Routes: []*Route{},
+ },
+ }},
+ },
+ }
+
+ applyOptionalServerKeepalive(cfg, "45s", 7)
+
+ server := cfg.Apps.HTTP.Servers["charon_server"]
+ require.NotNil(t, server.KeepaliveIdle)
+ require.Equal(t, "45s", *server.KeepaliveIdle)
+ require.NotNil(t, server.KeepaliveCount)
+ require.Equal(t, 7, *server.KeepaliveCount)
+}
diff --git a/backend/internal/caddy/manager.go b/backend/internal/caddy/manager.go
index 01cf5447..c2cfab9d 100644
--- a/backend/internal/caddy/manager.go
+++ b/backend/internal/caddy/manager.go
@@ -8,6 +8,7 @@ import (
"os"
"path/filepath"
"sort"
+ "strconv"
"strings"
"time"
@@ -33,6 +34,15 @@ var (
validateConfigFunc = Validate
)
+const (
+ minKeepaliveIdleDuration = time.Second
+ maxKeepaliveIdleDuration = 24 * time.Hour
+ minKeepaliveCount = 1
+ maxKeepaliveCount = 100
+ settingCaddyKeepaliveIdle = "caddy.keepalive_idle"
+ settingCaddyKeepaliveCnt = "caddy.keepalive_count"
+)
+
// DNSProviderConfig contains a DNS provider with its decrypted credentials
// for use in Caddy DNS challenge configuration generation
type DNSProviderConfig struct {
@@ -277,6 +287,18 @@ func (m *Manager) ApplyConfig(ctx context.Context) error {
// Compute effective security flags (re-read runtime overrides)
_, aclEnabled, wafEnabled, rateLimitEnabled, crowdsecEnabled := m.computeEffectiveFlags(ctx)
+ keepaliveIdle := ""
+ var keepaliveIdleSetting models.Setting
+ if err := m.db.Where("key = ?", settingCaddyKeepaliveIdle).First(&keepaliveIdleSetting).Error; err == nil {
+ keepaliveIdle = sanitizeKeepaliveIdle(keepaliveIdleSetting.Value)
+ }
+
+ keepaliveCount := 0
+ var keepaliveCountSetting models.Setting
+ if err := m.db.Where("key = ?", settingCaddyKeepaliveCnt).First(&keepaliveCountSetting).Error; err == nil {
+ keepaliveCount = sanitizeKeepaliveCount(keepaliveCountSetting.Value)
+ }
+
// Safety check: if Cerberus is enabled in DB and no admin whitelist configured,
// warn but allow initial startup to proceed. This prevents total lockout when
// the user has enabled Cerberus but hasn't configured admin_whitelist yet.
@@ -401,6 +423,8 @@ func (m *Manager) ApplyConfig(ctx context.Context) error {
return fmt.Errorf("generate config: %w", err)
}
+ applyOptionalServerKeepalive(generatedConfig, keepaliveIdle, keepaliveCount)
+
// Debug logging: WAF configuration state for troubleshooting integration issues
logger.Log().WithFields(map[string]any{
"waf_enabled": wafEnabled,
@@ -467,6 +491,42 @@ func (m *Manager) ApplyConfig(ctx context.Context) error {
return nil
}
+func sanitizeKeepaliveIdle(value string) string {
+ idle := strings.TrimSpace(value)
+ if idle == "" {
+ return ""
+ }
+
+ d, err := time.ParseDuration(idle)
+ if err != nil {
+ return ""
+ }
+
+ if d < minKeepaliveIdleDuration || d > maxKeepaliveIdleDuration {
+ return ""
+ }
+
+ return idle
+}
+
+func sanitizeKeepaliveCount(value string) int {
+ raw := strings.TrimSpace(value)
+ if raw == "" {
+ return 0
+ }
+
+ count, err := strconv.Atoi(raw)
+ if err != nil {
+ return 0
+ }
+
+ if count < minKeepaliveCount || count > maxKeepaliveCount {
+ return 0
+ }
+
+ return count
+}
+
// saveSnapshot stores the config to disk with timestamp.
func (m *Manager) saveSnapshot(conf *Config) (string, error) {
timestamp := time.Now().Unix()
diff --git a/backend/internal/caddy/manager_patch_coverage_test.go b/backend/internal/caddy/manager_patch_coverage_test.go
index d9fab970..5939b322 100644
--- a/backend/internal/caddy/manager_patch_coverage_test.go
+++ b/backend/internal/caddy/manager_patch_coverage_test.go
@@ -1,8 +1,10 @@
package caddy
import (
+ "bytes"
"context"
"encoding/base64"
+ "io"
"net/http"
"net/http/httptest"
"os"
@@ -185,3 +187,93 @@ func TestManagerApplyConfig_DNSProviders_SkipsDecryptOrJSONFailures(t *testing.T
require.Len(t, captured, 1)
require.Equal(t, uint(24), captured[0].ID)
}
+
+func TestManagerApplyConfig_MapsKeepaliveSettingsToGeneratedServer(t *testing.T) {
+ var loadBody []byte
+ caddyServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if r.URL.Path == "/load" && r.Method == http.MethodPost {
+ payload, _ := io.ReadAll(r.Body)
+ loadBody = append([]byte(nil), payload...)
+ w.WriteHeader(http.StatusOK)
+ return
+ }
+ w.WriteHeader(http.StatusNotFound)
+ }))
+ defer caddyServer.Close()
+
+ dsn := "file:" + t.Name() + "?mode=memory&cache=shared"
+ db, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{})
+ require.NoError(t, err)
+ require.NoError(t, db.AutoMigrate(
+ &models.ProxyHost{},
+ &models.Location{},
+ &models.Setting{},
+ &models.CaddyConfig{},
+ &models.SSLCertificate{},
+ &models.SecurityConfig{},
+ &models.SecurityRuleSet{},
+ &models.SecurityDecision{},
+ &models.DNSProvider{},
+ ))
+
+ db.Create(&models.ProxyHost{DomainNames: "keepalive.example.com", ForwardHost: "127.0.0.1", ForwardPort: 8080, Enabled: true})
+ db.Create(&models.SecurityConfig{Name: "default", Enabled: true})
+ db.Create(&models.Setting{Key: settingCaddyKeepaliveIdle, Value: "45s"})
+ db.Create(&models.Setting{Key: settingCaddyKeepaliveCnt, Value: "8"})
+
+ origVal := validateConfigFunc
+ defer func() { validateConfigFunc = origVal }()
+ validateConfigFunc = func(_ *Config) error { return nil }
+
+ manager := NewManager(newTestClient(t, caddyServer.URL), db, t.TempDir(), "", false, config.SecurityConfig{CerberusEnabled: true})
+ require.NoError(t, manager.ApplyConfig(context.Background()))
+ require.NotEmpty(t, loadBody)
+
+ require.True(t, bytes.Contains(loadBody, []byte(`"keepalive_idle":"45s"`)))
+ require.True(t, bytes.Contains(loadBody, []byte(`"keepalive_count":8`)))
+}
+
+func TestManagerApplyConfig_InvalidKeepaliveSettingsFallbackToDefaults(t *testing.T) {
+ var loadBody []byte
+ caddyServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if r.URL.Path == "/load" && r.Method == http.MethodPost {
+ payload, _ := io.ReadAll(r.Body)
+ loadBody = append([]byte(nil), payload...)
+ w.WriteHeader(http.StatusOK)
+ return
+ }
+ w.WriteHeader(http.StatusNotFound)
+ }))
+ defer caddyServer.Close()
+
+ dsn := "file:" + t.Name() + "_invalid?mode=memory&cache=shared"
+ db, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{})
+ require.NoError(t, err)
+ require.NoError(t, db.AutoMigrate(
+ &models.ProxyHost{},
+ &models.Location{},
+ &models.Setting{},
+ &models.CaddyConfig{},
+ &models.SSLCertificate{},
+ &models.SecurityConfig{},
+ &models.SecurityRuleSet{},
+ &models.SecurityDecision{},
+ &models.DNSProvider{},
+ ))
+
+ db.Create(&models.ProxyHost{DomainNames: "invalid-keepalive.example.com", ForwardHost: "127.0.0.1", ForwardPort: 8080, Enabled: true})
+ db.Create(&models.SecurityConfig{Name: "default", Enabled: true})
+ db.Create(&models.Setting{Key: settingCaddyKeepaliveIdle, Value: "bad"})
+ db.Create(&models.Setting{Key: settingCaddyKeepaliveCnt, Value: "-1"})
+
+ origVal := validateConfigFunc
+ defer func() { validateConfigFunc = origVal }()
+ validateConfigFunc = func(_ *Config) error { return nil }
+
+ manager := NewManager(newTestClient(t, caddyServer.URL), db, t.TempDir(), "", false, config.SecurityConfig{CerberusEnabled: true})
+ require.NoError(t, manager.ApplyConfig(context.Background()))
+ require.NotEmpty(t, loadBody)
+
+ require.False(t, bytes.Contains(loadBody, []byte(`"keepalive_idle"`)))
+ require.False(t, bytes.Contains(loadBody, []byte(`"keepalive_count"`)))
+}
diff --git a/backend/internal/caddy/types.go b/backend/internal/caddy/types.go
index 5fce7ba8..474964b1 100644
--- a/backend/internal/caddy/types.go
+++ b/backend/internal/caddy/types.go
@@ -83,6 +83,8 @@ type Server struct {
AutoHTTPS *AutoHTTPSConfig `json:"automatic_https,omitempty"`
Logs *ServerLogs `json:"logs,omitempty"`
TrustedProxies *TrustedProxies `json:"trusted_proxies,omitempty"`
+ KeepaliveIdle *string `json:"keepalive_idle,omitempty"`
+ KeepaliveCount *int `json:"keepalive_count,omitempty"`
}
// TrustedProxies defines the module for configuring trusted proxy IP ranges.
diff --git a/backend/internal/config/config.go b/backend/internal/config/config.go
index 1e2f9520..a6809456 100644
--- a/backend/internal/config/config.go
+++ b/backend/internal/config/config.go
@@ -7,6 +7,8 @@ import (
"path/filepath"
"strconv"
"strings"
+
+ "github.com/Wikid82/charon/backend/internal/security"
)
// Config captures runtime configuration sourced from environment variables.
@@ -106,6 +108,17 @@ func Load() (Config, error) {
Debug: getEnvAny("false", "CHARON_DEBUG", "CPM_DEBUG") == "true",
}
+ allowedInternalHosts := security.InternalServiceHostAllowlist()
+ normalizedCaddyAdminURL, err := security.ValidateInternalServiceBaseURL(
+ cfg.CaddyAdminAPI,
+ 2019,
+ allowedInternalHosts,
+ )
+ if err != nil {
+ return Config{}, fmt.Errorf("validate caddy admin api url: %w", err)
+ }
+ cfg.CaddyAdminAPI = normalizedCaddyAdminURL.String()
+
if err := os.MkdirAll(filepath.Dir(cfg.DatabasePath), 0o700); err != nil {
return Config{}, fmt.Errorf("ensure data directory: %w", err)
}
diff --git a/backend/internal/config/config_test.go b/backend/internal/config/config_test.go
index 4cbd3865..98597da7 100644
--- a/backend/internal/config/config_test.go
+++ b/backend/internal/config/config_test.go
@@ -258,6 +258,32 @@ func TestLoad_EmergencyConfig(t *testing.T) {
assert.Equal(t, "testpass", cfg.Emergency.BasicAuthPassword)
}
+func TestLoad_CaddyAdminAPIValidationAndNormalization(t *testing.T) {
+ tempDir := t.TempDir()
+ t.Setenv("CHARON_DB_PATH", filepath.Join(tempDir, "test.db"))
+ t.Setenv("CHARON_CADDY_CONFIG_DIR", filepath.Join(tempDir, "caddy"))
+ t.Setenv("CHARON_IMPORT_DIR", filepath.Join(tempDir, "imports"))
+ t.Setenv("CHARON_SSRF_INTERNAL_HOST_ALLOWLIST", "")
+ t.Setenv("CHARON_CADDY_ADMIN_API", "http://localhost:2019/config/")
+
+ cfg, err := Load()
+ require.NoError(t, err)
+ assert.Equal(t, "http://localhost:2019", cfg.CaddyAdminAPI)
+}
+
+func TestLoad_CaddyAdminAPIValidationRejectsNonAllowlistedHost(t *testing.T) {
+ tempDir := t.TempDir()
+ t.Setenv("CHARON_DB_PATH", filepath.Join(tempDir, "test.db"))
+ t.Setenv("CHARON_CADDY_CONFIG_DIR", filepath.Join(tempDir, "caddy"))
+ t.Setenv("CHARON_IMPORT_DIR", filepath.Join(tempDir, "imports"))
+ t.Setenv("CHARON_SSRF_INTERNAL_HOST_ALLOWLIST", "")
+ t.Setenv("CHARON_CADDY_ADMIN_API", "http://example.com:2019")
+
+ _, err := Load()
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "validate caddy admin api url")
+}
+
// ============================================
// splitAndTrim Tests
// ============================================
diff --git a/backend/internal/models/notification_provider.go b/backend/internal/models/notification_provider.go
index 2a0d6c9c..9d6427ec 100644
--- a/backend/internal/models/notification_provider.go
+++ b/backend/internal/models/notification_provider.go
@@ -14,6 +14,7 @@ type NotificationProvider struct {
Type string `json:"type" gorm:"index"` // discord (only supported type in current rollout)
URL string `json:"url"` // Discord webhook URL (HTTPS format required)
Token string `json:"-"` // Auth token for providers (e.g., Gotify) - never exposed in API
+ HasToken bool `json:"has_token" gorm:"-"` // Computed: indicates whether a token is set (never exposes raw value)
Engine string `json:"engine,omitempty" gorm:"index"` // notify_v1 (notify-only runtime)
Config string `json:"config"` // JSON payload template for custom webhooks
ServiceConfig string `json:"service_config,omitempty" gorm:"type:text"` // JSON blob for typed service config
diff --git a/backend/internal/notifications/feature_flags.go b/backend/internal/notifications/feature_flags.go
index 048edfeb..f6792963 100644
--- a/backend/internal/notifications/feature_flags.go
+++ b/backend/internal/notifications/feature_flags.go
@@ -4,5 +4,6 @@ const (
FlagNotifyEngineEnabled = "feature.notifications.engine.notify_v1.enabled"
FlagDiscordServiceEnabled = "feature.notifications.service.discord.enabled"
FlagGotifyServiceEnabled = "feature.notifications.service.gotify.enabled"
+ FlagWebhookServiceEnabled = "feature.notifications.service.webhook.enabled"
FlagSecurityProviderEventsEnabled = "feature.notifications.security_provider_events.enabled"
)
diff --git a/backend/internal/notifications/http_client_executor.go b/backend/internal/notifications/http_client_executor.go
new file mode 100644
index 00000000..25041951
--- /dev/null
+++ b/backend/internal/notifications/http_client_executor.go
@@ -0,0 +1,7 @@
+package notifications
+
+import "net/http"
+
+func executeNotifyRequest(client *http.Client, req *http.Request) (*http.Response, error) {
+ return client.Do(req)
+}
diff --git a/backend/internal/notifications/http_wrapper.go b/backend/internal/notifications/http_wrapper.go
new file mode 100644
index 00000000..981b74e3
--- /dev/null
+++ b/backend/internal/notifications/http_wrapper.go
@@ -0,0 +1,507 @@
+package notifications
+
+import (
+ "bytes"
+ "context"
+ crand "crypto/rand"
+ "errors"
+ "fmt"
+ "io"
+ "math/big"
+ "net"
+ "net/http"
+ neturl "net/url"
+ "os"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/Wikid82/charon/backend/internal/network"
+ "github.com/Wikid82/charon/backend/internal/security"
+)
+
+const (
+ MaxNotifyRequestBodyBytes = 256 * 1024
+ MaxNotifyResponseBodyBytes = 1024 * 1024
+)
+
+type RetryPolicy struct {
+ MaxAttempts int
+ BaseDelay time.Duration
+ MaxDelay time.Duration
+}
+
+type HTTPWrapperRequest struct {
+ URL string
+ Headers map[string]string
+ Body []byte
+}
+
+type HTTPWrapperResult struct {
+ StatusCode int
+ ResponseBody []byte
+ Attempts int
+}
+
+type HTTPWrapper struct {
+ retryPolicy RetryPolicy
+ allowHTTP bool
+ maxRedirects int
+ httpClientFactory func(allowHTTP bool, maxRedirects int) *http.Client
+ sleep func(time.Duration)
+ jitterNanos func(int64) int64
+}
+
+func NewNotifyHTTPWrapper() *HTTPWrapper {
+ return &HTTPWrapper{
+ retryPolicy: RetryPolicy{
+ MaxAttempts: 3,
+ BaseDelay: 200 * time.Millisecond,
+ MaxDelay: 2 * time.Second,
+ },
+ allowHTTP: allowNotifyHTTPOverride(),
+ maxRedirects: notifyMaxRedirects(),
+ httpClientFactory: func(allowHTTP bool, maxRedirects int) *http.Client {
+ opts := []network.Option{network.WithTimeout(10 * time.Second), network.WithMaxRedirects(maxRedirects)}
+ if allowHTTP {
+ opts = append(opts, network.WithAllowLocalhost())
+ }
+ return network.NewSafeHTTPClient(opts...)
+ },
+ sleep: time.Sleep,
+ }
+}
+
+func (w *HTTPWrapper) Send(ctx context.Context, request HTTPWrapperRequest) (*HTTPWrapperResult, error) {
+ if len(request.Body) > MaxNotifyRequestBodyBytes {
+ return nil, fmt.Errorf("request payload exceeds maximum size")
+ }
+
+ validatedURL, err := w.validateURL(request.URL)
+ if err != nil {
+ return nil, err
+ }
+
+ parsedValidatedURL, err := neturl.Parse(validatedURL)
+ if err != nil {
+ return nil, fmt.Errorf("destination URL validation failed")
+ }
+
+ validationOptions := []security.ValidationOption{}
+ if w.allowHTTP {
+ validationOptions = append(validationOptions, security.WithAllowHTTP(), security.WithAllowLocalhost())
+ }
+
+ safeURL, safeURLErr := security.ValidateExternalURL(parsedValidatedURL.String(), validationOptions...)
+ if safeURLErr != nil {
+ return nil, fmt.Errorf("destination URL validation failed")
+ }
+
+ safeParsedURL, safeParseErr := neturl.Parse(safeURL)
+ if safeParseErr != nil {
+ return nil, fmt.Errorf("destination URL validation failed")
+ }
+
+ if err := w.guardDestination(safeParsedURL); err != nil {
+ return nil, err
+ }
+
+ safeRequestURL, hostHeader, safeRequestErr := w.buildSafeRequestURL(safeParsedURL)
+ if safeRequestErr != nil {
+ return nil, safeRequestErr
+ }
+
+ headers := sanitizeOutboundHeaders(request.Headers)
+ client := w.httpClientFactory(w.allowHTTP, w.maxRedirects)
+ w.applyRedirectGuard(client)
+
+ var lastErr error
+ for attempt := 1; attempt <= w.retryPolicy.MaxAttempts; attempt++ {
+ httpReq, reqErr := http.NewRequestWithContext(ctx, http.MethodPost, safeRequestURL.String(), bytes.NewReader(request.Body))
+ if reqErr != nil {
+ return nil, fmt.Errorf("create outbound request: %w", reqErr)
+ }
+
+ httpReq.Host = hostHeader
+
+ for key, value := range headers {
+ httpReq.Header.Set(key, value)
+ }
+
+ if httpReq.Header.Get("Content-Type") == "" {
+ httpReq.Header.Set("Content-Type", "application/json")
+ }
+
+ resp, doErr := executeNotifyRequest(client, httpReq)
+ if doErr != nil {
+ lastErr = doErr
+ if attempt < w.retryPolicy.MaxAttempts && shouldRetry(nil, doErr) {
+ w.waitBeforeRetry(attempt)
+ continue
+ }
+ return nil, fmt.Errorf("outbound request failed: %s", sanitizeTransportErrorReason(doErr))
+ }
+
+ body, bodyErr := readCappedResponseBody(resp.Body)
+ closeErr := resp.Body.Close()
+ if bodyErr != nil {
+ return nil, bodyErr
+ }
+ if closeErr != nil {
+ return nil, fmt.Errorf("close response body: %w", closeErr)
+ }
+
+ if shouldRetry(resp, nil) && attempt < w.retryPolicy.MaxAttempts {
+ w.waitBeforeRetry(attempt)
+ continue
+ }
+
+ if resp.StatusCode >= http.StatusBadRequest {
+ return nil, fmt.Errorf("provider returned status %d", resp.StatusCode)
+ }
+
+ return &HTTPWrapperResult{
+ StatusCode: resp.StatusCode,
+ ResponseBody: body,
+ Attempts: attempt,
+ }, nil
+ }
+
+ if lastErr != nil {
+ return nil, fmt.Errorf("provider request failed after retries: %s", sanitizeTransportErrorReason(lastErr))
+ }
+
+ return nil, fmt.Errorf("provider request failed")
+}
+
+func sanitizeTransportErrorReason(err error) string {
+ if err == nil {
+ return "connection failed"
+ }
+
+ errText := strings.ToLower(strings.TrimSpace(err.Error()))
+
+ switch {
+ case strings.Contains(errText, "no such host"):
+ return "dns lookup failed"
+ case strings.Contains(errText, "connection refused"):
+ return "connection refused"
+ case strings.Contains(errText, "no route to host") || strings.Contains(errText, "network is unreachable"):
+ return "network unreachable"
+ case strings.Contains(errText, "timeout") || strings.Contains(errText, "deadline exceeded"):
+ return "request timed out"
+ case strings.Contains(errText, "tls") || strings.Contains(errText, "certificate") || strings.Contains(errText, "x509"):
+ return "tls handshake failed"
+ default:
+ return "connection failed"
+ }
+}
+
+func (w *HTTPWrapper) applyRedirectGuard(client *http.Client) {
+ if client == nil {
+ return
+ }
+
+ originalCheckRedirect := client.CheckRedirect
+ client.CheckRedirect = func(req *http.Request, via []*http.Request) error {
+ if originalCheckRedirect != nil {
+ if err := originalCheckRedirect(req, via); err != nil {
+ return err
+ }
+ }
+
+ return w.guardOutboundRequestURL(req)
+ }
+}
+
+func (w *HTTPWrapper) validateURL(rawURL string) (string, error) {
+ parsedURL, err := neturl.Parse(rawURL)
+ if err != nil {
+ return "", fmt.Errorf("invalid destination URL")
+ }
+
+ if hasDisallowedQueryAuthKey(parsedURL.Query()) {
+ return "", fmt.Errorf("destination URL query authentication is not allowed")
+ }
+
+ options := []security.ValidationOption{}
+ if w.allowHTTP {
+ options = append(options, security.WithAllowHTTP(), security.WithAllowLocalhost())
+ }
+
+ validatedURL, err := security.ValidateExternalURL(rawURL, options...)
+ if err != nil {
+ return "", fmt.Errorf("destination URL validation failed")
+ }
+
+ return validatedURL, nil
+}
+
+func hasDisallowedQueryAuthKey(query neturl.Values) bool {
+ for key := range query {
+ normalizedKey := strings.ToLower(strings.TrimSpace(key))
+ switch normalizedKey {
+ case "token", "auth", "apikey", "api_key":
+ return true
+ }
+ }
+
+ return false
+}
+
+func (w *HTTPWrapper) guardOutboundRequestURL(httpReq *http.Request) error {
+ if httpReq == nil || httpReq.URL == nil {
+ return fmt.Errorf("destination URL validation failed")
+ }
+
+ reqURL := httpReq.URL.String()
+ validatedURL, err := w.validateURL(reqURL)
+ if err != nil {
+ return err
+ }
+
+ parsedValidatedURL, err := neturl.Parse(validatedURL)
+ if err != nil {
+ return fmt.Errorf("destination URL validation failed")
+ }
+
+ return w.guardDestination(parsedValidatedURL)
+}
+
+func (w *HTTPWrapper) guardDestination(destinationURL *neturl.URL) error {
+ if destinationURL == nil {
+ return fmt.Errorf("destination URL validation failed")
+ }
+
+ if destinationURL.User != nil || destinationURL.Fragment != "" {
+ return fmt.Errorf("destination URL validation failed")
+ }
+
+ hostname := strings.TrimSpace(destinationURL.Hostname())
+ if hostname == "" {
+ return fmt.Errorf("destination URL validation failed")
+ }
+
+ if parsedIP := net.ParseIP(hostname); parsedIP != nil {
+ if !w.isAllowedDestinationIP(hostname, parsedIP) {
+ return fmt.Errorf("destination URL validation failed")
+ }
+ return nil
+ }
+
+ resolvedIPs, err := net.LookupIP(hostname)
+ if err != nil || len(resolvedIPs) == 0 {
+ return fmt.Errorf("destination URL validation failed")
+ }
+
+ for _, resolvedIP := range resolvedIPs {
+ if !w.isAllowedDestinationIP(hostname, resolvedIP) {
+ return fmt.Errorf("destination URL validation failed")
+ }
+ }
+
+ return nil
+}
+
+func (w *HTTPWrapper) isAllowedDestinationIP(hostname string, ip net.IP) bool {
+ if ip == nil {
+ return false
+ }
+
+ if ip.IsUnspecified() || ip.IsMulticast() || ip.IsLinkLocalUnicast() || ip.IsLinkLocalMulticast() {
+ return false
+ }
+
+ if ip.IsLoopback() {
+ return w.allowHTTP && isLocalDestinationHost(hostname)
+ }
+
+ if network.IsPrivateIP(ip) {
+ return false
+ }
+
+ return true
+}
+
+func (w *HTTPWrapper) buildSafeRequestURL(destinationURL *neturl.URL) (*neturl.URL, string, error) {
+ if destinationURL == nil {
+ return nil, "", fmt.Errorf("destination URL validation failed")
+ }
+
+ hostname := strings.TrimSpace(destinationURL.Hostname())
+ if hostname == "" {
+ return nil, "", fmt.Errorf("destination URL validation failed")
+ }
+
+ // Validate destination IPs are allowed (defense-in-depth alongside safeDialer).
+ _, err := w.resolveAllowedDestinationIP(hostname)
+ if err != nil {
+ return nil, "", err
+ }
+
+ // Preserve the original hostname in the URL so Go's TLS layer derives the
+ // correct ServerName for SNI and certificate verification. The safeDialer
+ // resolves DNS, validates IPs against SSRF rules, and connects to a
+ // validated IP at dial time, so protection is maintained without
+ // IP-pinning in the URL.
+ safeRequestURL := &neturl.URL{
+ Scheme: destinationURL.Scheme,
+ Host: destinationURL.Host,
+ Path: destinationURL.EscapedPath(),
+ RawQuery: destinationURL.RawQuery,
+ }
+
+ if safeRequestURL.Path == "" {
+ safeRequestURL.Path = "/"
+ }
+
+ return safeRequestURL, destinationURL.Host, nil
+}
+
+func (w *HTTPWrapper) resolveAllowedDestinationIP(hostname string) (net.IP, error) {
+ if parsedIP := net.ParseIP(hostname); parsedIP != nil {
+ if !w.isAllowedDestinationIP(hostname, parsedIP) {
+ return nil, fmt.Errorf("destination URL validation failed")
+ }
+ return parsedIP, nil
+ }
+
+ resolvedIPs, err := net.LookupIP(hostname)
+ if err != nil || len(resolvedIPs) == 0 {
+ return nil, fmt.Errorf("destination URL validation failed")
+ }
+
+ for _, resolvedIP := range resolvedIPs {
+ if w.isAllowedDestinationIP(hostname, resolvedIP) {
+ return resolvedIP, nil
+ }
+ }
+
+ return nil, fmt.Errorf("destination URL validation failed")
+}
+
+func isLocalDestinationHost(host string) bool {
+ trimmedHost := strings.TrimSpace(host)
+ if strings.EqualFold(trimmedHost, "localhost") {
+ return true
+ }
+
+ parsedIP := net.ParseIP(trimmedHost)
+ return parsedIP != nil && parsedIP.IsLoopback()
+}
+
+func shouldRetry(resp *http.Response, err error) bool {
+ if err != nil {
+ var netErr net.Error
+ if isNetErr := strings.Contains(strings.ToLower(err.Error()), "timeout") || strings.Contains(strings.ToLower(err.Error()), "connection"); isNetErr {
+ return true
+ }
+ return errors.As(err, &netErr)
+ }
+
+ if resp == nil {
+ return false
+ }
+
+ if resp.StatusCode == http.StatusTooManyRequests {
+ return true
+ }
+
+ return resp.StatusCode >= http.StatusInternalServerError
+}
+
+func readCappedResponseBody(body io.Reader) ([]byte, error) {
+ limited := io.LimitReader(body, MaxNotifyResponseBodyBytes+1)
+ content, err := io.ReadAll(limited)
+ if err != nil {
+ return nil, fmt.Errorf("read response body: %w", err)
+ }
+
+ if len(content) > MaxNotifyResponseBodyBytes {
+ return nil, fmt.Errorf("response payload exceeds maximum size")
+ }
+
+ return content, nil
+}
+
+func sanitizeOutboundHeaders(headers map[string]string) map[string]string {
+ allowed := map[string]struct{}{
+ "content-type": {},
+ "user-agent": {},
+ "x-request-id": {},
+ "x-gotify-key": {},
+ }
+
+ sanitized := make(map[string]string)
+ for key, value := range headers {
+ normalizedKey := strings.ToLower(strings.TrimSpace(key))
+ if _, ok := allowed[normalizedKey]; !ok {
+ continue
+ }
+ sanitized[http.CanonicalHeaderKey(normalizedKey)] = strings.TrimSpace(value)
+ }
+
+ return sanitized
+}
+
+func (w *HTTPWrapper) waitBeforeRetry(attempt int) {
+ delay := w.retryPolicy.BaseDelay << (attempt - 1)
+ if delay > w.retryPolicy.MaxDelay {
+ delay = w.retryPolicy.MaxDelay
+ }
+
+ jitterFn := w.jitterNanos
+ if jitterFn == nil {
+ jitterFn = func(max int64) int64 {
+ if max <= 0 {
+ return 0
+ }
+ n, err := crand.Int(crand.Reader, big.NewInt(max))
+ if err != nil {
+ return 0
+ }
+ return n.Int64()
+ }
+ }
+
+ jitter := time.Duration(jitterFn(int64(delay) / 2))
+ sleepFn := w.sleep
+ if sleepFn == nil {
+ sleepFn = time.Sleep
+ }
+ sleepFn(delay + jitter)
+}
+
+func allowNotifyHTTPOverride() bool {
+ if strings.HasSuffix(os.Args[0], ".test") {
+ return true
+ }
+
+ allowHTTP := strings.EqualFold(strings.TrimSpace(os.Getenv("CHARON_NOTIFY_ALLOW_HTTP")), "true")
+ if !allowHTTP {
+ return false
+ }
+
+ environment := strings.ToLower(strings.TrimSpace(os.Getenv("CHARON_ENV")))
+ return environment == "development" || environment == "test"
+}
+
+func notifyMaxRedirects() int {
+ raw := strings.TrimSpace(os.Getenv("CHARON_NOTIFY_MAX_REDIRECTS"))
+ if raw == "" {
+ return 0
+ }
+
+ value, err := strconv.Atoi(raw)
+ if err != nil {
+ return 0
+ }
+
+ if value < 0 {
+ return 0
+ }
+ if value > 5 {
+ return 5
+ }
+ return value
+}
diff --git a/backend/internal/notifications/http_wrapper_test.go b/backend/internal/notifications/http_wrapper_test.go
new file mode 100644
index 00000000..6262c091
--- /dev/null
+++ b/backend/internal/notifications/http_wrapper_test.go
@@ -0,0 +1,923 @@
+package notifications
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "net"
+ "net/http"
+ "net/http/httptest"
+ neturl "net/url"
+ "strings"
+ "sync/atomic"
+ "testing"
+ "time"
+)
+
+func TestHTTPWrapperRejectsOversizedRequestBody(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+
+ payload := make([]byte, MaxNotifyRequestBodyBytes+1)
+ _, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: "http://example.com/hook",
+ Body: payload,
+ })
+ if err == nil || !strings.Contains(err.Error(), "request payload exceeds") {
+ t.Fatalf("expected oversized request body error, got: %v", err)
+ }
+}
+
+func TestHTTPWrapperRejectsTokenizedQueryURL(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+
+ _, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: "http://example.com/hook?token=secret",
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err == nil || !strings.Contains(err.Error(), "query authentication is not allowed") {
+ t.Fatalf("expected query token rejection, got: %v", err)
+ }
+}
+
+func TestHTTPWrapperRejectsQueryAuthCaseVariants(t *testing.T) {
+ testCases := []string{
+ "http://example.com/hook?Token=secret",
+ "http://example.com/hook?AUTH=secret",
+ "http://example.com/hook?apiKey=secret",
+ }
+
+ for _, testURL := range testCases {
+ t.Run(testURL, func(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+
+ _, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: testURL,
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err == nil || !strings.Contains(err.Error(), "query authentication is not allowed") {
+ t.Fatalf("expected query auth rejection for %q, got: %v", testURL, err)
+ }
+ })
+ }
+}
+
+func TestHTTPWrapperSendRejectsRedirectTargetWithDisallowedScheme(t *testing.T) {
+ var attempts int32
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ atomic.AddInt32(&attempts, 1)
+ http.Redirect(w, r, "ftp://example.com/redirected", http.StatusFound)
+ }))
+ defer server.Close()
+
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+ wrapper.maxRedirects = 3
+ wrapper.retryPolicy.MaxAttempts = 1
+
+ _, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: server.URL,
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err == nil || !strings.Contains(err.Error(), "outbound request failed") {
+ t.Fatalf("expected outbound failure due to redirect target validation, got: %v", err)
+ }
+ if got := atomic.LoadInt32(&attempts); got != 1 {
+ t.Fatalf("expected only initial request due to blocked redirect, got %d attempts", got)
+ }
+}
+
+func TestHTTPWrapperSendRejectsRedirectTargetWithMixedCaseQueryAuth(t *testing.T) {
+ var attempts int32
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ atomic.AddInt32(&attempts, 1)
+ http.Redirect(w, r, "https://example.com/redirected?Token=secret", http.StatusFound)
+ }))
+ defer server.Close()
+
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+ wrapper.maxRedirects = 3
+ wrapper.retryPolicy.MaxAttempts = 1
+
+ _, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: server.URL,
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err == nil || !strings.Contains(err.Error(), "outbound request failed") {
+ t.Fatalf("expected outbound failure due to redirect query auth validation, got: %v", err)
+ }
+ if got := atomic.LoadInt32(&attempts); got != 1 {
+ t.Fatalf("expected only initial request due to blocked redirect, got %d attempts", got)
+ }
+}
+
+func TestHTTPWrapperRetriesOn429ThenSucceeds(t *testing.T) {
+ var calls int32
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ current := atomic.AddInt32(&calls, 1)
+ if current == 1 {
+ w.WriteHeader(http.StatusTooManyRequests)
+ return
+ }
+ w.WriteHeader(http.StatusOK)
+ _, _ = w.Write([]byte("ok"))
+ }))
+ defer server.Close()
+
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+ wrapper.sleep = func(time.Duration) {}
+ wrapper.jitterNanos = func(int64) int64 { return 0 }
+
+ result, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: server.URL,
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err != nil {
+ t.Fatalf("expected success after retry, got error: %v", err)
+ }
+ if result.Attempts != 2 {
+ t.Fatalf("expected 2 attempts, got %d", result.Attempts)
+ }
+}
+
+func TestHTTPWrapperSendSuccessWithValidatedDestination(t *testing.T) {
+ server := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if got := r.Header.Get("Content-Type"); got != "application/json" {
+ t.Fatalf("expected default content-type, got %q", got)
+ }
+ w.WriteHeader(http.StatusOK)
+ _, _ = w.Write([]byte("ok"))
+ }))
+ defer server.Close()
+
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+ wrapper.retryPolicy.MaxAttempts = 1
+ wrapper.httpClientFactory = func(bool, int) *http.Client {
+ return server.Client()
+ }
+
+ result, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: server.URL,
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err != nil {
+ t.Fatalf("expected successful send, got error: %v", err)
+ }
+ if result.Attempts != 1 {
+ t.Fatalf("expected 1 attempt, got %d", result.Attempts)
+ }
+ if result.StatusCode != http.StatusOK {
+ t.Fatalf("expected status %d, got %d", http.StatusOK, result.StatusCode)
+ }
+}
+
+func TestHTTPWrapperSendRejectsUserInfoInDestinationURL(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+
+ _, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: "https://user:pass@example.com/hook",
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected destination validation failure, got: %v", err)
+ }
+}
+
+func TestHTTPWrapperSendRejectsFragmentInDestinationURL(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+
+ _, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: "https://example.com/hook#fragment",
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected destination validation failure, got: %v", err)
+ }
+}
+
+func TestHTTPWrapperDoesNotRetryOn400(t *testing.T) {
+ var calls int32
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ atomic.AddInt32(&calls, 1)
+ w.WriteHeader(http.StatusBadRequest)
+ }))
+ defer server.Close()
+
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+ wrapper.sleep = func(time.Duration) {}
+ wrapper.jitterNanos = func(int64) int64 { return 0 }
+
+ _, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: server.URL,
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err == nil || !strings.Contains(err.Error(), "status 400") {
+ t.Fatalf("expected non-retryable 400 error, got: %v", err)
+ }
+ if atomic.LoadInt32(&calls) != 1 {
+ t.Fatalf("expected exactly one request attempt, got %d", calls)
+ }
+}
+
+func TestHTTPWrapperResponseBodyCap(t *testing.T) {
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ _, _ = io.WriteString(w, strings.Repeat("x", MaxNotifyResponseBodyBytes+8))
+ }))
+ defer server.Close()
+
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+
+ _, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: server.URL,
+ Body: []byte(`{"message":"hello"}`),
+ })
+ if err == nil || !strings.Contains(err.Error(), "response payload exceeds") {
+ t.Fatalf("expected capped response body error, got: %v", err)
+ }
+}
+
+func TestSanitizeOutboundHeadersAllowlist(t *testing.T) {
+ headers := sanitizeOutboundHeaders(map[string]string{
+ "Content-Type": "application/json",
+ "User-Agent": "Charon",
+ "X-Request-ID": "abc",
+ "X-Gotify-Key": "secret",
+ "Authorization": "Bearer token",
+ "Cookie": "sid=1",
+ })
+
+ if len(headers) != 4 {
+ t.Fatalf("expected 4 allowed headers, got %d", len(headers))
+ }
+ if _, ok := headers["Authorization"]; ok {
+ t.Fatalf("authorization header must be stripped")
+ }
+ if _, ok := headers["Cookie"]; ok {
+ t.Fatalf("cookie header must be stripped")
+ }
+}
+
+func TestHTTPWrapperGuardOutboundRequestURLRejectsNilRequest(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+
+ err := wrapper.guardOutboundRequestURL(nil)
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected validation failure for nil request, got: %v", err)
+ }
+}
+
+func TestHTTPWrapperGuardOutboundRequestURLRejectsQueryAuth(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+
+ httpReq := &http.Request{URL: &neturl.URL{Scheme: "http", Host: "example.com", Path: "/hook", RawQuery: "token=secret"}}
+ err := wrapper.guardOutboundRequestURL(httpReq)
+ if err == nil || !strings.Contains(err.Error(), "query authentication is not allowed") {
+ t.Fatalf("expected query auth rejection, got: %v", err)
+ }
+}
+
+func TestHTTPWrapperGuardOutboundRequestURLRejectsMixedCaseQueryAuth(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+
+ httpReq := &http.Request{URL: &neturl.URL{Scheme: "http", Host: "example.com", Path: "/hook", RawQuery: "apiKey=secret"}}
+ err := wrapper.guardOutboundRequestURL(httpReq)
+ if err == nil || !strings.Contains(err.Error(), "query authentication is not allowed") {
+ t.Fatalf("expected query auth rejection, got: %v", err)
+ }
+}
+
+func TestHTTPWrapperApplyRedirectGuardPreservesOriginalBehavior(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ baseErr := fmt.Errorf("base redirect policy")
+ client := &http.Client{CheckRedirect: func(*http.Request, []*http.Request) error {
+ return baseErr
+ }}
+
+ wrapper.applyRedirectGuard(client)
+ err := client.CheckRedirect(&http.Request{URL: &neturl.URL{Scheme: "https", Host: "example.com"}}, nil)
+ if !errors.Is(err, baseErr) {
+ t.Fatalf("expected original redirect policy error, got: %v", err)
+ }
+}
+
+func TestHTTPWrapperGuardOutboundRequestURLRejectsUnsafeDestination(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = false
+
+ httpReq := &http.Request{URL: &neturl.URL{Scheme: "http", Host: "example.com", Path: "/hook"}}
+ err := wrapper.guardOutboundRequestURL(httpReq)
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected destination validation failure, got: %v", err)
+ }
+}
+
+func TestHTTPWrapperGuardOutboundRequestURLAllowsValidatedDestination(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+
+ httpReq := &http.Request{URL: &neturl.URL{Scheme: "https", Host: "example.com", Path: "/hook"}}
+ err := wrapper.guardOutboundRequestURL(httpReq)
+ if err != nil {
+ t.Fatalf("expected validated destination to pass guard, got: %v", err)
+ }
+}
+
+func TestHTTPWrapperGuardOutboundRequestURLRejectsUserInfo(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+
+ httpReq := &http.Request{URL: &neturl.URL{Scheme: "http", Host: "127.0.0.1", User: neturl.UserPassword("user", "pass"), Path: "/hook"}}
+ err := wrapper.guardOutboundRequestURL(httpReq)
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected userinfo rejection, got: %v", err)
+ }
+}
+
+func TestHTTPWrapperGuardOutboundRequestURLRejectsFragment(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+
+ httpReq := &http.Request{URL: &neturl.URL{Scheme: "https", Host: "example.com", Path: "/hook", Fragment: "frag"}}
+ err := wrapper.guardOutboundRequestURL(httpReq)
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected fragment rejection, got: %v", err)
+ }
+}
+
+func TestSanitizeTransportErrorReason(t *testing.T) {
+ tests := []struct {
+ name string
+ err error
+ expected string
+ }{
+ {name: "nil error", err: nil, expected: "connection failed"},
+ {name: "dns error", err: errors.New("dial tcp: lookup gotify.example: no such host"), expected: "dns lookup failed"},
+ {name: "connection refused", err: errors.New("connect: connection refused"), expected: "connection refused"},
+ {name: "network unreachable", err: errors.New("connect: no route to host"), expected: "network unreachable"},
+ {name: "timeout", err: errors.New("context deadline exceeded"), expected: "request timed out"},
+ {name: "tls failure", err: errors.New("tls: handshake failure"), expected: "tls handshake failed"},
+ {name: "fallback", err: errors.New("some unexpected transport error"), expected: "connection failed"},
+ }
+
+ for _, testCase := range tests {
+ t.Run(testCase.name, func(t *testing.T) {
+ actual := sanitizeTransportErrorReason(testCase.err)
+ if actual != testCase.expected {
+ t.Fatalf("expected %q, got %q", testCase.expected, actual)
+ }
+ })
+ }
+}
+
+func TestBuildSafeRequestURLPreservesHostnameForTLS(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+
+ destinationURL := &neturl.URL{
+ Scheme: "https",
+ Host: "example.com",
+ Path: "/webhook",
+ }
+
+ safeURL, hostHeader, err := wrapper.buildSafeRequestURL(destinationURL)
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+
+ if safeURL.Hostname() != "example.com" {
+ t.Fatalf("expected hostname 'example.com' preserved in URL for TLS SNI, got %q", safeURL.Hostname())
+ }
+
+ if hostHeader != "example.com" {
+ t.Fatalf("expected host header 'example.com', got %q", hostHeader)
+ }
+
+ if safeURL.Scheme != "https" {
+ t.Fatalf("expected scheme 'https', got %q", safeURL.Scheme)
+ }
+
+ if safeURL.Path != "/webhook" {
+ t.Fatalf("expected path '/webhook', got %q", safeURL.Path)
+ }
+}
+
+func TestBuildSafeRequestURLDefaultsEmptyPathToSlash(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+
+ destinationURL := &neturl.URL{
+ Scheme: "http",
+ Host: "localhost",
+ }
+
+ safeURL, _, err := wrapper.buildSafeRequestURL(destinationURL)
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+
+ if safeURL.Path != "/" {
+ t.Fatalf("expected default path '/', got %q", safeURL.Path)
+ }
+}
+
+func TestBuildSafeRequestURLPreservesQueryString(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+
+ destinationURL := &neturl.URL{
+ Scheme: "https",
+ Host: "example.com",
+ Path: "/hook",
+ RawQuery: "key=value",
+ }
+
+ safeURL, _, err := wrapper.buildSafeRequestURL(destinationURL)
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+
+ if safeURL.RawQuery != "key=value" {
+ t.Fatalf("expected query 'key=value', got %q", safeURL.RawQuery)
+ }
+}
+
+func TestBuildSafeRequestURLRejectsNilDestination(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+
+ _, _, err := wrapper.buildSafeRequestURL(nil)
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected validation failure for nil URL, got: %v", err)
+ }
+}
+
+func TestBuildSafeRequestURLRejectsEmptyHostname(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+
+ destinationURL := &neturl.URL{
+ Scheme: "https",
+ Host: "",
+ Path: "/hook",
+ }
+
+ _, _, err := wrapper.buildSafeRequestURL(destinationURL)
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected validation failure for empty hostname, got: %v", err)
+ }
+}
+
+func TestBuildSafeRequestURLWithTLSServer(t *testing.T) {
+ server := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ }))
+ defer server.Close()
+
+ serverURL, _ := neturl.Parse(server.URL)
+
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+
+ safeURL, hostHeader, err := wrapper.buildSafeRequestURL(serverURL)
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+
+ if safeURL.Host != serverURL.Host {
+ t.Fatalf("expected host %q preserved for TLS, got %q", serverURL.Host, safeURL.Host)
+ }
+
+ if hostHeader != serverURL.Host {
+ t.Fatalf("expected host header %q, got %q", serverURL.Host, hostHeader)
+ }
+}
+
+// ===== Additional coverage for uncovered paths =====
+
+type errReader struct{}
+
+func (errReader) Read([]byte) (int, error) {
+ return 0, errors.New("simulated read error")
+}
+
+type roundTripFunc func(*http.Request) (*http.Response, error)
+
+func (f roundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) {
+ return f(req)
+}
+
+func TestApplyRedirectGuardNilClient(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.applyRedirectGuard(nil)
+}
+
+func TestGuardDestinationNilURL(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ err := wrapper.guardDestination(nil)
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected validation failure for nil URL, got: %v", err)
+ }
+}
+
+func TestGuardDestinationEmptyHostname(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ err := wrapper.guardDestination(&neturl.URL{Scheme: "https", Host: ""})
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected validation failure for empty hostname, got: %v", err)
+ }
+}
+
+func TestGuardDestinationUserInfoRejection(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ u := &neturl.URL{Scheme: "https", Host: "example.com", User: neturl.User("admin")}
+ err := wrapper.guardDestination(u)
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected userinfo rejection, got: %v", err)
+ }
+}
+
+func TestGuardDestinationFragmentRejection(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ u := &neturl.URL{Scheme: "https", Host: "example.com", Fragment: "section"}
+ err := wrapper.guardDestination(u)
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected fragment rejection, got: %v", err)
+ }
+}
+
+func TestGuardDestinationPrivateIPRejection(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = false
+ err := wrapper.guardDestination(&neturl.URL{Scheme: "https", Host: "192.168.1.1"})
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected private IP rejection, got: %v", err)
+ }
+}
+
+func TestIsAllowedDestinationIPEdgeCases(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = false
+
+ tests := []struct {
+ name string
+ hostname string
+ ip net.IP
+ expected bool
+ }{
+ {"nil IP", "", nil, false},
+ {"unspecified", "0.0.0.0", net.IPv4zero, false},
+ {"multicast", "224.0.0.1", net.ParseIP("224.0.0.1"), false},
+ {"link-local unicast", "169.254.1.1", net.ParseIP("169.254.1.1"), false},
+ {"loopback without allowHTTP", "127.0.0.1", net.ParseIP("127.0.0.1"), false},
+ {"private 10.x", "10.0.0.1", net.ParseIP("10.0.0.1"), false},
+ {"private 172.16.x", "172.16.0.1", net.ParseIP("172.16.0.1"), false},
+ {"private 192.168.x", "192.168.1.1", net.ParseIP("192.168.1.1"), false},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := wrapper.isAllowedDestinationIP(tt.hostname, tt.ip)
+ if result != tt.expected {
+ t.Fatalf("isAllowedDestinationIP(%q, %v) = %v, want %v", tt.hostname, tt.ip, result, tt.expected)
+ }
+ })
+ }
+}
+
+func TestIsAllowedDestinationIPLoopbackAllowHTTP(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+
+ if !wrapper.isAllowedDestinationIP("localhost", net.ParseIP("127.0.0.1")) {
+ t.Fatal("expected loopback allowed for localhost with allowHTTP")
+ }
+
+ if wrapper.isAllowedDestinationIP("not-localhost", net.ParseIP("127.0.0.1")) {
+ t.Fatal("expected loopback rejected for non-localhost hostname")
+ }
+}
+
+func TestIsLocalDestinationHost(t *testing.T) {
+ tests := []struct {
+ host string
+ expected bool
+ }{
+ {"localhost", true},
+ {"LOCALHOST", true},
+ {"127.0.0.1", true},
+ {"::1", true},
+ {"example.com", false},
+ {"", false},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.host, func(t *testing.T) {
+ if got := isLocalDestinationHost(tt.host); got != tt.expected {
+ t.Fatalf("isLocalDestinationHost(%q) = %v, want %v", tt.host, got, tt.expected)
+ }
+ })
+ }
+}
+
+func TestShouldRetryComprehensive(t *testing.T) {
+ tests := []struct {
+ name string
+ resp *http.Response
+ err error
+ expected bool
+ }{
+ {"nil resp nil err", nil, nil, false},
+ {"timeout error string", nil, errors.New("operation timeout"), true},
+ {"connection error string", nil, errors.New("connection reset"), true},
+ {"unrelated error", nil, errors.New("json parse error"), false},
+ {"500 response", &http.Response{StatusCode: 500}, nil, true},
+ {"502 response", &http.Response{StatusCode: 502}, nil, true},
+ {"503 response", &http.Response{StatusCode: 503}, nil, true},
+ {"429 response", &http.Response{StatusCode: 429}, nil, true},
+ {"200 response", &http.Response{StatusCode: 200}, nil, false},
+ {"400 response", &http.Response{StatusCode: 400}, nil, false},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := shouldRetry(tt.resp, tt.err); got != tt.expected {
+ t.Fatalf("shouldRetry = %v, want %v", got, tt.expected)
+ }
+ })
+ }
+}
+
+func TestShouldRetryNetError(t *testing.T) {
+ netErr := &net.DNSError{Err: "no such host", Name: "example.invalid"}
+ if !shouldRetry(nil, netErr) {
+ t.Fatal("expected net.Error to trigger retry via errors.As fallback")
+ }
+}
+
+func TestReadCappedResponseBodyReadError(t *testing.T) {
+ _, err := readCappedResponseBody(errReader{})
+ if err == nil || !strings.Contains(err.Error(), "read response body") {
+ t.Fatalf("expected read body error, got: %v", err)
+ }
+}
+
+func TestReadCappedResponseBodyOversize(t *testing.T) {
+ oversized := strings.NewReader(strings.Repeat("x", MaxNotifyResponseBodyBytes+10))
+ _, err := readCappedResponseBody(oversized)
+ if err == nil || !strings.Contains(err.Error(), "response payload exceeds") {
+ t.Fatalf("expected oversize error, got: %v", err)
+ }
+}
+
+func TestReadCappedResponseBodySuccess(t *testing.T) {
+ content, err := readCappedResponseBody(strings.NewReader("hello"))
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ if string(content) != "hello" {
+ t.Fatalf("expected 'hello', got %q", string(content))
+ }
+}
+
+func TestHasDisallowedQueryAuthKeyAllVariants(t *testing.T) {
+ tests := []struct {
+ name string
+ key string
+ expected bool
+ }{
+ {"token", "token", true},
+ {"auth", "auth", true},
+ {"apikey", "apikey", true},
+ {"api_key", "api_key", true},
+ {"TOKEN uppercase", "TOKEN", true},
+ {"Api_Key mixed", "Api_Key", true},
+ {"safe key", "callback", false},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ query := neturl.Values{}
+ query.Set(tt.key, "secret")
+ if got := hasDisallowedQueryAuthKey(query); got != tt.expected {
+ t.Fatalf("hasDisallowedQueryAuthKey with key %q = %v, want %v", tt.key, got, tt.expected)
+ }
+ })
+ }
+}
+
+func TestHasDisallowedQueryAuthKeyEmptyQuery(t *testing.T) {
+ if hasDisallowedQueryAuthKey(neturl.Values{}) {
+ t.Fatal("expected empty query to be safe")
+ }
+}
+
+func TestNotifyMaxRedirects(t *testing.T) {
+ tests := []struct {
+ name string
+ envValue string
+ expected int
+ }{
+ {"empty", "", 0},
+ {"valid 3", "3", 3},
+ {"zero", "0", 0},
+ {"negative", "-1", 0},
+ {"above max", "10", 5},
+ {"exactly 5", "5", 5},
+ {"invalid", "abc", 0},
+ {"whitespace", " 2 ", 2},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Setenv("CHARON_NOTIFY_MAX_REDIRECTS", tt.envValue)
+ if got := notifyMaxRedirects(); got != tt.expected {
+ t.Fatalf("notifyMaxRedirects() = %d, want %d", got, tt.expected)
+ }
+ })
+ }
+}
+
+func TestResolveAllowedDestinationIPRejectsPrivateIP(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = false
+ _, err := wrapper.resolveAllowedDestinationIP("192.168.1.1")
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected private IP rejection, got: %v", err)
+ }
+}
+
+func TestResolveAllowedDestinationIPRejectsLoopback(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = false
+ _, err := wrapper.resolveAllowedDestinationIP("127.0.0.1")
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected loopback rejection, got: %v", err)
+ }
+}
+
+func TestResolveAllowedDestinationIPAllowsPublic(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ ip, err := wrapper.resolveAllowedDestinationIP("1.1.1.1")
+ if err != nil {
+ t.Fatalf("expected public IP to be allowed, got: %v", err)
+ }
+ if !ip.Equal(net.ParseIP("1.1.1.1")) {
+ t.Fatalf("expected 1.1.1.1, got %v", ip)
+ }
+}
+
+func TestBuildSafeRequestURLRejectsPrivateHostname(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = false
+ u := &neturl.URL{Scheme: "https", Host: "192.168.1.1", Path: "/hook"}
+ _, _, err := wrapper.buildSafeRequestURL(u)
+ if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
+ t.Fatalf("expected private host rejection, got: %v", err)
+ }
+}
+
+func TestWaitBeforeRetryBasic(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ var sleptDuration time.Duration
+ wrapper.sleep = func(d time.Duration) { sleptDuration = d }
+ wrapper.jitterNanos = func(int64) int64 { return 0 }
+ wrapper.retryPolicy.BaseDelay = 100 * time.Millisecond
+ wrapper.retryPolicy.MaxDelay = 1 * time.Second
+
+ wrapper.waitBeforeRetry(1)
+ if sleptDuration != 100*time.Millisecond {
+ t.Fatalf("expected 100ms delay for attempt 1, got %v", sleptDuration)
+ }
+
+ wrapper.waitBeforeRetry(2)
+ if sleptDuration != 200*time.Millisecond {
+ t.Fatalf("expected 200ms delay for attempt 2, got %v", sleptDuration)
+ }
+}
+
+func TestWaitBeforeRetryClampedToMax(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ var sleptDuration time.Duration
+ wrapper.sleep = func(d time.Duration) { sleptDuration = d }
+ wrapper.jitterNanos = func(int64) int64 { return 0 }
+ wrapper.retryPolicy.BaseDelay = 1 * time.Second
+ wrapper.retryPolicy.MaxDelay = 2 * time.Second
+
+ wrapper.waitBeforeRetry(5)
+ if sleptDuration != 2*time.Second {
+ t.Fatalf("expected clamped delay of 2s, got %v", sleptDuration)
+ }
+}
+
+func TestWaitBeforeRetryDefaultJitter(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.jitterNanos = nil
+ wrapper.sleep = func(time.Duration) {}
+ wrapper.retryPolicy.BaseDelay = 100 * time.Millisecond
+ wrapper.retryPolicy.MaxDelay = 1 * time.Second
+ wrapper.waitBeforeRetry(1)
+}
+
+func TestHTTPWrapperSendExhaustsRetriesOnTransportError(t *testing.T) {
+ var calls int32
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+ wrapper.sleep = func(time.Duration) {}
+ wrapper.jitterNanos = func(int64) int64 { return 0 }
+ wrapper.httpClientFactory = func(bool, int) *http.Client {
+ return &http.Client{
+ Transport: roundTripFunc(func(req *http.Request) (*http.Response, error) {
+ atomic.AddInt32(&calls, 1)
+ return nil, errors.New("connection timeout failure")
+ }),
+ }
+ }
+
+ _, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: "http://localhost:19999/hook",
+ Body: []byte(`{"msg":"test"}`),
+ })
+ if err == nil {
+ t.Fatal("expected error after transport failures")
+ }
+ if !strings.Contains(err.Error(), "outbound request failed") {
+ t.Fatalf("expected outbound request failed message, got: %v", err)
+ }
+ if got := atomic.LoadInt32(&calls); got != 3 {
+ t.Fatalf("expected 3 attempts, got %d", got)
+ }
+}
+
+func TestHTTPWrapperSendExhaustsRetriesOn500(t *testing.T) {
+ var calls int32
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ atomic.AddInt32(&calls, 1)
+ w.WriteHeader(http.StatusInternalServerError)
+ }))
+ defer server.Close()
+
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+ wrapper.sleep = func(time.Duration) {}
+ wrapper.jitterNanos = func(int64) int64 { return 0 }
+
+ _, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: server.URL,
+ Body: []byte(`{"msg":"test"}`),
+ })
+ if err == nil || !strings.Contains(err.Error(), "status 500") {
+ t.Fatalf("expected 500 status error, got: %v", err)
+ }
+ if got := atomic.LoadInt32(&calls); got != 3 {
+ t.Fatalf("expected 3 attempts for 500 retries, got %d", got)
+ }
+}
+
+func TestHTTPWrapperSendTransportErrorNoRetry(t *testing.T) {
+ wrapper := NewNotifyHTTPWrapper()
+ wrapper.allowHTTP = true
+ wrapper.retryPolicy.MaxAttempts = 1
+ wrapper.httpClientFactory = func(bool, int) *http.Client {
+ return &http.Client{
+ Transport: roundTripFunc(func(req *http.Request) (*http.Response, error) {
+ return nil, errors.New("some unretryable error")
+ }),
+ }
+ }
+
+ _, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
+ URL: "http://localhost:19999/hook",
+ Body: []byte(`{"msg":"test"}`),
+ })
+ if err == nil || !strings.Contains(err.Error(), "outbound request failed") {
+ t.Fatalf("expected outbound request failed, got: %v", err)
+ }
+}
+
+func TestSanitizeTransportErrorReasonNetworkUnreachable(t *testing.T) {
+ result := sanitizeTransportErrorReason(errors.New("connect: network is unreachable"))
+ if result != "network unreachable" {
+ t.Fatalf("expected 'network unreachable', got %q", result)
+ }
+}
+
+func TestSanitizeTransportErrorReasonCertificate(t *testing.T) {
+ result := sanitizeTransportErrorReason(errors.New("x509: certificate signed by unknown authority"))
+ if result != "tls handshake failed" {
+ t.Fatalf("expected 'tls handshake failed', got %q", result)
+ }
+}
+
+func TestAllowNotifyHTTPOverride(t *testing.T) {
+ result := allowNotifyHTTPOverride()
+ if !result {
+ t.Fatal("expected allowHTTP to be true in test binary")
+ }
+}
diff --git a/backend/internal/notifications/router.go b/backend/internal/notifications/router.go
index f77f7d94..5c19aa02 100644
--- a/backend/internal/notifications/router.go
+++ b/backend/internal/notifications/router.go
@@ -22,6 +22,8 @@ func (r *Router) ShouldUseNotify(providerType, providerEngine string, flags map[
return flags[FlagDiscordServiceEnabled]
case "gotify":
return flags[FlagGotifyServiceEnabled]
+ case "webhook":
+ return flags[FlagWebhookServiceEnabled]
default:
return false
}
diff --git a/backend/internal/notifications/router_test.go b/backend/internal/notifications/router_test.go
index e54b4581..a8ea1a44 100644
--- a/backend/internal/notifications/router_test.go
+++ b/backend/internal/notifications/router_test.go
@@ -90,3 +90,21 @@ func TestRouter_ShouldUseNotify_GotifyServiceFlag(t *testing.T) {
t.Fatalf("expected notify routing disabled for gotify when FlagGotifyServiceEnabled is false")
}
}
+
+func TestRouter_ShouldUseNotify_WebhookServiceFlag(t *testing.T) {
+ router := NewRouter()
+
+ flags := map[string]bool{
+ FlagNotifyEngineEnabled: true,
+ FlagWebhookServiceEnabled: true,
+ }
+
+ if !router.ShouldUseNotify("webhook", EngineNotifyV1, flags) {
+ t.Fatalf("expected notify routing enabled for webhook when FlagWebhookServiceEnabled is true")
+ }
+
+ flags[FlagWebhookServiceEnabled] = false
+ if router.ShouldUseNotify("webhook", EngineNotifyV1, flags) {
+ t.Fatalf("expected notify routing disabled for webhook when FlagWebhookServiceEnabled is false")
+ }
+}
diff --git a/backend/internal/services/docker_service.go b/backend/internal/services/docker_service.go
index dd25f6b9..7995e65f 100644
--- a/backend/internal/services/docker_service.go
+++ b/backend/internal/services/docker_service.go
@@ -7,6 +7,8 @@ import (
"net"
"net/url"
"os"
+ "slices"
+ "strconv"
"strings"
"syscall"
@@ -16,11 +18,17 @@ import (
)
type DockerUnavailableError struct {
- err error
+ err error
+ details string
}
-func NewDockerUnavailableError(err error) *DockerUnavailableError {
- return &DockerUnavailableError{err: err}
+func NewDockerUnavailableError(err error, details ...string) *DockerUnavailableError {
+ detailMsg := ""
+ if len(details) > 0 {
+ detailMsg = details[0]
+ }
+
+ return &DockerUnavailableError{err: err, details: detailMsg}
}
func (e *DockerUnavailableError) Error() string {
@@ -37,6 +45,13 @@ func (e *DockerUnavailableError) Unwrap() error {
return e.err
}
+func (e *DockerUnavailableError) Details() string {
+ if e == nil {
+ return ""
+ }
+ return e.details
+}
+
type DockerPort struct {
PrivatePort uint16 `json:"private_port"`
PublicPort uint16 `json:"public_port"`
@@ -55,8 +70,9 @@ type DockerContainer struct {
}
type DockerService struct {
- client *client.Client
- initErr error // Stores initialization error if Docker is unavailable
+ client *client.Client
+ initErr error // Stores initialization error if Docker is unavailable
+ localHost string
}
// NewDockerService creates a new Docker service instance.
@@ -64,21 +80,33 @@ type DockerService struct {
// DockerUnavailableError for all operations. This allows routes to be registered
// and provide helpful error messages to users.
func NewDockerService() *DockerService {
- cli, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation())
+ envHost := strings.TrimSpace(os.Getenv("DOCKER_HOST"))
+ localHost := resolveLocalDockerHost()
+ if envHost != "" && !strings.HasPrefix(envHost, "unix://") {
+ logger.Log().WithFields(map[string]any{"docker_host_env": envHost, "local_host": localHost}).Info("ignoring non-unix DOCKER_HOST for local docker mode")
+ }
+
+ cli, err := client.NewClientWithOpts(client.WithHost(localHost), client.WithAPIVersionNegotiation())
if err != nil {
logger.Log().WithError(err).Warn("Failed to initialize Docker client - Docker features will be unavailable")
+ unavailableErr := NewDockerUnavailableError(err, buildLocalDockerUnavailableDetails(err, localHost))
return &DockerService{
- client: nil,
- initErr: err,
+ client: nil,
+ initErr: unavailableErr,
+ localHost: localHost,
}
}
- return &DockerService{client: cli, initErr: nil}
+ return &DockerService{client: cli, initErr: nil, localHost: localHost}
}
func (s *DockerService) ListContainers(ctx context.Context, host string) ([]DockerContainer, error) {
// Check if Docker was available during initialization
if s.initErr != nil {
- return nil, &DockerUnavailableError{err: s.initErr}
+ var unavailableErr *DockerUnavailableError
+ if errors.As(s.initErr, &unavailableErr) {
+ return nil, unavailableErr
+ }
+ return nil, NewDockerUnavailableError(s.initErr, buildLocalDockerUnavailableDetails(s.initErr, s.localHost))
}
var cli *client.Client
@@ -101,7 +129,10 @@ func (s *DockerService) ListContainers(ctx context.Context, host string) ([]Dock
containers, err := cli.ContainerList(ctx, container.ListOptions{All: false})
if err != nil {
if isDockerConnectivityError(err) {
- return nil, &DockerUnavailableError{err: err}
+ if host == "" || host == "local" {
+ return nil, NewDockerUnavailableError(err, buildLocalDockerUnavailableDetails(err, s.localHost))
+ }
+ return nil, NewDockerUnavailableError(err)
}
return nil, fmt.Errorf("failed to list containers: %w", err)
}
@@ -206,3 +237,118 @@ func isDockerConnectivityError(err error) bool {
return false
}
+
+func resolveLocalDockerHost() string {
+ envHost := strings.TrimSpace(os.Getenv("DOCKER_HOST"))
+ if strings.HasPrefix(envHost, "unix://") {
+ socketPath := socketPathFromDockerHost(envHost)
+ if socketPath != "" {
+ if _, err := os.Stat(socketPath); err == nil {
+ return envHost
+ }
+ }
+ }
+
+ defaultSocketPath := "/var/run/docker.sock"
+ if _, err := os.Stat(defaultSocketPath); err == nil {
+ return "unix:///var/run/docker.sock"
+ }
+
+ rootlessSocketPath := fmt.Sprintf("/run/user/%d/docker.sock", os.Getuid())
+ if _, err := os.Stat(rootlessSocketPath); err == nil {
+ return "unix://" + rootlessSocketPath
+ }
+
+ return "unix:///var/run/docker.sock"
+}
+
+func socketPathFromDockerHost(host string) string {
+ trimmedHost := strings.TrimSpace(host)
+ if !strings.HasPrefix(trimmedHost, "unix://") {
+ return ""
+ }
+ return strings.TrimPrefix(trimmedHost, "unix://")
+}
+
+func buildLocalDockerUnavailableDetails(err error, localHost string) string {
+ socketPath := socketPathFromDockerHost(localHost)
+ if socketPath == "" {
+ socketPath = "/var/run/docker.sock"
+ }
+
+ uid := os.Getuid()
+ gid := os.Getgid()
+ groups, _ := os.Getgroups()
+ groupsStr := ""
+ if len(groups) > 0 {
+ groupValues := make([]string, 0, len(groups))
+ for _, groupID := range groups {
+ groupValues = append(groupValues, strconv.Itoa(groupID))
+ }
+ groupsStr = strings.Join(groupValues, ",")
+ }
+
+ if errno, ok := extractErrno(err); ok {
+ switch errno {
+ case syscall.ENOENT:
+ return fmt.Sprintf("Local Docker socket not found at %s (local host selector uses %s). Mount %s as read-only or read-write.", socketPath, localHost, socketPath)
+ case syscall.ECONNREFUSED:
+ return fmt.Sprintf("Docker daemon is not accepting connections at %s.", socketPath)
+ case syscall.EACCES, syscall.EPERM:
+ infoMsg, socketGID := localSocketStatSummary(socketPath)
+ permissionHint := ""
+ if socketGID >= 0 && !slices.Contains(groups, socketGID) {
+ permissionHint = fmt.Sprintf(" Process groups (%s) do not include socket gid %d; run container with matching supplemental group (e.g., --group-add %d or compose group_add: [\"%d\"]).", groupsStr, socketGID, socketGID, socketGID)
+ }
+ return fmt.Sprintf("Local Docker socket is mounted but not accessible by current process (uid=%d gid=%d). %s%s", uid, gid, infoMsg, permissionHint)
+ }
+ }
+
+ if errors.Is(err, os.ErrNotExist) {
+ return fmt.Sprintf("Local Docker socket not found at %s (local host selector uses %s).", socketPath, localHost)
+ }
+
+ return fmt.Sprintf("Cannot connect to local Docker via %s. Ensure Docker is running and the mounted socket permissions allow uid=%d gid=%d access.", localHost, uid, gid)
+}
+
+func extractErrno(err error) (syscall.Errno, bool) {
+ if err == nil {
+ return 0, false
+ }
+
+ var urlErr *url.Error
+ if errors.As(err, &urlErr) {
+ err = urlErr.Unwrap()
+ }
+
+ var syscallErr *os.SyscallError
+ if errors.As(err, &syscallErr) {
+ err = syscallErr.Unwrap()
+ }
+
+ var opErr *net.OpError
+ if errors.As(err, &opErr) {
+ err = opErr.Unwrap()
+ }
+
+ var errno syscall.Errno
+ if errors.As(err, &errno) {
+ return errno, true
+ }
+
+ return 0, false
+}
+
+func localSocketStatSummary(socketPath string) (string, int) {
+ info, statErr := os.Stat(socketPath)
+ if statErr != nil {
+ return fmt.Sprintf("Socket path %s could not be stat'ed: %v.", socketPath, statErr), -1
+ }
+
+ stat, ok := info.Sys().(*syscall.Stat_t)
+ if !ok || stat == nil {
+ return fmt.Sprintf("Socket path %s has mode %s.", socketPath, info.Mode().String()), -1
+ }
+
+ return fmt.Sprintf("Socket path %s has mode %s owner uid=%d gid=%d.", socketPath, info.Mode().String(), stat.Uid, stat.Gid), int(stat.Gid)
+}
diff --git a/backend/internal/services/docker_service_test.go b/backend/internal/services/docker_service_test.go
index 9687579c..fa35e599 100644
--- a/backend/internal/services/docker_service_test.go
+++ b/backend/internal/services/docker_service_test.go
@@ -3,13 +3,17 @@ package services
import (
"context"
"errors"
+ "fmt"
"net"
"net/url"
"os"
+ "path/filepath"
+ "strings"
"syscall"
"testing"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
func TestDockerService_New(t *testing.T) {
@@ -58,6 +62,10 @@ func TestDockerUnavailableError_ErrorMethods(t *testing.T) {
unwrapped := err.Unwrap()
assert.Equal(t, baseErr, unwrapped)
+ // Test Details()
+ errWithDetails := NewDockerUnavailableError(baseErr, "socket permission mismatch")
+ assert.Equal(t, "socket permission mismatch", errWithDetails.Details())
+
// Test nil receiver cases
var nilErr *DockerUnavailableError
assert.Equal(t, "docker unavailable", nilErr.Error())
@@ -67,6 +75,7 @@ func TestDockerUnavailableError_ErrorMethods(t *testing.T) {
nilBaseErr := NewDockerUnavailableError(nil)
assert.Equal(t, "docker unavailable", nilBaseErr.Error())
assert.Nil(t, nilBaseErr.Unwrap())
+ assert.Equal(t, "", nilBaseErr.Details())
}
func TestIsDockerConnectivityError(t *testing.T) {
@@ -165,3 +174,184 @@ func TestIsDockerConnectivityError_NetErrorTimeout(t *testing.T) {
result := isDockerConnectivityError(netErr)
assert.True(t, result, "net.Error with Timeout() should return true")
}
+
+func TestResolveLocalDockerHost_IgnoresRemoteTCPEnv(t *testing.T) {
+ t.Setenv("DOCKER_HOST", "tcp://docker-proxy:2375")
+
+ host := resolveLocalDockerHost()
+
+ assert.Equal(t, "unix:///var/run/docker.sock", host)
+}
+
+func TestResolveLocalDockerHost_UsesExistingUnixSocketFromEnv(t *testing.T) {
+ tmpDir := t.TempDir()
+ socketFile := filepath.Join(tmpDir, "docker.sock")
+ require.NoError(t, os.WriteFile(socketFile, []byte(""), 0o600))
+
+ t.Setenv("DOCKER_HOST", "unix://"+socketFile)
+
+ host := resolveLocalDockerHost()
+
+ assert.Equal(t, "unix://"+socketFile, host)
+}
+
+func TestBuildLocalDockerUnavailableDetails_PermissionDeniedIncludesGroupHint(t *testing.T) {
+ err := &net.OpError{Op: "dial", Net: "unix", Err: syscall.EACCES}
+ details := buildLocalDockerUnavailableDetails(err, "unix:///var/run/docker.sock")
+
+ assert.Contains(t, details, "not accessible")
+ assert.Contains(t, details, "uid=")
+ assert.Contains(t, details, "gid=")
+ assert.NotContains(t, strings.ToLower(details), "token")
+
+ // When docker socket exists with a GID not in process groups, verify both
+ // CLI and compose supplemental-group guidance are present.
+ if strings.Contains(details, "--group-add") {
+ assert.Contains(t, details, "group_add",
+ "when supplemental group hint is present, it should include compose group_add syntax")
+ }
+}
+
+func TestBuildLocalDockerUnavailableDetails_MissingSocket(t *testing.T) {
+ err := &net.OpError{Op: "dial", Net: "unix", Err: syscall.ENOENT}
+ host := "unix:///tmp/nonexistent-docker.sock"
+
+ details := buildLocalDockerUnavailableDetails(err, host)
+
+ assert.Contains(t, details, "not found")
+ assert.Contains(t, details, "/tmp/nonexistent-docker.sock")
+ assert.Contains(t, details, host)
+ assert.Contains(t, details, "Mount", "ENOENT path should include mount guidance")
+}
+
+func TestBuildLocalDockerUnavailableDetails_PermissionDeniedSocketGIDInGroups(t *testing.T) {
+ // Temp file GID = our primary GID (already in process groups) → no group hint
+ tmpDir := t.TempDir()
+ socketFile := filepath.Join(tmpDir, "docker.sock")
+ require.NoError(t, os.WriteFile(socketFile, []byte(""), 0o660))
+
+ host := "unix://" + socketFile
+ err := &net.OpError{Op: "dial", Net: "unix", Err: syscall.EACCES}
+ details := buildLocalDockerUnavailableDetails(err, host)
+
+ assert.Contains(t, details, "not accessible")
+ assert.Contains(t, details, "uid=")
+ assert.NotContains(t, details, "--group-add",
+ "group-add hint should not appear when socket GID is already in process groups")
+}
+
+func TestBuildLocalDockerUnavailableDetails_PermissionDeniedStatFails(t *testing.T) {
+ // EACCES with a socket path that doesn't exist → stat fails
+ err := &net.OpError{Op: "dial", Net: "unix", Err: syscall.EACCES}
+ details := buildLocalDockerUnavailableDetails(err, "unix:///tmp/nonexistent-stat-fail.sock")
+
+ assert.Contains(t, details, "not accessible")
+ assert.Contains(t, details, "could not be stat")
+}
+
+func TestBuildLocalDockerUnavailableDetails_ConnectionRefused(t *testing.T) {
+ err := &net.OpError{Op: "dial", Net: "unix", Err: syscall.ECONNREFUSED}
+ details := buildLocalDockerUnavailableDetails(err, "unix:///var/run/docker.sock")
+
+ assert.Contains(t, details, "not accepting connections")
+}
+
+func TestBuildLocalDockerUnavailableDetails_GenericError(t *testing.T) {
+ err := errors.New("some unknown docker error")
+ details := buildLocalDockerUnavailableDetails(err, "unix:///var/run/docker.sock")
+
+ assert.Contains(t, details, "Cannot connect")
+ assert.Contains(t, details, "uid=")
+ assert.Contains(t, details, "gid=")
+}
+
+// ===== Additional coverage for uncovered paths =====
+
+func TestDockerUnavailableError_NilDetails(t *testing.T) {
+ var nilErr *DockerUnavailableError
+ assert.Equal(t, "", nilErr.Details())
+}
+
+func TestExtractErrno_UrlErrorWrapping(t *testing.T) {
+ urlErr := &url.Error{Op: "dial", URL: "unix:///var/run/docker.sock", Err: syscall.EACCES}
+ errno, ok := extractErrno(urlErr)
+ assert.True(t, ok)
+ assert.Equal(t, syscall.EACCES, errno)
+}
+
+func TestExtractErrno_SyscallError(t *testing.T) {
+ scErr := &os.SyscallError{Syscall: "connect", Err: syscall.ECONNREFUSED}
+ errno, ok := extractErrno(scErr)
+ assert.True(t, ok)
+ assert.Equal(t, syscall.ECONNREFUSED, errno)
+}
+
+func TestExtractErrno_NilError(t *testing.T) {
+ _, ok := extractErrno(nil)
+ assert.False(t, ok)
+}
+
+func TestExtractErrno_NonSyscallError(t *testing.T) {
+ _, ok := extractErrno(errors.New("some generic error"))
+ assert.False(t, ok)
+}
+
+func TestExtractErrno_OpErrorWrapping(t *testing.T) {
+ opErr := &net.OpError{Op: "dial", Net: "unix", Err: syscall.EPERM}
+ errno, ok := extractErrno(opErr)
+ assert.True(t, ok)
+ assert.Equal(t, syscall.EPERM, errno)
+}
+
+func TestExtractErrno_NestedUrlSyscallOpError(t *testing.T) {
+ innerErr := &net.OpError{
+ Op: "dial",
+ Net: "unix",
+ Err: &os.SyscallError{Syscall: "connect", Err: syscall.EACCES},
+ }
+ urlErr := &url.Error{Op: "Get", URL: "unix:///var/run/docker.sock", Err: innerErr}
+ errno, ok := extractErrno(urlErr)
+ assert.True(t, ok)
+ assert.Equal(t, syscall.EACCES, errno)
+}
+
+func TestSocketPathFromDockerHost(t *testing.T) {
+ tests := []struct {
+ name string
+ host string
+ expected string
+ }{
+ {"unix socket", "unix:///var/run/docker.sock", "/var/run/docker.sock"},
+ {"tcp host", "tcp://192.168.1.1:2375", ""},
+ {"empty", "", ""},
+ {"whitespace unix", " unix:///tmp/docker.sock ", "/tmp/docker.sock"},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := socketPathFromDockerHost(tt.host)
+ assert.Equal(t, tt.expected, result)
+ })
+ }
+}
+
+func TestBuildLocalDockerUnavailableDetails_OsErrNotExist(t *testing.T) {
+ err := fmt.Errorf("wrapped: %w", os.ErrNotExist)
+ details := buildLocalDockerUnavailableDetails(err, "unix:///var/run/docker.sock")
+ assert.Contains(t, details, "not found")
+ assert.Contains(t, details, "/var/run/docker.sock")
+}
+
+func TestBuildLocalDockerUnavailableDetails_NonUnixHost(t *testing.T) {
+ err := errors.New("cannot connect")
+ details := buildLocalDockerUnavailableDetails(err, "tcp://192.168.1.1:2375")
+ assert.Contains(t, details, "Cannot connect")
+ assert.Contains(t, details, "tcp://192.168.1.1:2375")
+}
+
+func TestBuildLocalDockerUnavailableDetails_EPERMWithStatFail(t *testing.T) {
+ err := &net.OpError{Op: "dial", Net: "unix", Err: syscall.EPERM}
+ details := buildLocalDockerUnavailableDetails(err, "unix:///tmp/nonexistent-eperm.sock")
+ assert.Contains(t, details, "not accessible")
+ assert.Contains(t, details, "could not be stat")
+}
diff --git a/backend/internal/services/enhanced_security_notification_service.go b/backend/internal/services/enhanced_security_notification_service.go
index 9754aef6..a6495d2d 100644
--- a/backend/internal/services/enhanced_security_notification_service.go
+++ b/backend/internal/services/enhanced_security_notification_service.go
@@ -394,8 +394,8 @@ func (s *EnhancedSecurityNotificationService) MigrateFromLegacyConfig() error {
NotifySecurityRateLimitHits: legacyConfig.NotifyRateLimitHits,
URL: legacyConfig.WebhookURL,
}
- if err := tx.Create(&provider).Error; err != nil {
- return fmt.Errorf("create managed provider: %w", err)
+ if createErr := tx.Create(&provider).Error; createErr != nil {
+ return fmt.Errorf("create managed provider: %w", createErr)
}
} else if err != nil {
return fmt.Errorf("query managed provider: %w", err)
@@ -405,8 +405,8 @@ func (s *EnhancedSecurityNotificationService) MigrateFromLegacyConfig() error {
provider.NotifySecurityACLDenies = legacyConfig.NotifyACLDenies
provider.NotifySecurityRateLimitHits = legacyConfig.NotifyRateLimitHits
provider.URL = legacyConfig.WebhookURL
- if err := tx.Save(&provider).Error; err != nil {
- return fmt.Errorf("update managed provider: %w", err)
+ if saveErr := tx.Save(&provider).Error; saveErr != nil {
+ return fmt.Errorf("update managed provider: %w", saveErr)
}
}
@@ -430,7 +430,7 @@ func (s *EnhancedSecurityNotificationService) MigrateFromLegacyConfig() error {
}
// Upsert marker
- if err := tx.Where("key = ?", newMarkerSetting.Key).First(&markerSetting).Error; err == gorm.ErrRecordNotFound {
+ if queryErr := tx.Where("key = ?", newMarkerSetting.Key).First(&markerSetting).Error; queryErr == gorm.ErrRecordNotFound {
return tx.Create(&newMarkerSetting).Error
}
newMarkerSetting.ID = markerSetting.ID
diff --git a/backend/internal/services/enhanced_security_notification_service_discord_only_test.go b/backend/internal/services/enhanced_security_notification_service_discord_only_test.go
index 6a5611ce..a05230f4 100644
--- a/backend/internal/services/enhanced_security_notification_service_discord_only_test.go
+++ b/backend/internal/services/enhanced_security_notification_service_discord_only_test.go
@@ -60,8 +60,8 @@ func TestDiscordOnly_DispatchToProviderAcceptsDiscord(t *testing.T) {
// Verify payload structure
var payload models.SecurityEvent
- err := json.NewDecoder(r.Body).Decode(&payload)
- assert.NoError(t, err)
+ decodeErr := json.NewDecoder(r.Body).Decode(&payload)
+ assert.NoError(t, decodeErr)
assert.Equal(t, "waf_block", payload.EventType)
w.WriteHeader(http.StatusOK)
diff --git a/backend/internal/services/mail_service_test.go b/backend/internal/services/mail_service_test.go
index b1d04f13..c2e072b5 100644
--- a/backend/internal/services/mail_service_test.go
+++ b/backend/internal/services/mail_service_test.go
@@ -1141,7 +1141,7 @@ func newTestTLSConfig(t *testing.T) (*tls.Config, []byte) {
return &tls.Config{Certificates: []tls.Certificate{cert}, MinVersion: tls.VersionTLS12}, caPEM
}
-func trustTestCertificate(t *testing.T, certPEM []byte) {
+func trustTestCertificate(t *testing.T, _ []byte) {
t.Helper()
// SSL_CERT_FILE is already set globally by TestMain.
// This function kept for API compatibility but no longer needs to set environment.
diff --git a/backend/internal/services/notification_service.go b/backend/internal/services/notification_service.go
index d4a824ad..f6b84544 100644
--- a/backend/internal/services/notification_service.go
+++ b/backend/internal/services/notification_service.go
@@ -16,6 +16,7 @@ import (
"github.com/Wikid82/charon/backend/internal/logger"
"github.com/Wikid82/charon/backend/internal/network"
+ "github.com/Wikid82/charon/backend/internal/notifications"
"github.com/Wikid82/charon/backend/internal/security"
"github.com/Wikid82/charon/backend/internal/trace"
@@ -25,11 +26,15 @@ import (
)
type NotificationService struct {
- DB *gorm.DB
+ DB *gorm.DB
+ httpWrapper *notifications.HTTPWrapper
}
func NewNotificationService(db *gorm.DB) *NotificationService {
- return &NotificationService{DB: db}
+ return &NotificationService{
+ DB: db,
+ httpWrapper: notifications.NewNotifyHTTPWrapper(),
+ }
}
var discordWebhookRegex = regexp.MustCompile(`^https://discord(?:app)?\.com/api/webhooks/(\d+)/([a-zA-Z0-9_-]+)`)
@@ -98,15 +103,46 @@ func validateDiscordProviderURL(providerType, rawURL string) error {
// supportsJSONTemplates returns true if the provider type can use JSON templates
func supportsJSONTemplates(providerType string) bool {
switch strings.ToLower(providerType) {
- case "webhook", "discord", "slack", "gotify", "generic":
+ case "webhook", "discord", "gotify", "slack", "generic":
return true
- case "telegram":
- return false // Telegram uses URL parameters
default:
return false
}
}
+func isSupportedNotificationProviderType(providerType string) bool {
+ switch strings.ToLower(strings.TrimSpace(providerType)) {
+ case "discord", "gotify", "webhook":
+ return true
+ default:
+ return false
+ }
+}
+
+func (s *NotificationService) isDispatchEnabled(providerType string) bool {
+ switch strings.ToLower(strings.TrimSpace(providerType)) {
+ case "discord":
+ return true
+ case "gotify":
+ return s.getFeatureFlagValue(notifications.FlagGotifyServiceEnabled, true)
+ case "webhook":
+ return s.getFeatureFlagValue(notifications.FlagWebhookServiceEnabled, true)
+ default:
+ return false
+ }
+}
+
+func (s *NotificationService) getFeatureFlagValue(key string, fallback bool) bool {
+ var setting models.Setting
+ err := s.DB.Where("key = ?", key).First(&setting).Error
+ if err != nil {
+ return fallback
+ }
+
+ v := strings.ToLower(strings.TrimSpace(setting.Value))
+ return v == "1" || v == "true" || v == "yes"
+}
+
// Internal Notifications (DB)
func (s *NotificationService) Create(nType models.NotificationType, title, message string) (*models.Notification, error) {
@@ -188,11 +224,10 @@ func (s *NotificationService) SendExternal(ctx context.Context, eventType, title
if !shouldSend {
continue
}
- // Non-dispatch policy for deprecated providers
- if provider.Type != "discord" {
+ if !s.isDispatchEnabled(provider.Type) {
logger.Log().WithField("provider", util.SanitizeForLog(provider.Name)).
WithField("type", provider.Type).
- Warn("Skipping dispatch to deprecated non-discord provider")
+ Warn("Skipping dispatch because provider type is disabled for notify dispatch")
continue
}
go func(p models.NotificationProvider) {
@@ -253,31 +288,15 @@ func (s *NotificationService) sendJSONPayload(ctx context.Context, p models.Noti
return fmt.Errorf("template size exceeds maximum limit of %d bytes", maxTemplateSize)
}
- // Validate webhook URL using the security package's SSRF-safe validator.
- // ValidateExternalURL performs comprehensive validation including:
- // - URL format and scheme validation (http/https only)
- // - DNS resolution and IP blocking for private/reserved ranges
- // - Protection against cloud metadata endpoints (169.254.169.254)
- // Using the security package's function helps CodeQL recognize the sanitization.
- //
- // Additionally, we apply `isValidRedirectURL` as a barrier-guard style predicate.
- // CodeQL recognizes this pattern as a sanitizer for untrusted URL values, while
- // the real SSRF protection remains `security.ValidateExternalURL`.
- if err := validateDiscordProviderURLFunc(p.Type, p.URL); err != nil {
- return err
- }
+ providerType := strings.ToLower(strings.TrimSpace(p.Type))
+ if providerType == "discord" {
+ if err := validateDiscordProviderURLFunc(p.Type, p.URL); err != nil {
+ return err
+ }
- webhookURL := p.URL
-
- if !isValidRedirectURL(webhookURL) {
- return fmt.Errorf("invalid webhook url")
- }
- validatedURLStr, err := security.ValidateExternalURL(webhookURL,
- security.WithAllowHTTP(), // Allow both http and https for webhooks
- security.WithAllowLocalhost(), // Allow localhost for testing
- )
- if err != nil {
- return fmt.Errorf("invalid webhook url: %w", err)
+ if !isValidRedirectURL(p.URL) {
+ return fmt.Errorf("invalid webhook url")
+ }
}
// Parse template and add helper funcs
@@ -348,11 +367,43 @@ func (s *NotificationService) sendJSONPayload(ctx context.Context, p models.Noti
}
}
- // Send Request with a safe client (SSRF protection, timeout, no auto-redirect)
- // Using network.NewSafeHTTPClient() for defense-in-depth against SSRF attacks.
+ if providerType == "gotify" || providerType == "webhook" {
+ headers := map[string]string{
+ "Content-Type": "application/json",
+ "User-Agent": "Charon-Notify/1.0",
+ }
+ if rid := ctx.Value(trace.RequestIDKey); rid != nil {
+ if ridStr, ok := rid.(string); ok {
+ headers["X-Request-ID"] = ridStr
+ }
+ }
+ if providerType == "gotify" {
+ if strings.TrimSpace(p.Token) != "" {
+ headers["X-Gotify-Key"] = strings.TrimSpace(p.Token)
+ }
+ }
+
+ if _, sendErr := s.httpWrapper.Send(ctx, notifications.HTTPWrapperRequest{
+ URL: p.URL,
+ Headers: headers,
+ Body: body.Bytes(),
+ }); sendErr != nil {
+ return fmt.Errorf("failed to send webhook: %w", sendErr)
+ }
+ return nil
+ }
+
+ validatedURLStr, err := security.ValidateExternalURL(p.URL,
+ security.WithAllowHTTP(),
+ security.WithAllowLocalhost(),
+ )
+ if err != nil {
+ return fmt.Errorf("invalid webhook url: %w", err)
+ }
+
client := network.NewSafeHTTPClient(
network.WithTimeout(10*time.Second),
- network.WithAllowLocalhost(), // Allow localhost for testing
+ network.WithAllowLocalhost(),
)
req, err := http.NewRequestWithContext(ctx, "POST", validatedURLStr, &body)
@@ -360,20 +411,12 @@ func (s *NotificationService) sendJSONPayload(ctx context.Context, p models.Noti
return fmt.Errorf("failed to create webhook request: %w", err)
}
req.Header.Set("Content-Type", "application/json")
- // Propagate request id header if present in context
if rid := ctx.Value(trace.RequestIDKey); rid != nil {
if ridStr, ok := rid.(string); ok {
req.Header.Set("X-Request-ID", ridStr)
}
}
- // Safe: URL validated by security.ValidateExternalURL() which validates URL
- // format/scheme and blocks private/reserved destinations through DNS+dial-time checks.
- // Safe: URL validated by security.ValidateExternalURL() which:
- // 1. Validates URL format and scheme (HTTPS required in production)
- // 2. Resolves DNS and blocks private/reserved IPs (RFC 1918, loopback, link-local)
- // 3. Uses ssrfSafeDialer for connection-time IP revalidation (TOCTOU protection)
- // 4. No redirect following allowed
- // See: internal/security/url_validator.go
+
resp, err := webhookDoRequestFunc(client, req)
if err != nil {
return fmt.Errorf("failed to send webhook: %w", err)
@@ -411,17 +454,17 @@ func isValidRedirectURL(rawURL string) bool {
}
func (s *NotificationService) TestProvider(provider models.NotificationProvider) error {
- // Discord-only enforcement for this rollout
- if provider.Type != "discord" {
- return fmt.Errorf("only discord provider type is supported in this release")
+ providerType := strings.ToLower(strings.TrimSpace(provider.Type))
+ if !isSupportedNotificationProviderType(providerType) {
+ return fmt.Errorf("unsupported provider type: %s", providerType)
}
- if err := validateDiscordProviderURLFunc(provider.Type, provider.URL); err != nil {
+ if err := validateDiscordProviderURLFunc(providerType, provider.URL); err != nil {
return err
}
- if !supportsJSONTemplates(provider.Type) {
- return legacyFallbackInvocationError(provider.Type)
+ if !supportsJSONTemplates(providerType) {
+ return legacyFallbackInvocationError(providerType)
}
data := map[string]any{
@@ -523,15 +566,19 @@ func (s *NotificationService) ListProviders() ([]models.NotificationProvider, er
}
func (s *NotificationService) CreateProvider(provider *models.NotificationProvider) error {
- // Discord-only enforcement for this rollout
- if provider.Type != "discord" {
- return fmt.Errorf("only discord provider type is supported in this release")
+ provider.Type = strings.ToLower(strings.TrimSpace(provider.Type))
+ if !isSupportedNotificationProviderType(provider.Type) {
+ return fmt.Errorf("unsupported provider type")
}
if err := validateDiscordProviderURLFunc(provider.Type, provider.URL); err != nil {
return err
}
+ if provider.Type != "gotify" {
+ provider.Token = ""
+ }
+
// Validate custom template before creating
if strings.ToLower(strings.TrimSpace(provider.Template)) == "custom" && strings.TrimSpace(provider.Config) != "" {
// Provide a minimal preview payload
@@ -550,25 +597,28 @@ func (s *NotificationService) UpdateProvider(provider *models.NotificationProvid
return err
}
- // Block type mutation for non-Discord providers
- if existing.Type != "discord" && provider.Type != existing.Type {
- return fmt.Errorf("cannot change provider type for deprecated non-discord providers")
+ // Block type mutation for existing providers to avoid cross-provider token/schema confusion
+ if strings.TrimSpace(provider.Type) != "" && provider.Type != existing.Type {
+ return fmt.Errorf("cannot change provider type for existing providers")
}
+ provider.Type = existing.Type
- // Block enable mutation for non-Discord providers
- if existing.Type != "discord" && provider.Enabled && !existing.Enabled {
- return fmt.Errorf("cannot enable deprecated non-discord providers")
- }
-
- // Discord-only enforcement for type changes
- if provider.Type != "discord" {
- return fmt.Errorf("only discord provider type is supported in this release")
+ if !isSupportedNotificationProviderType(provider.Type) {
+ return fmt.Errorf("unsupported provider type")
}
if err := validateDiscordProviderURLFunc(provider.Type, provider.URL); err != nil {
return err
}
+ if provider.Type == "gotify" {
+ if strings.TrimSpace(provider.Token) == "" {
+ provider.Token = existing.Token
+ }
+ } else {
+ provider.Token = ""
+ }
+
// Validate custom template before saving
if strings.ToLower(strings.TrimSpace(provider.Template)) == "custom" && strings.TrimSpace(provider.Config) != "" {
payload := map[string]any{"Title": "Preview", "Message": "Preview", "Time": time.Now().Format(time.RFC3339), "EventType": "preview"}
@@ -581,6 +631,7 @@ func (s *NotificationService) UpdateProvider(provider *models.NotificationProvid
"name": provider.Name,
"type": provider.Type,
"url": provider.URL,
+ "token": provider.Token,
"config": provider.Config,
"template": provider.Template,
"enabled": provider.Enabled,
diff --git a/backend/internal/services/notification_service_discord_only_test.go b/backend/internal/services/notification_service_discord_only_test.go
index a5566db1..699ee1a7 100644
--- a/backend/internal/services/notification_service_discord_only_test.go
+++ b/backend/internal/services/notification_service_discord_only_test.go
@@ -2,6 +2,8 @@ package services
import (
"context"
+ "net/http"
+ "net/http/httptest"
"testing"
"time"
@@ -12,15 +14,15 @@ import (
"gorm.io/gorm"
)
-// TestDiscordOnly_CreateProviderRejectsNonDiscord tests service-level Discord-only enforcement for create.
-func TestDiscordOnly_CreateProviderRejectsNonDiscord(t *testing.T) {
+// TestDiscordOnly_CreateProviderRejectsUnsupported tests service-level provider allowlist for create.
+func TestDiscordOnly_CreateProviderRejectsUnsupported(t *testing.T) {
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
require.NoError(t, err)
require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}))
service := NewNotificationService(db)
- testCases := []string{"webhook", "slack", "gotify", "telegram", "generic"}
+ testCases := []string{"slack", "telegram", "generic", "email"}
for _, providerType := range testCases {
t.Run(providerType, func(t *testing.T) {
@@ -31,8 +33,8 @@ func TestDiscordOnly_CreateProviderRejectsNonDiscord(t *testing.T) {
}
err := service.CreateProvider(provider)
- assert.Error(t, err, "Should reject non-Discord provider")
- assert.Contains(t, err.Error(), "only discord provider type is supported")
+ assert.Error(t, err, "Should reject unsupported provider")
+ assert.Contains(t, err.Error(), "unsupported provider type")
})
}
}
@@ -60,76 +62,81 @@ func TestDiscordOnly_CreateProviderAcceptsDiscord(t *testing.T) {
assert.Equal(t, "discord", created.Type)
}
-// TestDiscordOnly_UpdateProviderRejectsNonDiscord tests service-level Discord-only enforcement for update.
-func TestDiscordOnly_UpdateProviderRejectsNonDiscord(t *testing.T) {
+func TestDiscordOnly_CreateProviderAcceptsWebhook(t *testing.T) {
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
require.NoError(t, err)
require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}))
- // Create a deprecated webhook provider
- deprecatedProvider := models.NotificationProvider{
- ID: "test-id",
- Name: "Test Webhook",
- Type: "webhook",
- URL: "https://example.com/webhook",
- MigrationState: "deprecated",
- }
- require.NoError(t, db.Create(&deprecatedProvider).Error)
-
service := NewNotificationService(db)
- // Try to update with webhook type
provider := &models.NotificationProvider{
- ID: "test-id",
- Name: "Updated",
+ Name: "Test Webhook",
Type: "webhook",
URL: "https://example.com/webhook",
}
- err = service.UpdateProvider(provider)
- assert.Error(t, err, "Should reject non-Discord provider update")
- assert.Contains(t, err.Error(), "only discord provider type is supported")
+ err = service.CreateProvider(provider)
+ assert.NoError(t, err, "Should accept webhook provider")
}
-// TestDiscordOnly_UpdateProviderRejectsTypeMutation tests that service blocks type mutation for deprecated providers.
+func TestDiscordOnly_CreateProviderAcceptsGotifyWithOrWithoutToken(t *testing.T) {
+ db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
+ require.NoError(t, err)
+ require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}))
+
+ service := NewNotificationService(db)
+
+ provider := &models.NotificationProvider{
+ Name: "Test Gotify",
+ Type: "gotify",
+ URL: "https://gotify.example.com/message",
+ }
+
+ err = service.CreateProvider(provider)
+ assert.NoError(t, err)
+
+ provider.ID = ""
+ provider.Token = "secret"
+ err = service.CreateProvider(provider)
+ assert.NoError(t, err)
+}
+
+// TestDiscordOnly_UpdateProviderRejectsTypeMutation tests immutable provider type on update.
func TestDiscordOnly_UpdateProviderRejectsTypeMutation(t *testing.T) {
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
require.NoError(t, err)
require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}))
- // Create a deprecated webhook provider
- deprecatedProvider := models.NotificationProvider{
+ provider := models.NotificationProvider{
ID: "test-id",
Name: "Test Webhook",
Type: "webhook",
URL: "https://example.com/webhook",
MigrationState: "deprecated",
}
- require.NoError(t, db.Create(&deprecatedProvider).Error)
+ require.NoError(t, db.Create(&provider).Error)
service := NewNotificationService(db)
- // Try to change type to discord
- provider := &models.NotificationProvider{
+ updatedProvider := &models.NotificationProvider{
ID: "test-id",
- Name: "Test Webhook",
+ Name: "Updated",
Type: "discord",
URL: "https://discord.com/api/webhooks/123/abc",
}
- err = service.UpdateProvider(provider)
+ err = service.UpdateProvider(updatedProvider)
assert.Error(t, err, "Should reject type mutation")
assert.Contains(t, err.Error(), "cannot change provider type")
}
-// TestDiscordOnly_UpdateProviderRejectsEnable tests that service blocks enabling deprecated providers.
-func TestDiscordOnly_UpdateProviderRejectsEnable(t *testing.T) {
+// TestDiscordOnly_UpdateProviderAllowsWebhookUpdates tests supported provider updates.
+func TestDiscordOnly_UpdateProviderAllowsWebhookUpdates(t *testing.T) {
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
require.NoError(t, err)
require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}))
- // Create a deprecated webhook provider (disabled)
- deprecatedProvider := models.NotificationProvider{
+ provider := models.NotificationProvider{
ID: "test-id",
Name: "Test Webhook",
Type: "webhook",
@@ -137,12 +144,11 @@ func TestDiscordOnly_UpdateProviderRejectsEnable(t *testing.T) {
Enabled: false,
MigrationState: "deprecated",
}
- require.NoError(t, db.Create(&deprecatedProvider).Error)
+ require.NoError(t, db.Create(&provider).Error)
service := NewNotificationService(db)
- // Try to enable
- provider := &models.NotificationProvider{
+ updatedProvider := &models.NotificationProvider{
ID: "test-id",
Name: "Test Webhook",
Type: "webhook",
@@ -150,28 +156,33 @@ func TestDiscordOnly_UpdateProviderRejectsEnable(t *testing.T) {
Enabled: true,
}
- err = service.UpdateProvider(provider)
- assert.Error(t, err, "Should reject enabling deprecated provider")
- assert.Contains(t, err.Error(), "cannot enable deprecated")
+ err = service.UpdateProvider(updatedProvider)
+ assert.NoError(t, err)
}
-// TestDiscordOnly_TestProviderRejectsNonDiscord tests that TestProvider enforces Discord-only.
-func TestDiscordOnly_TestProviderRejectsNonDiscord(t *testing.T) {
+// TestDiscordOnly_TestProviderAllowsWebhookWithoutFeatureFlag tests that webhook TestProvider
+// works without explicit feature flag (bypasses dispatch gate).
+func TestDiscordOnly_TestProviderAllowsWebhookWithoutFeatureFlag(t *testing.T) {
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
require.NoError(t, err)
- require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}))
+ require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}, &models.Setting{}))
+
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ }))
+ defer ts.Close()
service := NewNotificationService(db)
provider := models.NotificationProvider{
- Name: "Test Webhook",
- Type: "webhook",
- URL: "https://example.com/webhook",
+ Name: "Test Webhook",
+ Type: "webhook",
+ URL: ts.URL + "/webhook",
+ Template: "minimal",
}
err = service.TestProvider(provider)
- assert.Error(t, err, "Should reject non-Discord provider test")
- assert.Contains(t, err.Error(), "only discord provider type is supported")
+ assert.NoError(t, err)
}
// TestDiscordOnly_MigrationDeprecatesNonDiscord tests that migration marks non-Discord as deprecated.
diff --git a/backend/internal/services/notification_service_json_test.go b/backend/internal/services/notification_service_json_test.go
index 2b6e65e6..261895e3 100644
--- a/backend/internal/services/notification_service_json_test.go
+++ b/backend/internal/services/notification_service_json_test.go
@@ -231,6 +231,7 @@ func TestSendJSONPayload_Gotify(t *testing.T) {
provider := models.NotificationProvider{
Type: "gotify",
URL: server.URL,
+ Token: "test-token",
Template: "custom",
Config: `{"message": {{toJSON .Message}}, "title": {{toJSON .Title}}}`,
}
@@ -262,7 +263,7 @@ func TestSendJSONPayload_TemplateTimeout(t *testing.T) {
Type: "discord",
URL: "http://10.0.0.1:9999",
Template: "custom",
- Config: `{"data": {{toJSON .}}}`,
+ Config: `{"content": {{toJSON .Message}}, "data": {{toJSON .}}}`,
}
// Create data that will be processed
diff --git a/backend/internal/services/notification_service_test.go b/backend/internal/services/notification_service_test.go
index 84576104..47ecc412 100644
--- a/backend/internal/services/notification_service_test.go
+++ b/backend/internal/services/notification_service_test.go
@@ -528,17 +528,7 @@ func TestNotificationService_TestProvider_Errors(t *testing.T) {
}
err := svc.TestProvider(provider)
assert.Error(t, err)
- assert.Contains(t, err.Error(), "only discord provider type is supported")
- })
-
- t.Run("webhook type not supported", func(t *testing.T) {
- provider := models.NotificationProvider{
- Type: "webhook",
- URL: "https://example.com/webhook",
- }
- err := svc.TestProvider(provider)
- assert.Error(t, err)
- assert.Contains(t, err.Error(), "only discord provider type is supported")
+ assert.Contains(t, err.Error(), "unsupported provider type")
})
t.Run("discord with invalid URL format", func(t *testing.T) {
@@ -557,7 +547,7 @@ func TestNotificationService_TestProvider_Errors(t *testing.T) {
}
err := svc.TestProvider(provider)
assert.Error(t, err)
- assert.Contains(t, err.Error(), "only discord provider type is supported")
+ assert.Contains(t, err.Error(), "unsupported provider type")
})
t.Run("webhook success", func(t *testing.T) {
@@ -663,7 +653,7 @@ func TestSSRF_WebhookIntegration(t *testing.T) {
data := map[string]any{"Title": "Test", "Message": "Test Message"}
err := svc.sendJSONPayload(context.Background(), provider, data)
assert.Error(t, err)
- assert.Contains(t, err.Error(), "invalid webhook url")
+ assert.Contains(t, err.Error(), "destination URL validation failed")
})
t.Run("blocks cloud metadata endpoint", func(t *testing.T) {
@@ -674,7 +664,7 @@ func TestSSRF_WebhookIntegration(t *testing.T) {
data := map[string]any{"Title": "Test", "Message": "Test Message"}
err := svc.sendJSONPayload(context.Background(), provider, data)
assert.Error(t, err)
- assert.Contains(t, err.Error(), "invalid webhook url")
+ assert.Contains(t, err.Error(), "destination URL validation failed")
})
t.Run("allows localhost for testing", func(t *testing.T) {
@@ -1795,13 +1785,13 @@ func TestLegacyFallbackInvocationError(t *testing.T) {
db := setupNotificationTestDB(t)
svc := NewNotificationService(db)
- // Test non-discord providers are rejected with discord-only error
+ // Test non-supported providers are rejected
err := svc.TestProvider(models.NotificationProvider{
Type: "telegram",
URL: "telegram://token@telegram?chats=1",
})
require.Error(t, err)
- assert.Contains(t, err.Error(), "only discord provider type is supported")
+ assert.Contains(t, err.Error(), "unsupported provider type")
}
func TestLegacyFallbackInvocationError_DirectHelperAndHook(t *testing.T) {
@@ -1962,16 +1952,14 @@ func TestTestProvider_NotifyOnlyRejectsUnsupportedProvider(t *testing.T) {
db := setupNotificationTestDB(t)
svc := NewNotificationService(db)
- // Test non-discord providers are rejected
+ // Test truly unsupported providers are rejected
tests := []struct {
name string
providerType string
url string
}{
{"telegram", "telegram", "telegram://token@telegram?chats=123"},
- {"webhook", "webhook", "https://example.com/webhook"},
{"slack", "slack", "https://hooks.slack.com/services/T/B/X"},
- {"gotify", "gotify", "https://gotify.example.com/message"},
{"pushover", "pushover", "pushover://token@user"},
}
@@ -1985,7 +1973,7 @@ func TestTestProvider_NotifyOnlyRejectsUnsupportedProvider(t *testing.T) {
err := svc.TestProvider(provider)
require.Error(t, err)
- assert.Contains(t, err.Error(), "only discord provider type is supported")
+ assert.Contains(t, err.Error(), "unsupported provider type")
})
}
}
@@ -2444,3 +2432,185 @@ func TestNotificationService_EnsureNotifyOnlyProviderMigration_FailsClosed(t *te
// - No log-and-continue pattern present
// - Boot will treat migration incompleteness as failure
}
+
+func TestIsDispatchEnabled_GotifyDefaultTrue(t *testing.T) {
+ db := setupNotificationTestDB(t)
+ _ = db.AutoMigrate(&models.Setting{})
+ svc := NewNotificationService(db)
+
+ // No feature flag row exists — should default to true
+ assert.True(t, svc.isDispatchEnabled("gotify"))
+}
+
+func TestIsDispatchEnabled_WebhookDefaultTrue(t *testing.T) {
+ db := setupNotificationTestDB(t)
+ _ = db.AutoMigrate(&models.Setting{})
+ svc := NewNotificationService(db)
+
+ // No feature flag row exists — should default to true
+ assert.True(t, svc.isDispatchEnabled("webhook"))
+}
+
+func TestTestProvider_GotifyWorksWithoutFeatureFlag(t *testing.T) {
+ db := setupNotificationTestDB(t)
+ _ = db.AutoMigrate(&models.Setting{})
+ svc := NewNotificationService(db)
+
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ }))
+ defer ts.Close()
+
+ provider := models.NotificationProvider{
+ Type: "gotify",
+ URL: ts.URL + "/message",
+ Template: "minimal",
+ }
+
+ err := svc.TestProvider(provider)
+ assert.NoError(t, err)
+}
+
+func TestTestProvider_WebhookWorksWithoutFeatureFlag(t *testing.T) {
+ db := setupNotificationTestDB(t)
+ _ = db.AutoMigrate(&models.Setting{})
+ svc := NewNotificationService(db)
+
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ }))
+ defer ts.Close()
+
+ provider := models.NotificationProvider{
+ Type: "webhook",
+ URL: ts.URL + "/webhook",
+ Template: "minimal",
+ }
+
+ err := svc.TestProvider(provider)
+ assert.NoError(t, err)
+}
+
+func TestTestProvider_GotifyWorksWhenFlagExplicitlyFalse(t *testing.T) {
+ db := setupNotificationTestDB(t)
+ _ = db.AutoMigrate(&models.Setting{})
+ svc := NewNotificationService(db)
+
+ // Explicitly set feature flag to false
+ db.Create(&models.Setting{Key: "feature.notifications.service.gotify.enabled", Value: "false"})
+
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ }))
+ defer ts.Close()
+
+ provider := models.NotificationProvider{
+ Type: "gotify",
+ URL: ts.URL + "/message",
+ Template: "minimal",
+ }
+
+ // TestProvider bypasses the dispatch gate, so even with flag=false it should work
+ err := svc.TestProvider(provider)
+ assert.NoError(t, err)
+}
+
+func TestTestProvider_WebhookWorksWhenFlagExplicitlyFalse(t *testing.T) {
+ db := setupNotificationTestDB(t)
+ _ = db.AutoMigrate(&models.Setting{})
+ svc := NewNotificationService(db)
+
+ // Explicitly set feature flag to false
+ db.Create(&models.Setting{Key: "feature.notifications.service.webhook.enabled", Value: "false"})
+
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ }))
+ defer ts.Close()
+
+ provider := models.NotificationProvider{
+ Type: "webhook",
+ URL: ts.URL + "/webhook",
+ Template: "minimal",
+ }
+
+ // TestProvider bypasses the dispatch gate, so even with flag=false it should work
+ err := svc.TestProvider(provider)
+ assert.NoError(t, err)
+}
+
+func TestUpdateProvider_TypeMutationBlocked(t *testing.T) {
+ db := setupNotificationTestDB(t)
+ svc := NewNotificationService(db)
+
+ existing := models.NotificationProvider{
+ ID: "prov-type-mut",
+ Type: "webhook",
+ Name: "Original",
+ URL: "https://example.com/hook",
+ }
+ require.NoError(t, db.Create(&existing).Error)
+
+ update := models.NotificationProvider{
+ ID: "prov-type-mut",
+ Type: "discord",
+ Name: "Changed",
+ URL: "https://discord.com/api/webhooks/123/abc",
+ }
+ err := svc.UpdateProvider(&update)
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "cannot change provider type")
+}
+
+func TestUpdateProvider_GotifyKeepsExistingToken(t *testing.T) {
+ db := setupNotificationTestDB(t)
+ svc := NewNotificationService(db)
+
+ existing := models.NotificationProvider{
+ ID: "prov-gotify-token",
+ Type: "gotify",
+ Name: "My Gotify",
+ URL: "https://gotify.example.com",
+ Token: "original-secret-token",
+ }
+ require.NoError(t, db.Create(&existing).Error)
+
+ update := models.NotificationProvider{
+ ID: "prov-gotify-token",
+ Type: "gotify",
+ Name: "My Gotify Updated",
+ URL: "https://gotify.example.com",
+ Token: "",
+ }
+ err := svc.UpdateProvider(&update)
+ require.NoError(t, err)
+ assert.Equal(t, "original-secret-token", update.Token)
+}
+
+func TestGetFeatureFlagValue_FoundSetting(t *testing.T) {
+ db := setupNotificationTestDB(t)
+ require.NoError(t, db.AutoMigrate(&models.Setting{}))
+ svc := NewNotificationService(db)
+
+ tests := []struct {
+ name string
+ value string
+ expected bool
+ }{
+ {"true_string", "true", true},
+ {"yes_string", "yes", true},
+ {"one_string", "1", true},
+ {"false_string", "false", false},
+ {"no_string", "no", false},
+ {"zero_string", "0", false},
+ {"whitespace_true", " True ", true},
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ db.Where("key = ?", "test.flag").Delete(&models.Setting{})
+ db.Create(&models.Setting{Key: "test.flag", Value: tt.value})
+ result := svc.getFeatureFlagValue("test.flag", false)
+ assert.Equal(t, tt.expected, result, "value=%q", tt.value)
+ })
+ }
+}
diff --git a/backend/internal/services/proxyhost_service.go b/backend/internal/services/proxyhost_service.go
index 98c419a6..ded58f08 100644
--- a/backend/internal/services/proxyhost_service.go
+++ b/backend/internal/services/proxyhost_service.go
@@ -227,7 +227,7 @@ func (s *ProxyHostService) GetByID(id uint) (*models.ProxyHost, error) {
// GetByUUID finds a proxy host by UUID.
func (s *ProxyHostService) GetByUUID(uuidStr string) (*models.ProxyHost, error) {
var host models.ProxyHost
- if err := s.db.Preload("Locations").Preload("Certificate").Preload("SecurityHeaderProfile").Where("uuid = ?", uuidStr).First(&host).Error; err != nil {
+ if err := s.db.Preload("Locations").Preload("Certificate").Preload("AccessList").Preload("SecurityHeaderProfile").Where("uuid = ?", uuidStr).First(&host).Error; err != nil {
return nil, err
}
return &host, nil
@@ -236,7 +236,7 @@ func (s *ProxyHostService) GetByUUID(uuidStr string) (*models.ProxyHost, error)
// List returns all proxy hosts.
func (s *ProxyHostService) List() ([]models.ProxyHost, error) {
var hosts []models.ProxyHost
- if err := s.db.Preload("Locations").Preload("Certificate").Preload("SecurityHeaderProfile").Order("updated_at desc").Find(&hosts).Error; err != nil {
+ if err := s.db.Preload("Locations").Preload("Certificate").Preload("AccessList").Preload("SecurityHeaderProfile").Order("updated_at desc").Find(&hosts).Error; err != nil {
return nil, err
}
return hosts, nil
diff --git a/backend/internal/services/uptime_service.go b/backend/internal/services/uptime_service.go
index 6da26b83..68c5628b 100644
--- a/backend/internal/services/uptime_service.go
+++ b/backend/internal/services/uptime_service.go
@@ -8,6 +8,7 @@ import (
"net"
"net/http"
"net/url"
+ "strconv"
"strings"
"sync"
"time"
@@ -372,12 +373,32 @@ func (s *UptimeService) CheckAll() {
// Check each host's monitors
for hostID, monitors := range hostMonitors {
- // If host is down, mark all monitors as down without individual checks
+ // If host is down, only short-circuit TCP monitors.
+ // HTTP/HTTPS monitors remain URL-truth authoritative and must still run checkMonitor.
if hostID != "" {
var uptimeHost models.UptimeHost
if err := s.DB.Where("id = ?", hostID).First(&uptimeHost).Error; err == nil {
if uptimeHost.Status == "down" {
- s.markHostMonitorsDown(monitors, &uptimeHost)
+ tcpMonitors := make([]models.UptimeMonitor, 0, len(monitors))
+ nonTCPMonitors := make([]models.UptimeMonitor, 0, len(monitors))
+
+ for _, monitor := range monitors {
+ normalizedType := strings.ToLower(strings.TrimSpace(monitor.Type))
+ if normalizedType == "tcp" {
+ tcpMonitors = append(tcpMonitors, monitor)
+ continue
+ }
+ nonTCPMonitors = append(nonTCPMonitors, monitor)
+ }
+
+ if len(tcpMonitors) > 0 {
+ s.markHostMonitorsDown(tcpMonitors, &uptimeHost)
+ }
+
+ for _, monitor := range nonTCPMonitors {
+ go s.checkMonitor(monitor)
+ }
+
continue
}
}
@@ -1184,3 +1205,112 @@ func (s *UptimeService) DeleteMonitor(id string) error {
return nil
}
+
+// SyncAndCheckForHost creates a monitor for the given proxy host (if one
+// doesn't already exist) and immediately triggers a health check in a
+// background goroutine. It is safe to call from any goroutine.
+//
+// Designed to be called as `go svc.SyncAndCheckForHost(hostID)` so it
+// does not block the API response.
+func (s *UptimeService) SyncAndCheckForHost(hostID uint) {
+ // Check feature flag — bail if uptime is disabled
+ var setting models.Setting
+ if err := s.DB.Where("key = ?", "feature.uptime.enabled").First(&setting).Error; err == nil {
+ if setting.Value != "true" {
+ return
+ }
+ }
+
+ // Per-host lock prevents duplicate monitors when multiple goroutines
+ // call SyncAndCheckForHost for the same hostID concurrently.
+ hostKey := fmt.Sprintf("proxy-%d", hostID)
+ s.hostMutexLock.Lock()
+ if s.hostMutexes[hostKey] == nil {
+ s.hostMutexes[hostKey] = &sync.Mutex{}
+ }
+ mu := s.hostMutexes[hostKey]
+ s.hostMutexLock.Unlock()
+
+ mu.Lock()
+ defer mu.Unlock()
+
+ // Look up the proxy host; it may have been deleted between the API
+ // response and this goroutine executing.
+ var host models.ProxyHost
+ if err := s.DB.Where("id = ?", hostID).First(&host).Error; err != nil {
+ hostIDStr := strconv.FormatUint(uint64(hostID), 10)
+ logger.Log().WithField("host_id", hostIDStr).Debug("SyncAndCheckForHost: proxy host not found (may have been deleted)")
+ return
+ }
+
+ // Ensure a monitor exists for this host
+ var monitor models.UptimeMonitor
+ err := s.DB.Where("proxy_host_id = ?", host.ID).First(&monitor).Error
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ domains := strings.Split(host.DomainNames, ",")
+ firstDomain := ""
+ if len(domains) > 0 {
+ firstDomain = strings.TrimSpace(domains[0])
+ }
+
+ scheme := "http"
+ if host.SSLForced {
+ scheme = "https"
+ }
+ publicURL := fmt.Sprintf("%s://%s", scheme, firstDomain)
+ upstreamHost := host.ForwardHost
+
+ name := host.Name
+ if name == "" {
+ name = firstDomain
+ }
+
+ uptimeHostID := s.ensureUptimeHost(upstreamHost, name)
+
+ monitor = models.UptimeMonitor{
+ ProxyHostID: &host.ID,
+ UptimeHostID: &uptimeHostID,
+ Name: name,
+ Type: "http",
+ URL: publicURL,
+ UpstreamHost: upstreamHost,
+ Interval: 60,
+ Enabled: true,
+ Status: "pending",
+ }
+ if createErr := s.DB.Create(&monitor).Error; createErr != nil {
+ logger.Log().WithError(createErr).WithField("host_id", host.ID).Error("SyncAndCheckForHost: failed to create monitor")
+ return
+ }
+ } else if err != nil {
+ logger.Log().WithError(err).WithField("host_id", host.ID).Error("SyncAndCheckForHost: failed to query monitor")
+ return
+ }
+
+ // Run health check immediately
+ s.checkMonitor(monitor)
+}
+
+// CleanupStaleFailureCounts resets monitors that are stuck in "down" status
+// with elevated failure counts from historical bugs (e.g., port mismatch era).
+// Only resets monitors with no recent successful heartbeat in the last 24 hours.
+func (s *UptimeService) CleanupStaleFailureCounts() error {
+ result := s.DB.Exec(`
+ UPDATE uptime_monitors SET failure_count = 0, status = 'pending'
+ WHERE status = 'down'
+ AND failure_count > 5
+ AND id NOT IN (
+ SELECT DISTINCT monitor_id FROM uptime_heartbeats
+ WHERE status = 'up' AND created_at > datetime('now', '-24 hours')
+ )
+ `)
+ if result.Error != nil {
+ return fmt.Errorf("cleanup stale failure counts: %w", result.Error)
+ }
+
+ if result.RowsAffected > 0 {
+ logger.Log().WithField("reset_count", result.RowsAffected).Info("Reset stale monitor failure counts")
+ }
+
+ return nil
+}
diff --git a/backend/internal/services/uptime_service_pr1_test.go b/backend/internal/services/uptime_service_pr1_test.go
new file mode 100644
index 00000000..162077ff
--- /dev/null
+++ b/backend/internal/services/uptime_service_pr1_test.go
@@ -0,0 +1,522 @@
+package services
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+ "testing"
+ "time"
+
+ "github.com/google/uuid"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "gorm.io/driver/sqlite"
+ "gorm.io/gorm"
+
+ "github.com/Wikid82/charon/backend/internal/models"
+)
+
+// setupPR1TestDB creates an in-memory SQLite database with all models needed
+// for PR-1 uptime bug fix tests.
+func setupPR1TestDB(t *testing.T) *gorm.DB {
+ t.Helper()
+ dir := t.TempDir()
+ dbPath := filepath.Join(dir, "pr1test.db")
+ dsn := dbPath + "?_journal_mode=WAL&_busy_timeout=5000"
+ db, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{})
+ require.NoError(t, err)
+ require.NoError(t, db.AutoMigrate(
+ &models.UptimeMonitor{},
+ &models.UptimeHeartbeat{},
+ &models.UptimeHost{},
+ &models.ProxyHost{},
+ &models.Setting{},
+ ))
+
+ t.Cleanup(func() {
+ sqlDB, _ := db.DB()
+ if sqlDB != nil {
+ _ = sqlDB.Close()
+ }
+ })
+
+ return db
+}
+
+// enableUptimeFeature sets the feature.uptime.enabled setting to "true".
+func enableUptimeFeature(t *testing.T, db *gorm.DB) {
+ t.Helper()
+ require.NoError(t, db.Create(&models.Setting{
+ Key: "feature.uptime.enabled",
+ Value: "true",
+ Type: "bool",
+ Category: "feature",
+ }).Error)
+}
+
+// createTestProxyHost creates a minimal proxy host for testing.
+func createTestProxyHost(t *testing.T, db *gorm.DB, name, domain, forwardHost string) models.ProxyHost {
+ t.Helper()
+ host := models.ProxyHost{
+ UUID: uuid.New().String(),
+ Name: name,
+ DomainNames: domain,
+ ForwardScheme: "http",
+ ForwardHost: forwardHost,
+ ForwardPort: 80,
+ Enabled: true,
+ }
+ require.NoError(t, db.Create(&host).Error)
+ return host
+}
+
+func createAlwaysOKServer(t *testing.T) *httptest.Server {
+ t.Helper()
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ }))
+ t.Cleanup(server.Close)
+ return server
+}
+
+func hostPortFromServerURL(serverURL string) string {
+ return strings.TrimPrefix(serverURL, "http://")
+}
+
+// --- Fix 1: Singleton UptimeService ---
+
+func TestSingletonUptimeService_SharedState(t *testing.T) {
+ db := setupPR1TestDB(t)
+ svc := NewUptimeService(db, nil)
+
+ // Verify both pendingNotifications and hostMutexes are the same instance
+ // by writing to the maps from the shared reference.
+ svc.pendingNotifications["test-key"] = &pendingHostNotification{}
+ assert.Contains(t, svc.pendingNotifications, "test-key",
+ "pendingNotifications should be shared on the same instance")
+
+ // A second reference to the same service should see the same map state.
+ svc2 := svc // simulate routes.go passing the same pointer
+ assert.Contains(t, svc2.pendingNotifications, "test-key",
+ "second reference must share the same pendingNotifications map")
+}
+
+// --- Fix 2: SyncAndCheckForHost ---
+
+func TestSyncAndCheckForHost_CreatesMonitorAndHeartbeat(t *testing.T) {
+ db := setupPR1TestDB(t)
+ enableUptimeFeature(t, db)
+ svc := NewUptimeService(db, nil)
+ server := createAlwaysOKServer(t)
+ domain := hostPortFromServerURL(server.URL)
+
+ host := createTestProxyHost(t, db, "test-host", domain, "192.168.1.100")
+
+ // Execute synchronously (normally called as goroutine)
+ svc.SyncAndCheckForHost(host.ID)
+
+ // Verify monitor was created
+ var monitor models.UptimeMonitor
+ err := db.Where("proxy_host_id = ?", host.ID).First(&monitor).Error
+ require.NoError(t, err, "monitor should be created for the proxy host")
+ assert.Equal(t, "http://"+domain, monitor.URL)
+ assert.Equal(t, "192.168.1.100", monitor.UpstreamHost)
+ assert.Contains(t, []string{"up", "down", "pending"}, monitor.Status, "status should be set by checkMonitor")
+
+ // Verify at least one heartbeat was created (from the immediate check)
+ var hbCount int64
+ db.Model(&models.UptimeHeartbeat{}).Where("monitor_id = ?", monitor.ID).Count(&hbCount)
+ assert.Greater(t, hbCount, int64(0), "at least one heartbeat should exist after SyncAndCheckForHost")
+}
+
+func TestSyncAndCheckForHost_SSLForcedUsesHTTPS(t *testing.T) {
+ db := setupPR1TestDB(t)
+ enableUptimeFeature(t, db)
+ svc := NewUptimeService(db, nil)
+ server := createAlwaysOKServer(t)
+ domain := hostPortFromServerURL(server.URL)
+
+ host := models.ProxyHost{
+ UUID: uuid.New().String(),
+ Name: "ssl-host",
+ DomainNames: domain,
+ ForwardScheme: "https",
+ ForwardHost: "192.168.1.200",
+ ForwardPort: 443,
+ SSLForced: true,
+ Enabled: true,
+ }
+ require.NoError(t, db.Create(&host).Error)
+
+ svc.SyncAndCheckForHost(host.ID)
+
+ var monitor models.UptimeMonitor
+ require.NoError(t, db.Where("proxy_host_id = ?", host.ID).First(&monitor).Error)
+ assert.Equal(t, "https://"+domain, monitor.URL)
+}
+
+func TestSyncAndCheckForHost_DeletedHostNoPanic(t *testing.T) {
+ db := setupPR1TestDB(t)
+ enableUptimeFeature(t, db)
+ svc := NewUptimeService(db, nil)
+
+ // Call with a host ID that doesn't exist — should log and return, not panic
+ assert.NotPanics(t, func() {
+ svc.SyncAndCheckForHost(99999)
+ })
+
+ // No monitor should be created
+ var count int64
+ db.Model(&models.UptimeMonitor{}).Count(&count)
+ assert.Equal(t, int64(0), count)
+}
+
+func TestSyncAndCheckForHost_ExistingMonitorSkipsCreate(t *testing.T) {
+ db := setupPR1TestDB(t)
+ enableUptimeFeature(t, db)
+ svc := NewUptimeService(db, nil)
+ server := createAlwaysOKServer(t)
+ domain := hostPortFromServerURL(server.URL)
+
+ host := createTestProxyHost(t, db, "existing-mon", domain, "10.0.0.1")
+
+ // Pre-create a monitor
+ existingMonitor := models.UptimeMonitor{
+ ID: uuid.New().String(),
+ ProxyHostID: &host.ID,
+ Name: "pre-existing",
+ Type: "http",
+ URL: "http://" + domain,
+ Interval: 60,
+ Enabled: true,
+ Status: "up",
+ }
+ require.NoError(t, db.Create(&existingMonitor).Error)
+
+ svc.SyncAndCheckForHost(host.ID)
+
+ // Should still be exactly 1 monitor
+ var count int64
+ db.Model(&models.UptimeMonitor{}).Where("proxy_host_id = ?", host.ID).Count(&count)
+ assert.Equal(t, int64(1), count, "should not create a duplicate monitor")
+}
+
+// --- Fix 2 continued: Feature flag test ---
+
+func TestSyncAndCheckForHost_DisabledFeatureNoop(t *testing.T) {
+ db := setupPR1TestDB(t)
+ // Explicitly set feature to disabled
+ require.NoError(t, db.Create(&models.Setting{
+ Key: "feature.uptime.enabled",
+ Value: "false",
+ Type: "bool",
+ Category: "feature",
+ }).Error)
+ svc := NewUptimeService(db, nil)
+ server := createAlwaysOKServer(t)
+ domain := hostPortFromServerURL(server.URL)
+
+ host := createTestProxyHost(t, db, "disabled-host", domain, "10.0.0.2")
+
+ svc.SyncAndCheckForHost(host.ID)
+
+ // No monitor should be created when feature is disabled
+ var count int64
+ db.Model(&models.UptimeMonitor{}).Where("proxy_host_id = ?", host.ID).Count(&count)
+ assert.Equal(t, int64(0), count, "no monitor should be created when feature is disabled")
+}
+
+func TestSyncAndCheckForHost_MissingSetting_StillCreates(t *testing.T) {
+ db := setupPR1TestDB(t)
+ // No setting at all — the method should proceed (default: enabled behavior)
+ svc := NewUptimeService(db, nil)
+ server := createAlwaysOKServer(t)
+ domain := hostPortFromServerURL(server.URL)
+
+ host := createTestProxyHost(t, db, "no-setting", domain, "10.0.0.3")
+
+ svc.SyncAndCheckForHost(host.ID)
+
+ var count int64
+ db.Model(&models.UptimeMonitor{}).Where("proxy_host_id = ?", host.ID).Count(&count)
+ assert.Greater(t, count, int64(0), "monitor should be created when setting is missing (default: enabled)")
+}
+
+func TestSyncAndCheckForHost_UsesDomainWhenHostNameMissing(t *testing.T) {
+ db := setupPR1TestDB(t)
+ enableUptimeFeature(t, db)
+ svc := NewUptimeService(db, nil)
+ server := createAlwaysOKServer(t)
+ domain := hostPortFromServerURL(server.URL)
+
+ host := createTestProxyHost(t, db, "", domain, "10.10.10.10")
+
+ svc.SyncAndCheckForHost(host.ID)
+
+ var monitor models.UptimeMonitor
+ require.NoError(t, db.Where("proxy_host_id = ?", host.ID).First(&monitor).Error)
+ assert.Equal(t, domain, monitor.Name)
+}
+
+func TestSyncAndCheckForHost_CreateMonitorError_ReturnsWithoutPanic(t *testing.T) {
+ db := setupPR1TestDB(t)
+ enableUptimeFeature(t, db)
+ svc := NewUptimeService(db, nil)
+ server := createAlwaysOKServer(t)
+ domain := hostPortFromServerURL(server.URL)
+
+ host := createTestProxyHost(t, db, "create-error-host", domain, "10.10.10.11")
+
+ callbackName := "test:force_uptime_monitor_create_error"
+ require.NoError(t, db.Callback().Create().Before("gorm:create").Register(callbackName, func(tx *gorm.DB) {
+ if tx.Statement != nil && tx.Statement.Schema != nil && tx.Statement.Schema.Name == "UptimeMonitor" {
+ _ = tx.AddError(errors.New("forced uptime monitor create error"))
+ }
+ }))
+ t.Cleanup(func() {
+ _ = db.Callback().Create().Remove(callbackName)
+ })
+
+ assert.NotPanics(t, func() {
+ svc.SyncAndCheckForHost(host.ID)
+ })
+
+ var count int64
+ db.Model(&models.UptimeMonitor{}).Where("proxy_host_id = ?", host.ID).Count(&count)
+ assert.Equal(t, int64(0), count)
+}
+
+func TestSyncAndCheckForHost_QueryMonitorError_ReturnsWithoutPanic(t *testing.T) {
+ db := setupPR1TestDB(t)
+ enableUptimeFeature(t, db)
+ svc := NewUptimeService(db, nil)
+ host := createTestProxyHost(t, db, "query-error-host", "query-error.example.com", "10.10.10.12")
+
+ require.NoError(t, db.Migrator().DropTable(&models.UptimeMonitor{}))
+
+ assert.NotPanics(t, func() {
+ svc.SyncAndCheckForHost(host.ID)
+ })
+}
+
+// --- Fix 4: CleanupStaleFailureCounts ---
+
+func TestCleanupStaleFailureCounts_ResetsStuckMonitors(t *testing.T) {
+ db := setupPR1TestDB(t)
+ svc := NewUptimeService(db, nil)
+
+ // Create a "stuck" monitor: down, failure_count > 5, no recent UP heartbeat
+ stuckMonitor := models.UptimeMonitor{
+ ID: uuid.New().String(),
+ Name: "stuck-monitor",
+ Type: "http",
+ URL: "http://stuck.example.com",
+ Interval: 60,
+ Enabled: true,
+ Status: "down",
+ FailureCount: 10,
+ }
+ require.NoError(t, db.Create(&stuckMonitor).Error)
+
+ err := svc.CleanupStaleFailureCounts()
+ require.NoError(t, err)
+
+ // Verify the monitor was reset
+ var m models.UptimeMonitor
+ require.NoError(t, db.First(&m, "id = ?", stuckMonitor.ID).Error)
+ assert.Equal(t, 0, m.FailureCount, "failure_count should be reset to 0")
+ assert.Equal(t, "pending", m.Status, "status should be reset to pending")
+}
+
+func TestCleanupStaleFailureCounts_SkipsMonitorsWithRecentUpHeartbeat(t *testing.T) {
+ db := setupPR1TestDB(t)
+ svc := NewUptimeService(db, nil)
+
+ // Create a monitor that is "down" with high failure_count BUT has a recent UP heartbeat
+ healthyMonitor := models.UptimeMonitor{
+ ID: uuid.New().String(),
+ Name: "healthy-monitor",
+ Type: "http",
+ URL: "http://healthy.example.com",
+ Interval: 60,
+ Enabled: true,
+ Status: "down",
+ FailureCount: 10,
+ }
+ require.NoError(t, db.Create(&healthyMonitor).Error)
+
+ // Add a recent UP heartbeat
+ hb := models.UptimeHeartbeat{
+ MonitorID: healthyMonitor.ID,
+ Status: "up",
+ Latency: 50,
+ CreatedAt: time.Now().Add(-1 * time.Hour), // 1 hour ago — within 24h window
+ }
+ require.NoError(t, db.Create(&hb).Error)
+
+ err := svc.CleanupStaleFailureCounts()
+ require.NoError(t, err)
+
+ // Monitor should NOT be reset because it has a recent UP heartbeat
+ var m models.UptimeMonitor
+ require.NoError(t, db.First(&m, "id = ?", healthyMonitor.ID).Error)
+ assert.Equal(t, 10, m.FailureCount, "failure_count should NOT be reset since there's a recent UP heartbeat")
+ assert.Equal(t, "down", m.Status, "status should remain down")
+}
+
+func TestCleanupStaleFailureCounts_SkipsLowFailureCount(t *testing.T) {
+ db := setupPR1TestDB(t)
+ svc := NewUptimeService(db, nil)
+
+ // Monitor with failure_count <= 5 — should not be touched
+ monitor := models.UptimeMonitor{
+ ID: uuid.New().String(),
+ Name: "low-failure-monitor",
+ Type: "http",
+ URL: "http://low.example.com",
+ Interval: 60,
+ Enabled: true,
+ Status: "down",
+ FailureCount: 3,
+ }
+ require.NoError(t, db.Create(&monitor).Error)
+
+ err := svc.CleanupStaleFailureCounts()
+ require.NoError(t, err)
+
+ var m models.UptimeMonitor
+ require.NoError(t, db.First(&m, "id = ?", monitor.ID).Error)
+ assert.Equal(t, 3, m.FailureCount, "low failure_count should not be reset")
+ assert.Equal(t, "down", m.Status)
+}
+
+func TestCleanupStaleFailureCounts_DoesNotResetDownHosts(t *testing.T) {
+ db := setupPR1TestDB(t)
+ svc := NewUptimeService(db, nil)
+
+ // Create a host that is currently down.
+ host := models.UptimeHost{
+ ID: uuid.New().String(),
+ Host: "stuck-host.local",
+ Name: "stuck-host",
+ Status: "down",
+ FailureCount: 10,
+ }
+ require.NoError(t, db.Create(&host).Error)
+
+ err := svc.CleanupStaleFailureCounts()
+ require.NoError(t, err)
+
+ var h models.UptimeHost
+ require.NoError(t, db.First(&h, "id = ?", host.ID).Error)
+ assert.Equal(t, 10, h.FailureCount, "cleanup must not reset host failure_count")
+ assert.Equal(t, "down", h.Status, "cleanup must not reset host status")
+}
+
+func TestCleanupStaleFailureCounts_ReturnsErrorWhenDatabaseUnavailable(t *testing.T) {
+ db := setupPR1TestDB(t)
+ svc := NewUptimeService(db, nil)
+
+ sqlDB, err := db.DB()
+ require.NoError(t, err)
+ require.NoError(t, sqlDB.Close())
+
+ err = svc.CleanupStaleFailureCounts()
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "cleanup stale failure counts")
+}
+
+// setupPR1ConcurrentDB creates a file-based SQLite database with WAL mode and
+// busy_timeout to handle concurrent writes without "database table is locked".
+func setupPR1ConcurrentDB(t *testing.T) *gorm.DB {
+ t.Helper()
+ dir := t.TempDir()
+ dbPath := filepath.Join(dir, "test.db")
+ dsn := dbPath + "?_journal_mode=WAL&_busy_timeout=5000"
+ db, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{})
+ require.NoError(t, err)
+ require.NoError(t, db.AutoMigrate(
+ &models.UptimeMonitor{},
+ &models.UptimeHeartbeat{},
+ &models.UptimeHost{},
+ &models.ProxyHost{},
+ &models.Setting{},
+ ))
+
+ t.Cleanup(func() {
+ sqlDB, _ := db.DB()
+ if sqlDB != nil {
+ _ = sqlDB.Close()
+ }
+ _ = os.Remove(dbPath)
+ })
+
+ return db
+}
+
+// --- Concurrent access tests ---
+
+func TestSyncAndCheckForHost_ConcurrentCreates_NoDuplicates(t *testing.T) {
+ db := setupPR1ConcurrentDB(t)
+ enableUptimeFeature(t, db)
+ svc := NewUptimeService(db, nil)
+ server := createAlwaysOKServer(t)
+ domain := hostPortFromServerURL(server.URL)
+
+ // Create multiple proxy hosts with unique domains
+ hosts := make([]models.ProxyHost, 5)
+ for i := range hosts {
+ hosts[i] = createTestProxyHost(t, db,
+ fmt.Sprintf("concurrent-host-%d", i),
+ domain,
+ fmt.Sprintf("10.0.0.%d", 100+i),
+ )
+ }
+
+ var wg sync.WaitGroup
+ for _, h := range hosts {
+ wg.Add(1)
+ go func(hostID uint) {
+ defer wg.Done()
+ svc.SyncAndCheckForHost(hostID)
+ }(h.ID)
+ }
+ wg.Wait()
+
+ // Each host should have exactly 1 monitor
+ for _, h := range hosts {
+ var count int64
+ db.Model(&models.UptimeMonitor{}).Where("proxy_host_id = ?", h.ID).Count(&count)
+ assert.Equal(t, int64(1), count, "each proxy host should have exactly 1 monitor")
+ }
+}
+
+func TestSyncAndCheckForHost_ConcurrentSameHost_NoDuplicates(t *testing.T) {
+ db := setupPR1ConcurrentDB(t)
+ enableUptimeFeature(t, db)
+ svc := NewUptimeService(db, nil)
+ server := createAlwaysOKServer(t)
+ domain := hostPortFromServerURL(server.URL)
+
+ host := createTestProxyHost(t, db, "race-host", domain, "10.0.0.200")
+
+ var wg sync.WaitGroup
+ for i := 0; i < 10; i++ {
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ svc.SyncAndCheckForHost(host.ID)
+ }()
+ }
+ wg.Wait()
+
+ // Should still be exactly 1 monitor even after 10 concurrent calls
+ var count int64
+ db.Model(&models.UptimeMonitor{}).Where("proxy_host_id = ?", host.ID).Count(&count)
+ assert.Equal(t, int64(1), count, "concurrent SyncAndCheckForHost should not create duplicates")
+}
diff --git a/backend/internal/services/uptime_service_test.go b/backend/internal/services/uptime_service_test.go
index d9fc526a..e5480ce1 100644
--- a/backend/internal/services/uptime_service_test.go
+++ b/backend/internal/services/uptime_service_test.go
@@ -820,6 +820,277 @@ func TestUptimeService_CheckAll_Errors(t *testing.T) {
})
}
+func TestUptimeService_CheckAll_HostDown_PartitionsByMonitorType(t *testing.T) {
+ db := setupUptimeTestDB(t)
+ ns := NewNotificationService(db)
+ us := newTestUptimeService(t, db, ns)
+
+ us.config.TCPTimeout = 50 * time.Millisecond
+ us.config.MaxRetries = 0
+ us.config.FailureThreshold = 1
+ us.config.CheckTimeout = 2 * time.Second
+
+ listener, err := net.Listen("tcp", "127.0.0.1:0")
+ assert.NoError(t, err)
+ addr := listener.Addr().(*net.TCPAddr)
+
+ server := &http.Server{
+ Handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ }),
+ ReadHeaderTimeout: 10 * time.Second,
+ }
+ go func() { _ = server.Serve(listener) }()
+ t.Cleanup(func() {
+ _ = server.Close()
+ _ = listener.Close()
+ })
+
+ closedListener, err := net.Listen("tcp", "127.0.0.1:0")
+ assert.NoError(t, err)
+ closedPort := closedListener.Addr().(*net.TCPAddr).Port
+ _ = closedListener.Close()
+
+ uptimeHost := models.UptimeHost{
+ Host: "127.0.0.2",
+ Name: "Down Host",
+ Status: "pending",
+ }
+ err = db.Create(&uptimeHost).Error
+ assert.NoError(t, err)
+
+ hostID := uptimeHost.ID
+ httpMonitor := models.UptimeMonitor{
+ ID: "hostdown-http-monitor",
+ Name: "HTTP Monitor",
+ Type: "http",
+ URL: fmt.Sprintf("http://127.0.0.1:%d", addr.Port),
+ Enabled: true,
+ Status: "pending",
+ UptimeHostID: &hostID,
+ MaxRetries: 1,
+ }
+ tcpMonitor := models.UptimeMonitor{
+ ID: "hostdown-tcp-monitor",
+ Name: "TCP Monitor",
+ Type: "tcp",
+ URL: fmt.Sprintf("127.0.0.2:%d", closedPort),
+ Enabled: true,
+ Status: "up",
+ UptimeHostID: &hostID,
+ MaxRetries: 1,
+ }
+ err = db.Create(&httpMonitor).Error
+ assert.NoError(t, err)
+ err = db.Create(&tcpMonitor).Error
+ assert.NoError(t, err)
+
+ us.CheckAll()
+
+ assert.Eventually(t, func() bool {
+ var refreshed models.UptimeHost
+ if db.Where("id = ?", uptimeHost.ID).First(&refreshed).Error != nil {
+ return false
+ }
+ return refreshed.Status == "down"
+ }, 3*time.Second, 25*time.Millisecond)
+
+ assert.Eventually(t, func() bool {
+ var refreshed models.UptimeMonitor
+ if db.Where("id = ?", httpMonitor.ID).First(&refreshed).Error != nil {
+ return false
+ }
+ return refreshed.Status == "up"
+ }, 3*time.Second, 25*time.Millisecond)
+
+ assert.Eventually(t, func() bool {
+ var refreshed models.UptimeMonitor
+ if db.Where("id = ?", tcpMonitor.ID).First(&refreshed).Error != nil {
+ return false
+ }
+ return refreshed.Status == "down"
+ }, 3*time.Second, 25*time.Millisecond)
+
+ var httpHeartbeat models.UptimeHeartbeat
+ err = db.Where("monitor_id = ?", httpMonitor.ID).Order("created_at desc").First(&httpHeartbeat).Error
+ assert.NoError(t, err)
+ assert.Equal(t, "up", httpHeartbeat.Status)
+ assert.Contains(t, httpHeartbeat.Message, "HTTP 200")
+ assert.NotContains(t, httpHeartbeat.Message, "Host unreachable")
+
+ var tcpHeartbeat models.UptimeHeartbeat
+ err = db.Where("monitor_id = ?", tcpMonitor.ID).Order("created_at desc").First(&tcpHeartbeat).Error
+ assert.NoError(t, err)
+ assert.Equal(t, "down", tcpHeartbeat.Status)
+ assert.Equal(t, "Host unreachable", tcpHeartbeat.Message)
+}
+
+func TestUptimeService_CheckAll_ManualScheduledParity_ForHTTPOnHostDown(t *testing.T) {
+ db := setupUptimeTestDB(t)
+ ns := NewNotificationService(db)
+ us := newTestUptimeService(t, db, ns)
+
+ us.config.TCPTimeout = 50 * time.Millisecond
+ us.config.MaxRetries = 0
+ us.config.FailureThreshold = 1
+ us.config.CheckTimeout = 2 * time.Second
+
+ listener, err := net.Listen("tcp", "127.0.0.1:0")
+ assert.NoError(t, err)
+ addr := listener.Addr().(*net.TCPAddr)
+
+ server := &http.Server{
+ Handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ }),
+ ReadHeaderTimeout: 10 * time.Second,
+ }
+ go func() { _ = server.Serve(listener) }()
+ t.Cleanup(func() {
+ _ = server.Close()
+ _ = listener.Close()
+ })
+
+ uptimeHost := models.UptimeHost{
+ Host: "127.0.0.2",
+ Name: "Parity Host",
+ Status: "pending",
+ }
+ err = db.Create(&uptimeHost).Error
+ assert.NoError(t, err)
+
+ hostID := uptimeHost.ID
+ manualMonitor := models.UptimeMonitor{
+ ID: "manual-http-parity",
+ Name: "Manual HTTP",
+ Type: "http",
+ URL: fmt.Sprintf("http://127.0.0.1:%d", addr.Port),
+ Enabled: true,
+ Status: "pending",
+ UptimeHostID: &hostID,
+ MaxRetries: 1,
+ }
+ scheduledMonitor := models.UptimeMonitor{
+ ID: "scheduled-http-parity",
+ Name: "Scheduled HTTP",
+ Type: "http",
+ URL: fmt.Sprintf("http://127.0.0.1:%d", addr.Port),
+ Enabled: true,
+ Status: "pending",
+ UptimeHostID: &hostID,
+ MaxRetries: 1,
+ }
+ err = db.Create(&manualMonitor).Error
+ assert.NoError(t, err)
+ err = db.Create(&scheduledMonitor).Error
+ assert.NoError(t, err)
+
+ us.CheckMonitor(manualMonitor)
+
+ assert.Eventually(t, func() bool {
+ var refreshed models.UptimeMonitor
+ if db.Where("id = ?", manualMonitor.ID).First(&refreshed).Error != nil {
+ return false
+ }
+ return refreshed.Status == "up"
+ }, 2*time.Second, 25*time.Millisecond)
+
+ us.CheckAll()
+
+ assert.Eventually(t, func() bool {
+ var refreshed models.UptimeMonitor
+ if db.Where("id = ?", scheduledMonitor.ID).First(&refreshed).Error != nil {
+ return false
+ }
+ return refreshed.Status == "up"
+ }, 3*time.Second, 25*time.Millisecond)
+
+ var manualResult models.UptimeMonitor
+ err = db.Where("id = ?", manualMonitor.ID).First(&manualResult).Error
+ assert.NoError(t, err)
+
+ var scheduledResult models.UptimeMonitor
+ err = db.Where("id = ?", scheduledMonitor.ID).First(&scheduledResult).Error
+ assert.NoError(t, err)
+
+ assert.Equal(t, "up", manualResult.Status)
+ assert.Equal(t, manualResult.Status, scheduledResult.Status)
+}
+
+func TestUptimeService_CheckAll_ReachableHost_StillUsesHTTPResult(t *testing.T) {
+ db := setupUptimeTestDB(t)
+ ns := NewNotificationService(db)
+ us := newTestUptimeService(t, db, ns)
+
+ us.config.TCPTimeout = 50 * time.Millisecond
+ us.config.MaxRetries = 0
+ us.config.FailureThreshold = 1
+ us.config.CheckTimeout = 2 * time.Second
+
+ listener, err := net.Listen("tcp", "127.0.0.1:0")
+ assert.NoError(t, err)
+ addr := listener.Addr().(*net.TCPAddr)
+
+ server := &http.Server{
+ Handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusInternalServerError)
+ }),
+ ReadHeaderTimeout: 10 * time.Second,
+ }
+ go func() { _ = server.Serve(listener) }()
+ t.Cleanup(func() {
+ _ = server.Close()
+ _ = listener.Close()
+ })
+
+ uptimeHost := models.UptimeHost{
+ Host: "127.0.0.1",
+ Name: "Reachable Host",
+ Status: "pending",
+ }
+ err = db.Create(&uptimeHost).Error
+ assert.NoError(t, err)
+
+ hostID := uptimeHost.ID
+ httpMonitor := models.UptimeMonitor{
+ ID: "reachable-host-http-fail",
+ Name: "Reachable Host HTTP Failure",
+ Type: "http",
+ URL: fmt.Sprintf("http://127.0.0.1:%d", addr.Port),
+ Enabled: true,
+ Status: "pending",
+ UptimeHostID: &hostID,
+ MaxRetries: 1,
+ }
+ err = db.Create(&httpMonitor).Error
+ assert.NoError(t, err)
+
+ us.CheckAll()
+
+ assert.Eventually(t, func() bool {
+ var refreshedHost models.UptimeHost
+ if db.Where("id = ?", uptimeHost.ID).First(&refreshedHost).Error != nil {
+ return false
+ }
+ return refreshedHost.Status == "up"
+ }, 3*time.Second, 25*time.Millisecond)
+
+ assert.Eventually(t, func() bool {
+ var refreshed models.UptimeMonitor
+ if db.Where("id = ?", httpMonitor.ID).First(&refreshed).Error != nil {
+ return false
+ }
+ return refreshed.Status == "down"
+ }, 3*time.Second, 25*time.Millisecond)
+
+ var heartbeat models.UptimeHeartbeat
+ err = db.Where("monitor_id = ?", httpMonitor.ID).Order("created_at desc").First(&heartbeat).Error
+ assert.NoError(t, err)
+ assert.Equal(t, "down", heartbeat.Status)
+ assert.Contains(t, heartbeat.Message, "HTTP 500")
+ assert.NotContains(t, heartbeat.Message, "Host unreachable")
+}
+
func TestUptimeService_CheckMonitor_EdgeCases(t *testing.T) {
t.Run("invalid URL format", func(t *testing.T) {
db := setupUptimeTestDB(t)
diff --git a/codecov.yml b/codecov.yml
index 9463cfb1..58082dfd 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -7,8 +7,8 @@ coverage:
status:
project:
default:
- target: 85%
- threshold: 0%
+ target: 87%
+ threshold: 1%
# Fail CI if Codecov upload/report indicates a problem
require_ci_to_pass: yes
@@ -74,10 +74,6 @@ ignore:
- "backend/*.html"
- "backend/codeql-db/**"
- # Docker-only code (not testable in CI)
- - "backend/internal/services/docker_service.go"
- - "backend/internal/api/handlers/docker_handler.go"
-
# CodeQL artifacts
- "codeql-db/**"
- "codeql-db-*/**"
diff --git a/docs/features.md b/docs/features.md
index c2b9bffa..056b448c 100644
--- a/docs/features.md
+++ b/docs/features.md
@@ -237,7 +237,7 @@ Watch requests flow through your proxy in real-time. Filter by domain, status co
### 🔔 Notifications
-Get alerted when it matters. Charon currently sends notifications through Discord webhooks using the Notify engine only. No legacy fallback path is used at runtime. Additional providers will roll out later in staged updates.
+Get alerted when it matters. Charon notifications now run through the Notify HTTP wrapper with support for Discord, Gotify, and Custom Webhook providers. Payload-focused test coverage is included to help catch formatting and delivery regressions before release.
→ [Learn More](features/notifications.md)
diff --git a/docs/features/notifications.md b/docs/features/notifications.md
index 8aa5aee8..e9e06bb4 100644
--- a/docs/features/notifications.md
+++ b/docs/features/notifications.md
@@ -11,11 +11,13 @@ Notifications can be triggered by various events:
- **Security Events**: WAF blocks, CrowdSec alerts, ACL violations
- **System Events**: Configuration changes, backup completions
-## Supported Service (Current Rollout)
+## Supported Services
| Service | JSON Templates | Native API | Rich Formatting |
|---------|----------------|------------|-----------------|
| **Discord** | ✅ Yes | ✅ Webhooks | ✅ Embeds |
+| **Gotify** | ✅ Yes | ✅ HTTP API | ✅ Priority + Extras |
+| **Custom Webhook** | ✅ Yes | ✅ HTTP API | ✅ Template-Controlled |
Additional providers are planned for later staged releases.
@@ -41,7 +43,7 @@ JSON templates give you complete control over notification formatting, allowing
### JSON Template Support
-For the currently supported service (Discord), you can choose from three template options:
+For current services (Discord, Gotify, and Custom Webhook), you can choose from three template options.
#### 1. Minimal Template (Default)
@@ -157,9 +159,9 @@ Discord supports rich embeds with colors, fields, and timestamps.
## Planned Provider Expansion
-Additional providers (for example Slack, Gotify, Telegram, and generic webhooks)
-are planned for later staged releases. This page will be expanded as each
-provider is validated and released.
+Additional providers (for example Slack and Telegram) are planned for later
+staged releases. This page will be expanded as each provider is validated and
+released.
## Template Variables
@@ -228,9 +230,13 @@ Template: detailed (or custom)
4. Test the notification
5. Save changes
-If you previously used non-Discord provider types, keep those entries as
-historical records only. They are not active runtime dispatch paths in the
-current rollout.
+Gotify and Custom Webhook providers are active runtime paths in the current
+rollout and can be used in production.
+
+## Validation Coverage
+
+The current rollout includes payload-focused notification tests to catch
+formatting and delivery regressions across provider types before release.
### Testing Your Template
diff --git a/docs/getting-started.md b/docs/getting-started.md
index 0c9f6d25..f4ac3076 100644
--- a/docs/getting-started.md
+++ b/docs/getting-started.md
@@ -89,6 +89,44 @@ docker run -d \
**Open ** in your browser!
+### Docker Socket Access (Important)
+
+Charon runs as a non-root user inside the container. To discover your other Docker containers, it needs permission to read the Docker socket. Without this, you'll see a "Docker Connection Failed" message in the UI.
+
+**Step 1:** Find your Docker socket's group ID:
+
+```bash
+stat -c '%g' /var/run/docker.sock
+```
+
+This prints a number (for example, `998` or `999`).
+
+**Step 2:** Add that number to your compose file under `group_add`:
+
+```yaml
+services:
+ charon:
+ image: wikid82/charon:latest
+ group_add:
+ - "998" # <-- replace with your number from Step 1
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock:ro
+ # ... rest of your config
+```
+
+**Using `docker run` instead?** Add `--group-add ` to your command:
+
+```bash
+docker run -d \
+ --name charon \
+ --group-add 998 \
+ -v /var/run/docker.sock:/var/run/docker.sock:ro \
+ # ... rest of your flags
+ wikid82/charon:latest
+```
+
+**Why is this needed?** The Docker socket is owned by a specific group on your host machine. Adding that group lets Charon read the socket without running as root—keeping your setup secure.
+
---
## Step 1.5: Database Migrations (If Upgrading)
diff --git a/docs/implementation/WORKFLOW_REVIEW_2026-01-26.md b/docs/implementation/WORKFLOW_REVIEW_2026-01-26.md
index c82ca778..e9099914 100644
--- a/docs/implementation/WORKFLOW_REVIEW_2026-01-26.md
+++ b/docs/implementation/WORKFLOW_REVIEW_2026-01-26.md
@@ -159,7 +159,8 @@ A new scheduled workflow and helper script were added to safely prune old contai
- **Files added**:
- `.github/workflows/container-prune.yml` (weekly schedule, manual dispatch)
- - `scripts/prune-container-images.sh` (dry-run by default; supports GHCR and Docker Hub)
+ - `scripts/prune-ghcr.sh` (GHCR cleanup)
+ - `scripts/prune-dockerhub.sh` (Docker Hub cleanup)
- **Behavior**:
- Default: **dry-run=true** (no destructive changes).
diff --git a/docs/issues/manual_test_acl_security_headers_dropdown_hotfix.md b/docs/issues/manual_test_acl_security_headers_dropdown_hotfix.md
new file mode 100644
index 00000000..23abaef4
--- /dev/null
+++ b/docs/issues/manual_test_acl_security_headers_dropdown_hotfix.md
@@ -0,0 +1,77 @@
+## Manual Test Plan — ACL + Security Headers Dropdown Hotfix
+
+- Date: 2026-02-27
+- Scope: Proxy Host create/edit dropdown persistence
+- Goal: Confirm ACL and Security Headers selections save correctly, can be changed, and can be cleared without regressions.
+
+## Preconditions
+
+- [ ] Charon is running and reachable in browser
+- [ ] At least 2 Access Lists exist
+- [ ] At least 2 Security Headers profiles exist
+- [ ] Tester has permission to create and edit Proxy Hosts
+
+## Test Cases
+
+### TC-001 — Create Host With Both Dropdowns Set
+
+- Steps:
+ 1. Open Proxy Hosts and start creating a new host.
+ 2. Fill required host fields.
+ 3. Select any Access List.
+ 4. Select any Security Headers profile.
+ 5. Save.
+ 6. Reopen the same host in edit mode.
+- Expected:
+ - The selected Access List remains selected.
+ - The selected Security Headers profile remains selected.
+
+### TC-002 — Edit Host And Change Both Selections
+
+- Steps:
+ 1. Open an existing host that already has both values set.
+ 2. Change Access List to a different option.
+ 3. Change Security Headers to a different option.
+ 4. Save.
+ 5. Reopen the host.
+- Expected:
+ - New Access List is persisted.
+ - New Security Headers profile is persisted.
+ - Previous values are not shown.
+
+### TC-003 — Clear Access List
+
+- Steps:
+ 1. Open an existing host with an Access List selected.
+ 2. Set Access List to no selection.
+ 3. Save.
+ 4. Reopen the host.
+- Expected:
+ - Access List is empty (none).
+ - No old Access List value returns.
+
+### TC-004 — Clear Security Headers
+
+- Steps:
+ 1. Open an existing host with a Security Headers profile selected.
+ 2. Set Security Headers to no selection.
+ 3. Save.
+ 4. Reopen the host.
+- Expected:
+ - Security Headers is empty (none).
+ - No old profile value returns.
+
+### TC-005 — Regression Guard: Repeated Edit Cycles
+
+- Steps:
+ 1. Repeat edit/save cycle 3 times on one host.
+ 2. Alternate between selecting values and clearing values for both dropdowns.
+ 3. After each save, reopen the host.
+- Expected:
+ - Last saved choice is always what appears after reopen.
+ - No mismatch between what was selected and what is shown.
+
+## Execution Notes
+
+- Targeted tests for this hotfix are already passing.
+- Full-suite, security, and coverage gates are deferred to CI/end pass.
diff --git a/docs/issues/manual_test_notify_wrapper_gotify_webhook_regression_tracking.md b/docs/issues/manual_test_notify_wrapper_gotify_webhook_regression_tracking.md
new file mode 100644
index 00000000..63b7f30e
--- /dev/null
+++ b/docs/issues/manual_test_notify_wrapper_gotify_webhook_regression_tracking.md
@@ -0,0 +1,69 @@
+---
+title: Manual Test Tracking Plan - Notify Wrapper (Gotify + Custom Webhook)
+status: Open
+priority: High
+assignee: QA
+labels: testing, notifications, backend, frontend, security
+---
+
+# Test Goal
+Track manual verification for bugs and regressions after the Notify migration that added HTTP wrapper delivery for Gotify and Custom Webhook providers.
+
+# Scope
+- Provider creation and editing for Gotify and Custom Webhook
+- Send Test and Preview behavior
+- Payload rendering and delivery behavior
+- Secret handling and error-message safety
+- Existing Discord behavior regression checks
+
+# Preconditions
+- Charon is running and reachable in a browser.
+- Tester can open Settings → Notifications.
+- Tester has reachable endpoints for:
+ - One Gotify instance
+ - One custom webhook receiver
+
+## 1) Smoke Path - Provider CRUD
+- [ ] Create a Gotify provider with valid URL and token, save successfully.
+- [ ] Create a Custom Webhook provider with valid URL, save successfully.
+- [ ] Refresh and confirm both providers persist with expected non-secret fields.
+- [ ] Edit each provider, save changes, refresh, and confirm updates persist.
+
+## 2) Smoke Path - Test and Preview
+- [ ] Run Send Test for Gotify provider and confirm successful delivery.
+- [ ] Run Send Test for Custom Webhook provider and confirm successful delivery.
+- [ ] Run Preview for both providers and confirm payload is rendered as expected.
+- [ ] Confirm Discord provider test/preview still works.
+
+## 3) Payload Regression Checks
+- [ ] Validate minimal payload template sends correctly.
+- [ ] Validate detailed payload template sends correctly.
+- [ ] Validate custom payload template sends correctly.
+- [ ] Verify special characters and multi-line content render correctly.
+- [ ] Verify payload output remains stable after provider edit + save.
+
+## 4) Secret and Error Safety Checks
+- [ ] Confirm Gotify token is never shown in list/readback UI.
+- [ ] Confirm Gotify token is not exposed in test/preview responses shown in UI.
+- [ ] Trigger a failed test (invalid endpoint) and confirm error text is clear but does not expose secrets.
+- [ ] Confirm failed requests do not leak sensitive values in user-visible error content.
+
+## 5) Failure-Mode and Recovery Checks
+- [ ] Test with unreachable endpoint and confirm failure is reported clearly.
+- [ ] Test with malformed URL and confirm validation blocks save.
+- [ ] Test with slow endpoint and confirm UI remains responsive and recoverable.
+- [ ] Fix endpoint values and confirm retry succeeds without recreating provider.
+
+## 6) Cross-Provider Regression Checks
+- [ ] Confirm Gotify changes do not alter Custom Webhook settings.
+- [ ] Confirm Custom Webhook changes do not alter Discord settings.
+- [ ] Confirm deleting one provider does not corrupt remaining providers.
+
+## Pass/Fail Criteria
+- [ ] PASS when all smoke checks pass, payload output is correct, secrets stay hidden, and no cross-provider regressions are found.
+- [ ] FAIL when delivery breaks, payload rendering regresses, secrets are exposed, or provider changes affect unrelated providers.
+
+## Defect Tracking Notes
+- [ ] For each defect, record provider type, action, expected result, actual result, and severity.
+- [ ] Attach screenshot/video where useful.
+- [ ] Mark whether defect is release-blocking.
diff --git a/docs/issues/manual_test_pr1_caddy_compatibility_closure.md b/docs/issues/manual_test_pr1_caddy_compatibility_closure.md
new file mode 100644
index 00000000..b46d9711
--- /dev/null
+++ b/docs/issues/manual_test_pr1_caddy_compatibility_closure.md
@@ -0,0 +1,95 @@
+## Manual Test Tracking Plan — PR-1 Caddy Compatibility Closure
+
+- Date: 2026-02-23
+- Scope: PR-1 only
+- Goal: Track potential bugs in the completed PR-1 slice and confirm safe promotion.
+
+## In Scope Features
+
+1. Compatibility matrix execution and pass/fail outcomes
+2. Release guard behavior (promotion gate)
+3. Candidate build path behavior (`CADDY_USE_CANDIDATE=1`)
+4. Non-drift defaults (`CADDY_USE_CANDIDATE=0` remains default)
+
+## Out of Scope
+
+- PR-2 and later slices
+- Unrelated frontend feature behavior
+- Historical QA items not tied to PR-1
+
+## Environment Checklist
+
+- [ ] Local repository is up to date with PR-1 changes
+- [ ] Docker build completes successfully
+- [ ] Test output directory is clean or isolated for this run
+
+## Test Cases
+
+### TC-001 — Compatibility Matrix Completes
+
+- Area: Compatibility matrix
+- Risk: False PASS due to partial artifacts or mixed output paths
+- Steps:
+ 1. Run the matrix script with an isolated output directory.
+ 2. Verify all expected rows are present for scenarios A/B/C and amd64/arm64.
+ 3. Confirm each row has explicit PASS/FAIL values for required checks.
+- Expected:
+ - Matrix completes without missing rows.
+ - Row statuses are deterministic and readable.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+### TC-002 — Promotion Gate Enforces Scenario A Only
+
+- Area: Release guard
+- Risk: Incorrect gate logic blocks or allows promotion unexpectedly
+- Steps:
+ 1. Review matrix results for scenario A on amd64 and arm64.
+ 2. Confirm promotion decision uses scenario A on both architectures.
+ 3. Confirm scenario B/C are evidence-only and do not flip the promotion verdict.
+- Expected:
+ - Promotion gate follows PR-1 rule exactly.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+### TC-003 — Candidate Build Path Is Opt-In
+
+- Area: Candidate build path
+- Risk: Candidate path becomes active without explicit opt-in
+- Steps:
+ 1. Build with default arguments.
+ 2. Confirm runtime behavior is standard (non-candidate path).
+ 3. Build again with candidate opt-in enabled.
+ 4. Confirm candidate path is only active in the opt-in build.
+- Expected:
+ - Candidate behavior appears only when explicitly enabled.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+### TC-004 — Default Runtime Behavior Does Not Drift
+
+- Area: Non-drift defaults
+- Risk: Silent default drift after PR-1 merge
+- Steps:
+ 1. Verify Docker defaults used by standard build.
+ 2. Run a standard deployment path.
+ 3. Confirm behavior matches pre-PR-1 default expectations.
+- Expected:
+ - Default runtime remains non-candidate.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+## Defect Log
+
+Use this section for any issue found during manual testing.
+
+| ID | Test Case | Severity | Summary | Reproducible | Status |
+| --- | --- | --- | --- | --- | --- |
+| | | | | | |
+
+## Exit Criteria
+
+- [ ] All four PR-1 test cases executed
+- [ ] No unresolved critical defects
+- [ ] Promotion decision is traceable to matrix evidence
+- [ ] Any failures documented with clear next action
diff --git a/docs/issues/manual_test_pr2_security_posture_closure.md b/docs/issues/manual_test_pr2_security_posture_closure.md
new file mode 100644
index 00000000..0aabfc3c
--- /dev/null
+++ b/docs/issues/manual_test_pr2_security_posture_closure.md
@@ -0,0 +1,96 @@
+---
+title: "Manual Test Tracking Plan - Security Posture Closure"
+labels:
+ - testing
+ - security
+ - caddy
+priority: high
+---
+
+# Manual Test Tracking Plan - PR-2 Security Posture Closure
+
+## Scope
+PR-2 only.
+
+This plan tracks manual verification for:
+- Patch disposition decisions
+- Admin API assumptions and guardrails
+- Rollback checks
+
+Out of scope:
+- PR-1 compatibility closure tasks
+- PR-3 feature or UX expansion
+
+## Preconditions
+- [ ] Branch contains PR-2 documentation and configuration changes only.
+- [ ] Environment starts cleanly with default PR-2 settings.
+- [ ] Tester can run container start/restart and review startup logs.
+
+## Track A - Patch Disposition Validation
+
+### TC-PR2-001 Retained patches remain retained
+- [ ] Verify `expr` and `ipstore` patch decisions are documented as retained in the PR-2 security posture report.
+- [ ] Confirm no conflicting PR-2 docs state these patches are retired.
+- Expected result: retained/retained remains consistent across PR-2 closure docs.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+### TC-PR2-002 Nebula default retirement is clearly bounded
+- [ ] Verify PR-2 report states `nebula` retirement is by default scenario switch.
+- [ ] Verify rollback instruction is present and explicit.
+- Expected result: reviewer can identify default posture and rollback without ambiguity.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+## Track B - Admin API Assumption Checks
+
+### TC-PR2-003 Internal-only admin API assumption
+- [ ] Confirm PR-2 report states admin API is expected to be internal-only.
+- [ ] Confirm PR-2 QA report includes admin API validation/normalization posture.
+- Expected result: both reports communicate the same assumption.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+### TC-PR2-004 Invalid admin endpoint fails fast
+- [ ] Start with an intentionally invalid/non-allowlisted admin API URL.
+- [ ] Verify startup fails fast with clear configuration rejection behavior.
+- [ ] Restore valid URL and confirm startup succeeds.
+- Expected result: unsafe endpoint rejected; safe endpoint accepted.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+### TC-PR2-005 Port exposure assumption holds
+- [ ] Verify deployment defaults do not publish admin API port `2019`.
+- [ ] Confirm no PR-2 doc contradicts this default posture.
+- Expected result: admin API remains non-published by default.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+## Track C - Rollback Safety Checks
+
+### TC-PR2-006 Scenario rollback switch
+- [ ] Set `CADDY_PATCH_SCENARIO=A`.
+- [ ] Restart and verify the rollback path is accepted by the runtime.
+- [ ] Return to PR-2 default scenario and verify normal startup.
+- Expected result: rollback is deterministic and reversible.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+### TC-PR2-007 QA report rollback statement alignment
+- [ ] Confirm QA report and security posture report use the same rollback instruction.
+- [ ] Confirm both reports remain strictly PR-2 scoped.
+- Expected result: no conflicting rollback guidance; no PR-3 references.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+## Defect Log
+
+| ID | Test Case | Severity | Summary | Reproducible | Status |
+| --- | --- | --- | --- | --- | --- |
+| | | | | | |
+
+## Exit Criteria
+- [ ] All PR-2 test cases executed.
+- [ ] No unresolved critical defects.
+- [ ] Patch disposition, admin API assumptions, and rollback checks are all verified.
+- [ ] No PR-3 material introduced in this tracking plan.
diff --git a/docs/issues/manual_test_pr3_keepalive_controls_closure.md b/docs/issues/manual_test_pr3_keepalive_controls_closure.md
new file mode 100644
index 00000000..af3ff00a
--- /dev/null
+++ b/docs/issues/manual_test_pr3_keepalive_controls_closure.md
@@ -0,0 +1,102 @@
+---
+title: "Manual Test Tracking Plan - PR-3 Keepalive Controls Closure"
+labels:
+ - testing
+ - frontend
+ - backend
+ - security
+priority: high
+---
+
+# Manual Test Tracking Plan - PR-3 Keepalive Controls Closure
+
+## Scope
+PR-3 only.
+
+This plan tracks manual verification for:
+- Keepalive control behavior in System Settings
+- Safe default/fallback behavior for missing or invalid keepalive values
+- Non-exposure constraints for deferred advanced settings
+
+Out of scope:
+- PR-1 compatibility closure tasks
+- PR-2 security posture closure tasks
+- Any new page, route, or feature expansion beyond approved PR-3 controls
+
+## Preconditions
+- [ ] Branch includes PR-3 closure changes only.
+- [ ] Environment starts cleanly.
+- [ ] Tester can access System Settings and save settings.
+- [ ] Tester can restart and re-open the app to verify persisted behavior.
+
+## Track A - Keepalive Controls
+
+### TC-PR3-001 Keepalive controls are present and editable
+- [ ] Open System Settings.
+- [ ] Verify keepalive idle and keepalive count controls are visible.
+- [ ] Enter valid values and save.
+- Expected result: values save successfully and are shown after refresh.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+### TC-PR3-002 Keepalive values persist across reload
+- [ ] Save valid keepalive idle and count values.
+- [ ] Refresh the page.
+- [ ] Re-open System Settings.
+- Expected result: saved values are preserved.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+## Track B - Safe Defaults and Fallback
+
+### TC-PR3-003 Missing keepalive input keeps safe defaults
+- [ ] Clear optional keepalive inputs (leave unset/empty where allowed).
+- [ ] Save and reload settings.
+- Expected result: app remains stable and uses safe default behavior.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+### TC-PR3-004 Invalid keepalive input is handled safely
+- [ ] Enter invalid keepalive values (out-of-range or malformed).
+- [ ] Attempt to save.
+- [ ] Correct the values and save again.
+- Expected result: invalid values are rejected safely; system remains stable; valid correction saves.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+### TC-PR3-005 Regression check after fallback path
+- [ ] Trigger one invalid save attempt.
+- [ ] Save valid values immediately after.
+- [ ] Refresh and verify current values.
+- Expected result: no stuck state; final valid values are preserved.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+## Track C - Non-Exposure Constraints
+
+### TC-PR3-006 Deferred advanced settings remain non-exposed
+- [ ] Review System Settings controls.
+- [ ] Confirm `trusted_proxies_unix` is not exposed.
+- [ ] Confirm certificate lifecycle internals are not exposed.
+- Expected result: only approved PR-3 keepalive controls are user-visible.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+### TC-PR3-007 Scope containment remains intact
+- [ ] Verify no new page/tab/modal was introduced for PR-3 controls.
+- [ ] Verify settings flow still uses existing System Settings experience.
+- Expected result: PR-3 remains contained to approved existing surface.
+- Status: [ ] Not run [ ] Pass [ ] Fail
+- Notes:
+
+## Defect Log
+
+| ID | Test Case | Severity | Summary | Reproducible | Status |
+| --- | --- | --- | --- | --- | --- |
+| | | | | | |
+
+## Exit Criteria
+- [ ] All PR-3 test cases executed.
+- [ ] No unresolved critical defects.
+- [ ] Keepalive controls, safe fallback/default behavior, and non-exposure constraints are verified.
+- [ ] No PR-1 or PR-2 closure tasks introduced in this PR-3 plan.
diff --git a/docs/issues/manual_test_security_pr_event_gating_artifact_resolution.md b/docs/issues/manual_test_security_pr_event_gating_artifact_resolution.md
new file mode 100644
index 00000000..c714743a
--- /dev/null
+++ b/docs/issues/manual_test_security_pr_event_gating_artifact_resolution.md
@@ -0,0 +1,142 @@
+---
+title: Manual Test Plan - Security Scan PR Event Gating and Artifact Resolution
+status: Open
+priority: High
+assignee: DevOps
+labels: testing, workflows, security, ci/cd
+---
+
+## Goal
+Validate that `Security Scan (PR)` in `.github/workflows/security-pr.yml` behaves deterministically for trigger gating, PR artifact resolution, and trust-boundary checks.
+
+## Scope
+- Event gating for `workflow_run`, `workflow_dispatch`, `pull_request`, and `push`
+- PR artifact lookup and image loading path
+- Failure behavior for missing/corrupt artifacts
+- Permission and trust-boundary protection paths
+
+## Preconditions
+- You can run workflows in this repository.
+- You can view workflow logs in GitHub Actions.
+- At least one recent PR exists with a successful `Docker Build, Publish & Test` run and published `pr-image-` artifact.
+- Use a test branch or draft PR for negative testing.
+
+## Evidence to Capture
+- Run URL for each scenario
+- Job status (`success`, `failure`, `skipped`)
+- Exact failure line when expected
+- `reason_category` value when present
+
+## Manual Test Checklist
+
+### 1. `workflow_run` from upstream `pull_request` (happy path)
+- [ ] Trigger a PR build by pushing a commit to an open PR.
+- [ ] Wait for `Docker Build, Publish & Test` to complete successfully.
+- [ ] Confirm `Security Scan (PR)` starts from `workflow_run`.
+- [ ] Confirm job `Trivy Binary Scan` runs.
+- [ ] Confirm logs show trust-boundary validation success.
+- [ ] Confirm artifact `pr-image-` is found and downloaded.
+- [ ] Confirm `Load Docker image` resolves to `charon:artifact`.
+- [ ] Confirm binary extraction and Trivy scan steps execute.
+
+Expected outcome:
+- Workflow succeeds or fails only on real security findings, not on event/artifact resolution.
+
+Failure signals:
+- `reason_category=unsupported_upstream_event` on a PR-triggered upstream run.
+- Artifact lookup fails for a known valid PR artifact.
+- `Load Docker image` cannot resolve image ref despite valid artifact.
+
+### 2. `workflow_run` from upstream `push` (should not run)
+- [ ] Push directly to a branch that triggers `Docker Build, Publish & Test` as `push` (for example, `main` in a controlled test window).
+- [ ] Open `Security Scan (PR)` run created by `workflow_run`.
+- [ ] Verify `Trivy Binary Scan` is skipped by job-level gating.
+- [ ] Verify no artifact lookup/download steps were executed.
+
+Expected outcome:
+- `Security Scan (PR)` job does not run for upstream `push`.
+
+Failure signals:
+- `Trivy Binary Scan` executes for upstream `push`.
+- Any artifact resolution step runs under upstream `push`.
+
+### 3. `workflow_dispatch` with valid `pr_number`
+- [ ] Open `Security Scan (PR)` and click `Run workflow`.
+- [ ] Provide a numeric `pr_number` that has a successful docker-build artifact.
+- [ ] Start run and inspect logs.
+- [ ] Confirm PR number validation passes.
+- [ ] Confirm run lookup resolves a successful `docker-build.yml` run for that PR.
+- [ ] Confirm artifact download, image load, extraction, and Trivy steps run.
+
+Expected outcome:
+- Workflow executes artifact-only replay path and proceeds to scan.
+
+Failure signals:
+- Dispatch falls back to local image build.
+- `reason_category=not_found` for a PR known to have valid artifact.
+
+### 4. `workflow_dispatch` without `pr_number` (input validation)
+- [ ] Open `Run workflow` for `Security Scan (PR)`.
+- [ ] Attempt run with empty `pr_number` (or non-numeric value if UI blocks empty).
+- [ ] Inspect early step logs.
+
+Expected outcome:
+- Job fails fast before artifact lookup/load.
+- Clear validation message indicates missing/invalid `pr_number`.
+
+Failure signals:
+- Workflow continues to artifact lookup with invalid input.
+- Error message is ambiguous or missing reason category.
+
+### 5. Artifact missing case
+- [ ] Run `workflow_dispatch` with a numeric PR that does not have a successful docker-build artifact.
+- [ ] Inspect `Check for PR image artifact` logs.
+
+Expected outcome:
+- Hard fail with a clear error.
+- Log includes `reason_category=not_found`, run context, and artifact name.
+
+Failure signals:
+- Step silently skips or succeeds without artifact.
+- Workflow proceeds to download/load steps.
+
+### 6. Artifact corrupt/unreadable case
+- [ ] Use a controlled test branch to simulate bad artifact content for `charon-pr-image.tar` (for example, tar missing `manifest.json` and no usable load image ID, or unreadable tar).
+- [ ] Trigger path through `workflow_run` or `workflow_dispatch`.
+- [ ] Inspect `Load Docker image` logs.
+
+Expected outcome:
+- Job fails in `Load Docker image` before extraction when image cannot be resolved.
+- Error states artifact is missing/unreadable, or valid image reference cannot be resolved.
+
+Failure signals:
+- Job continues to extraction with empty/invalid image ref.
+- `docker create` fails later due to unresolved image (late failure indicates missed validation).
+
+### 7. Trust-boundary and permission guard failures
+- [ ] Verify `permissions` in run metadata are minimal: `contents: read`, `actions: read`, `security-events: write`.
+- [ ] For `workflow_run`, inspect guard step output.
+- [ ] Confirm guard fails when any of the following are invalid:
+ - Upstream workflow name mismatch
+ - Upstream event not `pull_request`
+ - Upstream head repository not equal to current repository
+
+Expected outcome:
+- Guard fails early with explicit `reason_category`.
+- No artifact lookup/load/extract occurs after guard failure.
+
+Failure signals:
+- Guard passes with mismatched trust-boundary values.
+- Workflow attempts artifact operations after trust-boundary failure.
+- Unexpected write permissions are present.
+
+## Regression Watchlist
+- Event-gating changes accidentally allow `workflow_run` from `push` to execute scan.
+- Manual dispatch path silently accepts non-numeric or empty PR input.
+- Artifact resolver relies on a single tag and breaks on alternate load output formats.
+- Trust-boundary checks are bypassed due to conditional logic drift.
+
+## Exit Criteria
+- All scenarios pass with expected behavior.
+- Any failure signal is logged as a bug with run URL and exact failing step.
+- No ambiguous skip behavior remains for required hard-fail paths.
diff --git a/docs/plans/archive/acl_security_headers_hotfix_plan.md b/docs/plans/archive/acl_security_headers_hotfix_plan.md
new file mode 100644
index 00000000..81fc1c46
--- /dev/null
+++ b/docs/plans/archive/acl_security_headers_hotfix_plan.md
@@ -0,0 +1,270 @@
+# ACL + Security Headers Hotfix Plan (Proxy Host Create/Edit)
+
+## 1. Introduction
+
+### Overview
+Hotfix request: Proxy Host form dropdown selections for Access Control List (ACL) and Security Headers are not being applied/persisted for new or edited hosts.
+
+Reported behavior:
+1. Existing hosts with previously assigned ACL/Security Header profile retain old values.
+2. Users cannot reliably remove or change those values in UI.
+3. Newly created hosts cannot reliably apply ACL/Security Header profile.
+
+### Objective
+Deliver an urgent but correct root-cause fix across frontend binding and backend persistence flow, with minimum user interruption and full validation gates.
+
+## 2. Research Findings (Current Architecture + Touchpoints)
+
+### Frontend Entry Points
+1. `frontend/src/pages/ProxyHosts.tsx`
+ - `handleSubmit(data)` calls `updateHost(editingHost.uuid, data)` or `createHost(data)`.
+ - Renders `ProxyHostForm` modal for create/edit flows.
+2. `frontend/src/components/ProxyHostForm.tsx`
+ - Local form state initializes `access_list_id` and `security_header_profile_id`.
+ - ACL control uses `AccessListSelector`.
+ - Security Headers control uses `Select` with `security_header_profile_id` mapping.
+ - Submission path: `handleSubmit` -> `onSubmit(payloadWithoutUptime)`.
+3. `frontend/src/components/AccessListSelector.tsx`
+ - Converts select values between `string` and `number | null`.
+
+### Frontend API/Hooks
+1. `frontend/src/hooks/useProxyHosts.ts`
+ - `createHost` -> `createProxyHost`.
+ - `updateHost` -> `updateProxyHost`.
+2. `frontend/src/api/proxyHosts.ts`
+ - `createProxyHost(host: Partial)` -> `POST /api/v1/proxy-hosts`.
+ - `updateProxyHost(uuid, host)` -> `PUT /api/v1/proxy-hosts/:uuid`.
+ - Contract fields: `access_list_id`, `security_header_profile_id`.
+
+### Backend Entry/Transformation/Persistence
+1. Route registration
+ - `backend/internal/api/routes/routes.go`: `proxyHostHandler.RegisterRoutes(protected)`.
+2. Handler
+ - `backend/internal/api/handlers/proxy_host_handler.go`
+ - `Create(c)` uses `ShouldBindJSON(&models.ProxyHost{})`.
+ - `Update(c)` uses `map[string]any` partial update parsing.
+ - Target fields:
+ - `payload["access_list_id"]` -> `parseNullableUintField` -> `host.AccessListID`
+ - `payload["security_header_profile_id"]` -> typed conversion -> `host.SecurityHeaderProfileID`
+3. Service
+ - `backend/internal/services/proxyhost_service.go`
+ - `Create(host)` validates + `db.Create(host)`.
+ - `Update(host)` validates + `db.Model(...).Select("*").Updates(host)`.
+4. Model
+ - `backend/internal/models/proxy_host.go`
+ - `AccessListID *uint \`json:"access_list_id"\``
+ - `SecurityHeaderProfileID *uint \`json:"security_header_profile_id"\``
+
+### Existing Tests Relevant to Incident
+1. Frontend unit regression coverage already exists:
+ - `frontend/src/components/__tests__/ProxyHostForm-dropdown-changes.test.tsx`
+2. E2E regression spec exists:
+ - `tests/security-enforcement/acl-dropdown-regression.spec.ts`
+3. Backend update and security-header tests exist:
+ - `backend/internal/api/handlers/proxy_host_handler_update_test.go`
+ - `backend/internal/api/handlers/proxy_host_handler_security_headers_test.go`
+
+## 3. Root-Cause-First Trace
+
+### Trace Model (Mandatory)
+1. Entry Point:
+ - UI dropdown interactions in `ProxyHostForm` and `AccessListSelector`.
+2. Transformation:
+ - Form state conversion (`string` <-> `number | null`) and payload construction in `ProxyHostForm`.
+ - API serialization via `frontend/src/api/proxyHosts.ts`.
+3. Persistence:
+ - Backend `Update` parser (`proxy_host_handler.go`) and `ProxyHostService.Update` persistence.
+4. Exit Point:
+ - Response body consumed by React Query invalidation/refetch in `useProxyHosts`.
+ - UI reflects updated values in table/form.
+
+### Most Likely Failure Zones
+1. Frontend select binding/conversion drift (top candidate)
+ - Shared symptom across ACL and Security Headers points to form/select layer.
+ - Candidate files:
+ - `frontend/src/components/ProxyHostForm.tsx`
+ - `frontend/src/components/AccessListSelector.tsx`
+ - `frontend/src/components/ui/Select.tsx`
+2. Payload mutation or stale form object behavior
+ - Ensure payload carries updated `access_list_id` / `security_header_profile_id` values at submit time.
+3. Backend partial-update parser edge behavior
+ - Ensure `nil`, numeric string, and number conversions are consistent between ACL and security header profile paths.
+
+### Investigation Decision
+Root-cause verification will be instrumented through failing-first Playwright scenario and targeted handler tests before applying code changes.
+
+## 4. EARS Requirements
+
+1. WHEN a user selects an ACL in the Proxy Host create/edit form, THE SYSTEM SHALL persist `access_list_id` and return it in API response.
+2. WHEN a user changes ACL from one value to another, THE SYSTEM SHALL replace prior `access_list_id` with the new value.
+3. WHEN a user selects "No Access Control", THE SYSTEM SHALL persist `access_list_id = null`.
+4. WHEN a user selects a Security Headers profile in the Proxy Host create/edit form, THE SYSTEM SHALL persist `security_header_profile_id` and return it in API response.
+5. WHEN a user changes Security Headers profile from one value to another, THE SYSTEM SHALL replace prior `security_header_profile_id` with the new value.
+6. WHEN a user selects "None" for Security Headers, THE SYSTEM SHALL persist `security_header_profile_id = null`.
+7. IF dropdown interaction fails to update internal form state, THEN THE SYSTEM SHALL prevent stale values from being persisted.
+8. WHILE updating Proxy Host settings, THE SYSTEM SHALL maintain existing behavior for unrelated fields and not regress certificate, DNS challenge, or uptime-linked updates.
+
+Note: User-visible blocking error behavior is deferred unless required by confirmed root cause.
+
+## 5. Technical Specification (Hotfix Scope)
+
+### API Contract (No Breaking Change)
+1. `POST /api/v1/proxy-hosts`
+ - Request fields include `access_list_id`, `security_header_profile_id` as nullable numeric fields.
+2. `PUT /api/v1/proxy-hosts/:uuid`
+ - Partial payload accepts nullable updates for both fields.
+3. Response must echo persisted values in snake_case:
+ - `access_list_id`
+ - `security_header_profile_id`
+
+### Data Model/DB
+No schema migration expected. Existing nullable FK fields in `backend/internal/models/proxy_host.go` are sufficient.
+
+### Targeted Code Areas for Fix
+1. Frontend
+ - `frontend/src/components/ProxyHostForm.tsx`
+ - `frontend/src/components/AccessListSelector.tsx`
+ - `frontend/src/components/ui/Select.tsx` (only if click/select propagation issue confirmed)
+ - `frontend/src/api/proxyHosts.ts` (only if serialization issue confirmed)
+2. Backend
+ - `backend/internal/api/handlers/proxy_host_handler.go` (only if parsing/persistence mismatch confirmed)
+ - `backend/internal/services/proxyhost_service.go` (only if update write path proves incorrect)
+
+## 6. Edge Cases
+
+1. Edit host with existing ACL/profile and switch to another value.
+2. Edit host with existing ACL/profile and clear to null.
+3. Create new host with ACL/profile set before first save.
+4. Submit with stringified numeric values (defensive compatibility).
+5. Submit with null values for both fields simultaneously.
+6. Missing/deleted profile or ACL IDs in backend (validation errors).
+7. Multiple rapid dropdown changes before save (last selection wins).
+
+## 7. Risk Analysis
+
+### High Risk
+1. Silent stale-state submission from form controls.
+2. Regressing other Proxy Host settings due to broad payload mutation.
+
+### Medium Risk
+1. Partial-update parser divergence between ACL and security profile behavior.
+2. UI select portal/z-index interaction causing non-deterministic click handling.
+
+### Mitigations
+1. Reproduce with Playwright first and capture exact failing action path.
+2. Add/strengthen focused frontend tests around create/edit/clear flows.
+3. Add/strengthen backend tests for nullable + conversion paths.
+4. Keep hotfix minimal and avoid unrelated refactors.
+
+## 8. Implementation Plan (Urgent, Minimal Interruption)
+
+### Phase 1: Reproduction + Guardrails (Playwright First)
+1. Execute targeted E2E spec for dropdown flow and create/edit persistence behavior.
+2. Capture exact failure step and confirm whether failure is click binding, payload value, or backend persistence.
+3. Add/adjust failing-first test if current suite does not capture observed production regression.
+
+### Phase 2: Frontend Fix
+1. Patch select binding/state mapping for ACL and Security Headers in `ProxyHostForm`/`AccessListSelector`.
+2. If needed, patch `ui/Select` interaction layering.
+3. Ensure payload contains correct final `access_list_id` and `security_header_profile_id` values at submit.
+4. Extend `ProxyHostForm` tests for create/edit/change/remove flows.
+
+### Phase 3: Backend Hardening (Conditional)
+1. Only if frontend payload is correct but persistence is wrong:
+ - Backend fix MUST use field-scoped partial-update semantics for `access_list_id` and `security_header_profile_id` only (unless separately justified).
+ - Ensure write path persists null transitions reliably.
+2. Add/adjust handler/service regression tests proving no unintended mutation of unrelated proxy host fields during these targeted updates.
+
+### Phase 4: Integration + Regression
+1. Run complete targeted Proxy Host UI flow tests.
+2. Validate list refresh and modal reopen reflect persisted values.
+3. Validate no regressions in bulk ACL / bulk security-header operations.
+
+### Phase 5: Documentation + Handoff
+1. Update changelog/release notes only for hotfix behavior.
+2. Keep architecture docs unchanged unless root cause requires architectural note.
+3. Handoff to Supervisor agent for review after plan approval and implementation.
+
+## 9. Acceptance Criteria
+
+1. ACL dropdown selection persists on create and edit.
+2. Security Headers dropdown selection persists on create and edit.
+3. Clearing ACL persists `null` and is reflected after reload.
+4. Clearing Security Headers persists `null` and is reflected after reload.
+5. Existing hosts can change from one ACL/profile to another without stale value retention.
+6. New hosts can apply ACL/profile at creation time.
+7. No regressions in unrelated proxy host fields.
+8. All validation gates in Section 11 pass.
+9. API create response returns persisted `access_list_id` and `security_header_profile_id` matching submitted values (including `null`).
+10. API update response returns persisted `access_list_id` and `security_header_profile_id` after `value->value`, `value->null`, and `null->value` transitions.
+11. Backend persistence verification confirms unrelated proxy host fields remain unchanged for targeted updates.
+
+## 10. PR Slicing Strategy
+
+### Decision
+Single PR (hotfix-first), with contingency split only if backend root cause is confirmed late.
+
+### Rationale
+1. Incident impact is immediate user-facing and concentrated in one feature path.
+2. Frontend + targeted backend/test changes are tightly coupled for verification.
+3. Single PR minimizes release coordination and user interruption.
+
+### Contingency (Only if split becomes necessary)
+1. PR-1: Frontend binding + tests
+ - Scope: `ProxyHostForm`, `AccessListSelector`, `ui/Select` (if required), related tests.
+ - Dependency: none.
+ - Acceptance: UI submit payload verified correct in unit + Playwright.
+2. PR-2: Backend parser/persistence + tests (conditional)
+ - Scope: `proxy_host_handler.go`, `proxyhost_service.go`, handler/service tests.
+ - Dependency: PR-1 merged or rebased for aligned contract.
+ - Acceptance: API update/create persist both nullable IDs correctly.
+3. PR-3: Regression hardening + docs
+ - Scope: extra regression coverage, release-note hotfix entry.
+ - Dependency: PR-1/PR-2.
+ - Acceptance: full DoD validation sequence passes.
+
+## 11. Validation Plan (Mandatory Sequence)
+
+0. E2E environment prerequisite
+ - Determine rebuild necessity per testing policy: if application/runtime or Docker input changes are present, rebuild is required.
+ - If rebuild is required or the container is unhealthy, run `.github/skills/scripts/skill-runner.sh docker-rebuild-e2e`.
+ - Record container health outcome before executing tests.
+1. Playwright first
+ - Run targeted Proxy Host dropdown and create/edit persistence scenarios.
+2. Local patch coverage preflight
+ - Generate `test-results/local-patch-report.md` and `test-results/local-patch-report.json`.
+3. Unit and coverage
+ - Backend coverage run (threshold >= 85%).
+ - Frontend coverage run (threshold >= 85%).
+4. Type checks
+ - Frontend TypeScript check.
+5. Pre-commit
+ - `pre-commit run --all-files` with zero blocking failures.
+6. Security scans
+ - CodeQL Go + JS (security-and-quality).
+ - Findings check gate.
+ - Trivy scan.
+ - Conditional GORM security scan if model/DB-layer changes are made.
+7. Build verification
+ - Backend build + frontend build pass.
+
+## 12. File Review: `.gitignore`, `codecov.yml`, `.dockerignore`, `Dockerfile`
+
+Assessment for this hotfix:
+1. `.gitignore`: no required change for ACL/Security Headers hotfix.
+2. `codecov.yml`: no required change; current exclusions/thresholds are compatible.
+3. `.dockerignore`: no required change unless new hotfix-only artifact paths are introduced.
+4. `Dockerfile`: no required change; incident is application logic/UI binding, not image build pipeline.
+
+If implementation introduces new persistent test artifacts, update ignore files in the same PR.
+
+## 13. Rollback and Contingency
+
+1. If hotfix causes regression in proxy host save flow, revert hotfix commit and redeploy prior stable build.
+2. If frontend-only fix is insufficient, activate conditional backend phase immediately.
+3. If validation gates fail on security/coverage, hold merge until fixed; no partial exception for this incident.
+4. Post-rollback smoke checks:
+ - Create host with ACL/profile.
+ - Edit to different ACL/profile values.
+ - Clear both values to `null`.
+ - Verify persisted values in API response and after UI reload.
diff --git a/docs/plans/archived_docker-socket-group-spec.md b/docs/plans/archived_docker-socket-group-spec.md
new file mode 100644
index 00000000..973a9ed6
--- /dev/null
+++ b/docs/plans/archived_docker-socket-group-spec.md
@@ -0,0 +1,586 @@
+---
+post_title: "Current Spec: Local Docker Socket Group Access Remediation"
+categories:
+ - planning
+ - docker
+ - security
+ - backend
+ - frontend
+tags:
+ - docker.sock
+ - least-privilege
+ - group-add
+ - compose
+ - validation
+summary: "Comprehensive plan to resolve local docker socket access failures for non-root process uid=1000 gid=1000 when host socket gid is not in supplemental groups, with phased rollout, PR slicing, and least-privilege validation."
+post_date: 2026-02-25
+---
+
+## 1) Introduction
+
+### Overview
+
+Charon local Docker discovery currently fails in environments where:
+
+- Socket mount exists: `/var/run/docker.sock:/var/run/docker.sock:ro`
+- Charon process runs non-root (typically `uid=1000 gid=1000`)
+- Host socket group (example: `gid=988`) is not present in process supplemental groups
+
+Observed user-facing failure class (already emitted by backend details builder):
+
+- `Local Docker socket mounted but not accessible by current process (uid=1000 gid=1000)... Process groups do not include socket gid 988; run container with matching supplemental group (e.g., --group-add 988).`
+
+### Goals
+
+1. Preserve non-root default execution (`USER charon`) while enabling local Docker discovery safely.
+2. Standardize supplemental-group strategy across compose variants and launcher scripts.
+3. Keep behavior deterministic in backend/API/frontend error surfacing when permissions are wrong.
+4. Validate least-privilege posture (non-root, minimal group grant, no broad privilege escalation).
+
+### Non-Goals
+
+- No redesign of remote Docker support (`tcp://...`) beyond compatibility checks.
+- No changes to unrelated security modules (WAF, ACL, CrowdSec workflows).
+- No broad Docker daemon hardening beyond this socket-access path.
+
+### Scope Labels (Authoritative)
+
+- `repo-deliverable`: changes that must be included in repository PR slices under `/projects/Charon`.
+- `operator-local follow-up`: optional local environment changes outside repository scope (for example `/root/docker/...`), not required for repo PR acceptance.
+
+---
+
+## 2) Research Findings
+
+### 2.1 Critical Runtime Files (Confirmed)
+
+- `backend/internal/services/docker_service.go`
+ - Key functions:
+ - `NewDockerService()`
+ - `(*DockerService).ListContainers(...)`
+ - `resolveLocalDockerHost()`
+ - `buildLocalDockerUnavailableDetails(...)`
+ - `isDockerConnectivityError(...)`
+ - `extractErrno(...)`
+ - `localSocketStatSummary(...)`
+ - Contains explicit supplemental-group hint text with `--group-add ` when `EACCES/EPERM` occurs.
+
+- `backend/internal/api/handlers/docker_handler.go`
+ - Key function: `(*DockerHandler).ListContainers(...)`
+ - Maps `DockerUnavailableError` to HTTP `503` with `details` string consumed by UI.
+
+- `frontend/src/hooks/useDocker.ts`
+ - Hook: `useDocker(host?, serverId?)`
+ - Converts `503` payload details into surfaced `Error(message)`.
+
+- `frontend/src/components/ProxyHostForm.tsx`
+ - Uses `useDocker`.
+ - Error panel title: `Docker Connection Failed`.
+ - Existing troubleshooting text currently mentions socket mount but not explicit supplemental group action.
+
+- `.docker/docker-entrypoint.sh`
+ - Root path auto-aligns docker socket GID with user group membership via:
+ - `get_group_by_gid()`
+ - `create_group_with_gid()`
+ - `add_user_to_group()`
+ - Non-root path logs generic `--group-add` guidance but does not include resolved host socket GID.
+
+- `Dockerfile`
+ - Creates non-root user `charon` (uid/gid 1000) and final `USER charon`.
+ - This is correct for least privilege and should remain default.
+
+### 2.2 Compose and Script Surface Area
+
+Primary in-repo compose files with docker socket mount:
+
+- `.docker/compose/docker-compose.yml` (`charon` service)
+- `.docker/compose/docker-compose.local.yml` (`charon` service)
+- `.docker/compose/docker-compose.dev.yml` (`app` service)
+- `.docker/compose/docker-compose.playwright-local.yml` (`charon-e2e` service)
+- `.docker/compose/docker-compose.playwright-ci.yml` (`charon-app`, `crowdsec` services)
+
+Primary out-of-repo/local-ops file in active workspace:
+
+- `/root/docker/containers/charon/docker-compose.yml` (`charon` service)
+ - Includes socket mount.
+ - `user:` is currently commented out.
+ - No `group_add` entry exists.
+
+Launcher scripts discovered:
+
+- `.github/skills/docker-start-dev-scripts/run.sh`
+ - Runs: `docker compose -f .docker/compose/docker-compose.dev.yml up -d`
+- `/root/docker/containers/charon/docker-compose-up-charon.sh`
+ - Runs: `docker compose up -d`
+
+### 2.3 Existing Tests Relevant to This Failure
+
+Backend service tests (`backend/internal/services/docker_service_test.go`):
+
+- `TestBuildLocalDockerUnavailableDetails_PermissionDeniedIncludesGroupHint`
+- `TestBuildLocalDockerUnavailableDetails_MissingSocket`
+- Connectivity classification tests across URL/syscall/network errors.
+
+Backend handler tests (`backend/internal/api/handlers/docker_handler_test.go`):
+
+- `TestDockerHandler_ListContainers_DockerUnavailableMappedTo503`
+- Other selector and remote-host mapping tests.
+
+Frontend hook tests (`frontend/src/hooks/__tests__/useDocker.test.tsx`):
+
+- `it('extracts details from 503 service unavailable error', ...)`
+
+### 2.4 Config Review Findings (`.gitignore`, `codecov.yml`, `.dockerignore`, `Dockerfile`)
+
+- `.gitignore`: no blocker for this feature; already excludes local env/artifacts extensively.
+- `.dockerignore`: no blocker for this feature; includes docs/tests and build artifacts exclusions.
+- `Dockerfile`: non-root default is aligned with least-privilege intent.
+- `codecov.yml`: currently excludes the two key Docker logic files:
+ - `backend/internal/services/docker_service.go`
+ - `backend/internal/api/handlers/docker_handler.go`
+ This exclusion undermines regression visibility for this exact problem class and should be revised.
+
+### 2.5 Confidence
+
+Confidence score: **97%**
+
+Reasoning:
+
+- Root cause and symptom path are already explicit in code.
+- Required files and control points are concrete and localized.
+- Existing tests already cover adjacent behavior and reduce implementation risk.
+
+---
+
+## 3) Requirements (EARS)
+
+- WHEN local Docker source is selected and `/var/run/docker.sock` is mounted, THE SYSTEM SHALL return containers if the process has supplemental membership for socket GID.
+- WHEN local Docker source is selected and socket permissions deny access (`EACCES`/`EPERM`), THE SYSTEM SHALL return HTTP `503` with a deterministic, actionable details message including supplemental-group guidance.
+- WHEN container runs non-root and socket GID is known, THE SYSTEM SHALL provide explicit startup diagnostics indicating the required `group_add` value.
+- WHEN docker-compose-based local/dev startup is used, THE SYSTEM SHALL support local-only `group_add` configuration from host socket GID without requiring root process runtime.
+- WHEN remote Docker source is selected (`server_id` path), THE SYSTEM SHALL remain functionally unchanged.
+- WHEN least-privilege validation is executed, THE SYSTEM SHALL demonstrate non-root process execution and only necessary supplemental group grant.
+- IF resolved socket GID equals `0`, THEN THE SYSTEM SHALL require explicit operator opt-in and risk acknowledgment before any `group_add: ["0"]` path is used.
+
+---
+
+## 4) Technical Specifications
+
+### 4.1 Architecture and Data Flow
+
+User flow:
+
+1. UI `ProxyHostForm` sets source = `Local (Docker Socket)`.
+2. `useDocker(...)` calls `dockerApi.listContainers(...)`.
+3. Backend `DockerHandler.ListContainers(...)` invokes `DockerService.ListContainers(...)`.
+4. If socket access denied, backend emits `DockerUnavailableError` with details.
+5. Handler returns `503` JSON `{ error, details }`.
+6. Frontend surfaces message in `Docker Connection Failed` block.
+
+No database schema change is required.
+
+### 4.2 API Contract (No endpoint shape change)
+
+Endpoint:
+
+- `GET /api/v1/docker/containers`
+ - Query params:
+ - `host` (allowed: empty or `local` only)
+ - `server_id` (UUID for remote server lookup)
+
+Responses:
+
+- `200 OK`: `DockerContainer[]`
+- `503 Service Unavailable`:
+ - `error: "Docker daemon unavailable"`
+ - `details: `
+- `400`, `404`, `500` unchanged.
+
+### 4.3 Deterministic `group_add` Policy (Chosen)
+
+Chosen policy: **conditional local-only profile/override while keeping CI unaffected**.
+
+Authoritative policy statement:
+
+1. `repo-deliverable`: repository compose paths used for local operator runs (`.docker/compose/docker-compose.local.yml`, `.docker/compose/docker-compose.dev.yml`) may include local-only `group_add` wiring using `DOCKER_SOCK_GID`.
+2. `repo-deliverable`: CI compose paths (`.docker/compose/docker-compose.playwright-ci.yml`) remain unaffected by this policy and must not require `DOCKER_SOCK_GID`.
+3. `repo-deliverable`: base compose (`.docker/compose/docker-compose.yml`) remains safe by default and must not force a local host-specific GID requirement in CI.
+4. `operator-local follow-up`: out-of-repo operator files (for example `/root/docker/containers/charon/docker-compose.yml`) may mirror this policy but are explicitly outside mandatory repo PR scope.
+
+CI compatibility statement:
+
+- CI workflows remain deterministic because they do not depend on local host socket GID export for this remediation.
+- No CI job should fail due to missing `DOCKER_SOCK_GID` after this plan.
+
+Security guardrail for `gid==0` (mandatory):
+
+- If `stat -c '%g' /var/run/docker.sock` returns `0`, local profile/override usage must fail closed by default.
+- Enabling `group_add: ["0"]` requires explicit opt-in (for example `ALLOW_DOCKER_SOCK_GID_0=true`) and documented risk acknowledgment in operator guidance.
+- Silent fallback to GID `0` is prohibited.
+
+### 4.4 Entrypoint Diagnostic Improvements
+
+In `.docker/docker-entrypoint.sh` non-root socket branch:
+
+- Extend current message to include resolved socket GID from `stat -c '%g' /var/run/docker.sock`.
+- Emit exact recommendation format:
+ - `Use docker compose group_add: [""] or run with --group-add `
+- If resolved GID is `0`, emit explicit warning requiring opt-in/risk acknowledgment instead of generic recommendation.
+
+No privilege escalation should be introduced.
+
+### 4.5 Frontend UX Message Precision
+
+In `frontend/src/components/ProxyHostForm.tsx` troubleshooting text:
+
+- Retain mount guidance.
+- Add supplemental-group guidance for containerized runs.
+- Keep language concise and operational.
+
+### 4.6 Coverage and Quality Config Adjustments
+
+`codecov.yml` review outcome:
+
+- Proposed: remove Docker logic file ignores for:
+ - `backend/internal/services/docker_service.go`
+ - `backend/internal/api/handlers/docker_handler.go`
+- Reason: this issue is rooted in these files; exclusion hides regressions.
+
+`.gitignore` review outcome:
+
+- No change required for core remediation.
+
+`.dockerignore` review outcome:
+
+- No required change for runtime fix.
+- Optional follow-up: verify no additional local-only compose/env files are copied in future.
+
+`Dockerfile` review outcome:
+
+- No required behavioral change; preserve non-root default.
+
+---
+
+## 5) Risks, Edge Cases, Mitigations
+
+### Risks
+
+1. Host socket GID differs across environments (`docker` group not stable numeric ID).
+2. CI runners may not permit or need explicit `group_add` depending on runner Docker setup.
+3. Over-granting groups could violate least-privilege intent.
+4. Socket GID can be `0` on some hosts and implies root-group blast radius.
+
+### Edge Cases
+
+- Socket path missing (`ENOENT`) remains handled with existing details path.
+- Rootless host Docker sockets (`/run/user//docker.sock`) remain selectable by `resolveLocalDockerHost()`.
+- Remote server discovery path (`tcp://...`) must remain unaffected.
+
+### Mitigations
+
+- Use environment-substituted `DOCKER_SOCK_GID`, not hardcoded `988` in committed compose files.
+- Keep `group_add` scoped only to local operator flows that require socket discovery.
+- Fail closed on `DOCKER_SOCK_GID=0` unless explicit opt-in and risk acknowledgment are present.
+- Verify `id` output inside container to confirm only necessary supplemental group is present.
+
+---
+
+## 6) Implementation Plan (Phased, minimal request count)
+
+Design principle for phases: maximize delivery per request by grouping strongly-related changes into each phase and minimizing handoffs.
+
+### Phase 1 — Baseline + Diagnostics + Compose Foundations
+
+Scope:
+
+1. Compose updates in local/dev paths to support local-only `group_add` via `DOCKER_SOCK_GID`.
+2. Entrypoint diagnostic enhancement for non-root socket path.
+
+`repo-deliverable` files:
+
+- `.docker/compose/docker-compose.local.yml`
+- `.docker/compose/docker-compose.dev.yml`
+- `.docker/docker-entrypoint.sh`
+
+`operator-local follow-up` files (non-blocking, out of repo PR scope):
+
+- `/root/docker/containers/charon/docker-compose.yml`
+- `/root/docker/containers/charon/docker-compose-up-charon.sh`
+
+Deliverables:
+
+- Deterministic startup guidance and immediate local remediation path.
+
+### Phase 2 — API/UI Behavior Tightening + Tests
+
+Scope:
+
+1. Preserve and, if needed, refine backend detail text consistency in `buildLocalDockerUnavailableDetails(...)`.
+2. UI troubleshooting copy update in `ProxyHostForm.tsx`.
+3. Expand/refresh tests for permission-denied + supplemental-group hint rendering path.
+
+Primary files:
+
+- `backend/internal/services/docker_service.go`
+- `backend/internal/services/docker_service_test.go`
+- `backend/internal/api/handlers/docker_handler.go`
+- `backend/internal/api/handlers/docker_handler_test.go`
+- `frontend/src/hooks/useDocker.ts`
+- `frontend/src/hooks/__tests__/useDocker.test.tsx`
+- `frontend/src/components/ProxyHostForm.tsx`
+- `frontend/src/components/__tests__/ProxyHostForm*.test.tsx`
+
+Deliverables:
+
+- User sees precise, actionable guidance when failure occurs.
+- Regression tests protect failure classification and surfaced guidance.
+
+### Phase 3 — Coverage Policy + Documentation + CI/Validation Hardening
+
+Scope:
+
+1. Remove Docker logic exclusions in `codecov.yml`.
+2. Update docs to include `group_add` guidance where socket mount is described.
+3. Validate CI/playwright compose behavior remains unaffected and verify local least-privilege checks.
+
+Primary files:
+
+- `codecov.yml`
+- `README.md`
+- `docs/getting-started.md`
+- `SECURITY.md`
+- `.vscode/tasks.json` (only if adding dedicated validation task labels)
+
+Deliverables:
+
+- Documentation and coverage policy match runtime behavior.
+- Verified validation playbook for operators and CI.
+
+---
+
+## 7) PR Slicing Strategy
+
+### Decision
+
+**Split into multiple PRs (PR-1 / PR-2 / PR-3).**
+
+### Trigger Reasons
+
+- Cross-domain change set (compose + shell entrypoint + backend + frontend + tests + docs + coverage policy).
+- Distinct rollback boundaries needed (runtime config vs behavior vs governance/reporting).
+- Faster and safer review with independently verifiable increments.
+
+### Ordered PR Slices
+
+#### PR-1: Runtime Access Foundation (Compose + Entrypoint)
+
+Scope:
+
+- Add local-only `group_add` strategy to local/dev compose flows.
+- Improve non-root entrypoint diagnostics to print required GID.
+
+Files (expected):
+
+- `.docker/compose/docker-compose.local.yml`
+- `.docker/compose/docker-compose.dev.yml`
+- `.docker/docker-entrypoint.sh`
+
+Operator-local follow-up (not part of repo PR gate):
+
+- `/root/docker/containers/charon/docker-compose.yml`
+- `/root/docker/containers/charon/docker-compose-up-charon.sh`
+
+Dependencies:
+
+- None.
+
+Acceptance criteria:
+
+1. Container remains non-root (`id -u = 1000`).
+2. With local-only config enabled and `DOCKER_SOCK_GID` exported, `id -G` inside container includes socket GID.
+3. `GET /api/v1/docker/containers?host=local` no longer fails due to `EACCES` in correctly configured environment.
+4. If resolved socket GID is `0`, setup fails by default unless explicit opt-in and risk acknowledgment are provided.
+
+Rollback/contingency:
+
+- Revert compose and entrypoint deltas only.
+
+#### PR-2: Behavior + UX + Tests
+
+Scope:
+
+- Backend details consistency (if required).
+- Frontend troubleshooting message update.
+- Add/adjust tests around permission-denied + supplemental-group guidance.
+
+Files (expected):
+
+- `backend/internal/services/docker_service.go`
+- `backend/internal/services/docker_service_test.go`
+- `backend/internal/api/handlers/docker_handler.go`
+- `backend/internal/api/handlers/docker_handler_test.go`
+- `frontend/src/hooks/useDocker.ts`
+- `frontend/src/hooks/__tests__/useDocker.test.tsx`
+- `frontend/src/components/ProxyHostForm.tsx`
+- `frontend/src/components/__tests__/ProxyHostForm*.test.tsx`
+
+Dependencies:
+
+- PR-1 recommended (runtime setup available for realistic local validation).
+
+Acceptance criteria:
+
+1. `503` details include actionable group guidance for permission-denied scenarios.
+2. UI error panel provides mount + supplemental-group troubleshooting.
+3. All touched unit/e2e tests pass for local Docker source path.
+
+Rollback/contingency:
+
+- Revert only behavior/UI/test deltas; keep PR-1 foundations.
+
+#### PR-3: Coverage + Docs + Validation Playbook
+
+Scope:
+
+- Update `codecov.yml` exclusions for Docker logic files.
+- Update user/operator docs where socket mount guidance appears.
+- Optional task additions for socket-permission diagnostics.
+
+Files (expected):
+
+- `codecov.yml`
+- `README.md`
+- `docs/getting-started.md`
+- `SECURITY.md`
+- `.vscode/tasks.json` (optional)
+
+Dependencies:
+
+- PR-2 preferred to ensure policy aligns with test coverage additions.
+
+Acceptance criteria:
+
+1. Codecov includes Docker service/handler in coverage accounting.
+2. Docs show both socket mount and supplemental-group requirement.
+3. Validation command set is documented and reproducible.
+
+Rollback/contingency:
+
+- Revert reporting/docs/task changes only.
+
+---
+
+## 8) Validation Strategy (Protocol-Ordered)
+
+### 8.1 E2E Prerequisite / Rebuild Check (Mandatory First)
+
+Follow project protocol to decide whether E2E container rebuild is required before tests:
+
+1. If application/runtime or Docker build inputs changed, rebuild E2E environment.
+2. If only test files changed and environment is healthy, reuse current container.
+3. If environment state is suspect, rebuild.
+
+Primary task:
+
+- VS Code task: `Docker: Rebuild E2E Environment` (or clean variant when needed).
+
+### 8.2 E2E First (Mandatory)
+
+Run E2E before unit tests:
+
+- VS Code task: `Test: E2E Playwright (Targeted Suite)` for scoped regression checks.
+- VS Code task: `Test: E2E Playwright (Skill)` for broader safety pass as needed.
+
+### 8.3 Local Patch Report (Mandatory Before Unit/Coverage)
+
+Generate patch artifacts immediately after E2E:
+
+```bash
+cd /projects/Charon
+bash scripts/local-patch-report.sh
+```
+
+Required artifacts:
+
+- `test-results/local-patch-report.md`
+- `test-results/local-patch-report.json`
+
+### 8.4 Unit + Coverage Validation
+
+Backend and frontend unit coverage gates after patch report:
+
+```bash
+cd /projects/Charon/backend && go test ./internal/services ./internal/api/handlers
+cd /projects/Charon/frontend && npm run test -- src/hooks/__tests__/useDocker.test.tsx
+```
+
+Then run coverage tasks/scripts per project protocol (minimum threshold enforcement remains unchanged).
+
+### 8.5 Least-Privilege + `gid==0` Guardrail Checks
+
+Pass conditions:
+
+1. Container process remains non-root.
+2. Supplemental group grant is limited to socket GID only for local operator flow.
+3. No privileged mode or unrelated capability additions.
+4. Socket remains read-only.
+5. If socket GID resolves to `0`, local run fails closed unless explicit opt-in and risk acknowledgment are present.
+
+---
+
+## 9) Suggested File-Level Updates Summary
+
+### `repo-deliverable` Must Update
+
+- `.docker/compose/docker-compose.local.yml`
+- `.docker/compose/docker-compose.dev.yml`
+- `.docker/docker-entrypoint.sh`
+- `frontend/src/components/ProxyHostForm.tsx`
+- `codecov.yml`
+
+### `repo-deliverable` Should Update
+
+- `README.md`
+- `docs/getting-started.md`
+- `SECURITY.md`
+
+### `repo-deliverable` Optional Update
+
+- `.vscode/tasks.json` (dedicated task to precompute/export `DOCKER_SOCK_GID` and start compose)
+
+### `operator-local follow-up` (Out of Mandatory Repo PR Scope)
+
+- `/root/docker/containers/charon/docker-compose.yml`
+- `/root/docker/containers/charon/docker-compose-up-charon.sh`
+
+### Reviewed, No Required Change
+
+- `.gitignore`
+- `.dockerignore`
+- `Dockerfile` (keep non-root default)
+
+---
+
+## 10) Acceptance Criteria / DoD
+
+1. Local Docker source works in non-root container when supplemental socket group is supplied.
+2. Failure path remains explicit and actionable when supplemental group is missing.
+3. Scope split is explicit and consistent: `repo-deliverable` vs `operator-local follow-up`.
+4. Chosen policy is unambiguous: conditional local-only `group_add`; CI remains unaffected.
+5. `gid==0` path is guarded by explicit opt-in/risk acknowledgment and never silently defaulted.
+6. Validation order is protocol-aligned: E2E prerequisite/rebuild check -> E2E first -> local patch report -> unit/coverage.
+7. Coverage policy no longer suppresses Docker service/handler regression visibility.
+8. PR-1, PR-2, PR-3 each pass their slice acceptance criteria with independent rollback safety.
+9. This file contains one active plan with one frontmatter block and no archived concatenated plan content.
+
+---
+
+## 11) Handoff
+
+This plan is complete and execution-ready for Supervisor review. It includes:
+
+- Root-cause grounded file/function map
+- EARS requirements
+- Specific multi-phase implementation path
+- PR slicing with dependencies and rollback notes
+- Validation sequence explicitly aligned to project protocol order and least-privilege guarantees
diff --git a/docs/plans/current_spec.md b/docs/plans/current_spec.md
index ef2a4694..a69a91c1 100644
--- a/docs/plans/current_spec.md
+++ b/docs/plans/current_spec.md
@@ -1,194 +1,362 @@
----
-post_title: "Current Spec: Resolve Proxy Host Hostname Validation Test Failures"
-categories:
- - actions
- - testing
- - backend
-tags:
- - go
- - proxyhost
- - unit-tests
- - validation
-summary: "Focused plan to resolve failing TestProxyHostService_ValidateHostname malformed URL cases by aligning test expectations with intended validation behavior and validating via targeted service tests and coverage gate."
-post_date: 2026-02-22
----
+# Uptime Monitoring Regression Investigation (Scheduled vs Manual)
-## Active Plan: Resolve Failing Hostname Validation Tests
+Date: 2026-03-01
+Owner: Planning Agent
+Status: Investigation Complete, Fix Plan Proposed
+Severity: High (false DOWN states on automated monitoring)
-Date: 2026-02-22
-Status: Active and authoritative
-Scope Type: Backend test-failure remediation (service validation drift analysis)
-Authority: This is the only active authoritative plan section in this file.
+## 1. Executive Summary
-## Introduction
+Two services (Wizarr and Charon) can flip to `DOWN` during scheduled cycles while manual checks immediately return `UP` because scheduled checks use a host-level TCP gate that can short-circuit monitor-level HTTP checks.
-This plan resolves backend run failures in `TestProxyHostService_ValidateHostname`
-for malformed URL cases while preserving intended hostname validation behavior.
+The scheduled path is:
+- `ticker -> CheckAll -> checkAllHosts -> (host status down) -> markHostMonitorsDown`
-Primary objective:
+The manual path is:
+- `POST /api/v1/uptime/monitors/:id/check -> CheckMonitor -> checkMonitor`
-- Restore green test execution in `backend/internal/services` with a minimal,
- low-risk change path.
+Only the scheduled path runs host precheck gating. If host precheck fails (TCP to upstream host/port), `CheckAll` skips HTTP checks and forcibly writes monitor status to `down` with heartbeat message `Host unreachable`.
-## Research Findings
+This is a backend state mutation problem (not only UI rendering).
-### Evidence Collected
+## 1.1 Monitoring Policy (Authoritative Behavior)
-- Failing command output confirms two failing subtests:
- - `TestProxyHostService_ValidateHostname/malformed_https_URL`
- - `TestProxyHostService_ValidateHostname/malformed_http_URL`
-- Failure message for both cases: `invalid hostname format`.
+Charon uptime monitoring SHALL follow URL-truth semantics for HTTP/HTTPS monitors,
+matching third-party external monitor behavior (Uptime Kuma style) without requiring
+any additional service.
-### Exact Files Involved
+Policy:
+- HTTP/HTTPS monitors are URL-truth based. The monitor result is authoritative based
+ on the configured URL check outcome (status code/timeout/TLS/connectivity from URL
+ perspective).
+- Internal TCP reachability precheck (`ForwardHost:ForwardPort`) is
+ non-authoritative for HTTP/HTTPS monitor status.
+- TCP monitors remain endpoint-socket checks and may rely on direct socket
+ reachability semantics.
+- Host precheck may still be used for optimization, grouping telemetry, and operator
+ diagnostics, but SHALL NOT force HTTP/HTTPS monitors to DOWN.
-1. `backend/internal/services/proxyhost_service_validation_test.go`
- - Test function: `TestProxyHostService_ValidateHostname`
- - Failing cases currently expect `wantErr: false` for malformed URLs.
-2. `backend/internal/services/proxyhost_service.go`
- - Service function: `ValidateHostname(host string) error`
- - Behavior: strips scheme, then validates hostname characters; malformed
- residual values containing `:` are rejected with `invalid hostname format`.
+## 2. Research Findings
-### Root Cause Determination
+### 2.1 Execution Path Comparison (Required)
-- Root cause is **test expectation drift**, not runtime service regression.
-- `git blame` shows malformed URL test cases were added on 2026-02-22 with
- permissive expectations, while validation behavior rejecting malformed host
- strings predates those additions.
-- Existing behavior aligns with stricter hostname validation and should remain
- the default unless product requirements explicitly demand permissive handling
- of malformed host inputs.
+### Scheduled path behavior
+- Entry: `backend/internal/api/routes/routes.go` (background ticker, calls `uptimeService.CheckAll()`)
+- `CheckAll()` calls `checkAllHosts()` first.
+ - File: `backend/internal/services/uptime_service.go:354`
+- `checkAllHosts()` updates each `UptimeHost.Status` via TCP checks in `checkHost()`.
+ - File: `backend/internal/services/uptime_service.go:395`
+- `checkHost()` dials `UptimeHost.Host` + monitor port (prefer `ProxyHost.ForwardPort`, fallback to URL port).
+ - File: `backend/internal/services/uptime_service.go:437`
+- Back in `CheckAll()`, monitors are grouped by `UptimeHostID`.
+ - File: `backend/internal/services/uptime_service.go:367`
+- If `UptimeHost.Status == "down"`, `markHostMonitorsDown()` is called and individual monitor checks are skipped.
+ - File: `backend/internal/services/uptime_service.go:381`
+ - File: `backend/internal/services/uptime_service.go:593`
-### Confidence Assessment
+### Manual path behavior
+- Entry: `POST /api/v1/uptime/monitors/:id/check`.
+ - Handler: `backend/internal/api/handlers/uptime_handler.go:107`
+- Calls `service.CheckMonitor(*monitor)` asynchronously.
+ - File: `backend/internal/services/uptime_service.go:707`
+- `checkMonitor()` performs direct HTTP/TCP monitor check and updates monitor + heartbeat.
+ - File: `backend/internal/services/uptime_service.go:711`
-- Confidence score: **95% (High)**
-- Rationale: direct reproduction, targeted file inspection, and blame history
- converge on expectation drift.
+### Key divergence
+- Scheduled: host-gated (precheck can override monitor)
+- Manual: direct monitor check (no host gate)
-## Requirements (EARS)
+## 3. Root Cause With Evidence
-- WHEN malformed `http://` or `https://` host strings are passed to
- `ValidateHostname`, THE SYSTEM SHALL return a validation error.
-- WHEN service validation behavior is intentionally strict, THE TESTS SHALL
- assert rejection for malformed URL residual host strings.
-- IF product intent is permissive for malformed inputs, THEN THE SYSTEM SHALL
- minimally relax parsing logic without weakening valid invalid-character checks.
-- WHEN changes are completed, THE SYSTEM SHALL pass targeted service tests and
- the backend coverage gate script.
+## 3.1 Primary Root Cause: Host Precheck Overrides HTTP Success in Scheduled Cycles
-## Technical Specification
+When `UptimeHost` is marked `down`, scheduled checks do not run `checkMonitor()` for that host's monitors. Instead they call `markHostMonitorsDown()` which:
+- sets each monitor `Status = "down"`
+- writes `UptimeHeartbeat{Status: "down", Message: "Host unreachable"}`
+- maxes failure count (`FailureCount = MaxRetries`)
-### Minimal Fix Path (Preferred)
+Evidence:
+- Short-circuit: `backend/internal/services/uptime_service.go:381`
+- Forced down write: `backend/internal/services/uptime_service.go:610`
+- Forced heartbeat message: `backend/internal/services/uptime_service.go:624`
-Preferred path: **test-only correction**.
+This exactly matches symptom pattern:
+1. Manual refresh sets monitor `UP` via direct HTTP check.
+2. Next scheduler cycle can force it back to `DOWN` from host precheck path.
-1. Update malformed URL table entries in
- `backend/internal/services/proxyhost_service_validation_test.go`:
- - `malformed https URL` -> `wantErr: true`
- - `malformed http URL` -> `wantErr: true`
-2. Keep current service behavior in
- `backend/internal/services/proxyhost_service.go` unchanged.
-3. Optional test hardening (still test-only): assert error contains
- `invalid hostname format` for those two cases.
+## 3.2 Hypothesis Check: TCP precheck can fail while public URL HTTP check succeeds
-### Alternative Path (Only if Product Intent Differs)
+Confirmed as plausible by design:
+- `checkHost()` tests upstream reachability (`ForwardHost:ForwardPort`) from Charon runtime.
+- `checkMonitor()` tests monitor URL (public domain URL, often via Caddy/public routing).
-Use only if maintainers explicitly confirm malformed URL inputs should pass:
+A service can be publicly reachable by monitor URL while upstream TCP precheck fails due to network namespace/routing/DNS/hairpin differences.
-1. Apply minimal service correction in `ValidateHostname` to normalize malformed
- scheme inputs before character validation.
-2. Add or update tests to preserve strict rejection for truly invalid hostnames
- (e.g., `$`, `@`, `%`, `&`) so validation is not broadly weakened.
+This is especially likely for:
+- self-referential routes (Charon monitoring Charon via public hostname)
+- host/container networking asymmetry
+- services reachable through proxy path but not directly on upstream socket from current runtime context
-Decision default for this plan: **Preferred path (test updates only)**.
+## 3.3 Recent Change Correlation (Required)
-## Implementation Plan
+### `SyncAndCheckForHost` (regression amplifier)
+- Introduced in commit `2cd19d89` and called from proxy host create path.
+- Files:
+ - `backend/internal/services/uptime_service.go:1195`
+ - `backend/internal/api/handlers/proxy_host_handler.go:418`
+- Behavior: creates/syncs monitor and immediately runs `checkMonitor()`.
-### Phase 1: Test-first Repro and Baseline
+Impact: makes monitors quickly show `UP` after create/manual, then scheduler can flip to `DOWN` if host precheck fails. This increased visibility of scheduled/manual inconsistency.
-1. Confirm current failure (already reproduced).
-2. Record failing subtests and error signatures as baseline evidence.
+### `CleanupStaleFailureCounts`
+- Introduced in `2cd19d89`, refined in `7a12ab79`.
+- File: `backend/internal/services/uptime_service.go:1277`
+- It runs at startup and resets stale monitor states only; not per-cycle override logic.
+- Not root cause of recurring per-cycle flip.
-### Phase 2: Minimal Remediation
+### Frontend effective status changes
+- Latest commit `0241de69` refactors `effectiveStatus` handling.
+- File: `frontend/src/pages/Uptime.tsx`.
+- Backend evidence proves this is not visual-only: scheduler writes `down` heartbeats/messages directly in DB.
-1. Apply preferred test expectation update in
- `backend/internal/services/proxyhost_service_validation_test.go`.
-2. Keep service code unchanged unless product intent is clarified otherwise.
+## 3.4 Grouping Logic Analysis (`UptimeHost`/`UpstreamHost`)
-### Phase 3: Targeted Validation
+Monitors are grouped by `UptimeHostID` in `CheckAll()`. `UptimeHost` is derived from `ProxyHost.ForwardHost` in sync flows.
-Run in this order:
+Relevant code:
+- group map by `UptimeHostID`: `backend/internal/services/uptime_service.go:367`
+- host linkage in sync: `backend/internal/services/uptime_service.go:189`, `backend/internal/services/uptime_service.go:226`
+- sync single-host update path: `backend/internal/services/uptime_service.go:1023`
-1. `go test ./backend/internal/services -run TestProxyHostService_ValidateHostname -v`
-2. Related service package tests:
- - `go test ./backend/internal/services -run TestProxyHostService -v`
- - `go test ./backend/internal/services -v`
-3. Final gate:
- - `bash scripts/go-test-coverage.sh`
+Risk: one host precheck failure can mark all grouped monitors down without URL-level validation.
-## Risk Assessment
+## 4. Technical Specification (Fix Plan)
-### Key Risks
+## 4.1 Minimal Proper Fix (First)
-1. **Semantic risk (low):** updating tests could mask an intended behavior
- change if malformed URL permissiveness was deliberate.
-2. **Coverage risk (low):** test expectation changes may alter branch coverage
- marginally but should not threaten gate based on current context.
-3. **Regression risk (low):** service runtime behavior remains unchanged in the
- preferred path.
+Goal: eliminate false DOWN while preserving existing behavior as much as possible.
-### Mitigations
+Change `CheckAll()` host-down branch to avoid hard override for HTTP/HTTPS monitors.
-- Keep change surgical to two table entries.
-- Preserve existing invalid-character rejection coverage.
-- Require full service package run plus coverage script before merge.
+Mandatory hotfix rule:
+- WHEN a host precheck is `down`, THE SYSTEM SHALL partition host monitors by type inside `CheckAll()`.
+- `markHostMonitorsDown` MUST be invoked only for `tcp` monitors.
+- `http`/`https` monitors MUST still run through `checkMonitor()` and MUST NOT be force-written `down` by the host precheck path.
+- Host precheck outcomes MAY be recorded for optimization/telemetry/grouping, but MUST NOT be treated as final status for `http`/`https` monitors.
-## Rollback Plan
-
-If maintainer/product decision confirms permissive malformed URL handling is
-required:
-
-1. Revert the test expectation update commit.
-2. Implement minimal service normalization change in
- `backend/internal/services/proxyhost_service.go`.
-3. Add explicit tests documenting the accepted malformed-input behavior and
- retain strict negative tests for illegal hostname characters.
-4. Re-run targeted validation commands and coverage gate.
-
-## PR Slicing Strategy
-
-Decision: **Single PR**.
+Proposed rule:
+1. If host is down:
+ - For `http`/`https` monitors: still run `checkMonitor()` (do not force down).
+ - For `tcp` monitors: keep current host-down fast-path (`markHostMonitorsDown`) or direct tcp check.
+2. If host is not down:
+ - Keep existing behavior (run `checkMonitor()` for all monitors).
Rationale:
+- Aligns scheduled behavior with manual for URL-based monitors.
+- Preserves reverse proxy product semantics where public URL availability is the source of truth.
+- Minimal code delta in `CheckAll()` decision branch.
+- Preserves optimization for true TCP-only monitors.
-- Scope is tightly bounded to one service test suite and one failure cluster.
-- Preferred remediation is test-only with low rollback complexity.
-- Review surface is small and dependency-free.
+### Exact file/function targets
+- `backend/internal/services/uptime_service.go`
+ - `CheckAll()`
+ - add small helper (optional): `partitionMonitorsByType(...)`
-Contingency split trigger:
+## 4.2 Long-Term Robust Fix (Deferred)
-- Only split if product intent forces service logic change, in which case:
- - PR-1: test expectation alignment rollback + service behavior decision record
- - PR-2: minimal service correction + adjusted tests
+Introduce host precheck as advisory signal, not authoritative override.
-## Config/Infra File Impact Review
+Design:
+1. Add `HostReachability` result to run context (not persisted as forced monitor status).
+2. Always execute per-monitor checks, but use host precheck to:
+ - tune retries/backoff
+ - annotate failure reason
+ - optimize notification batching
+3. Optionally add feature flag:
+ - `feature.uptime.strict_host_precheck` (default `false`)
+ - allows legacy strict gating in environments that want it.
-Reviewed for required updates:
+Benefits:
+- Removes false DOWN caused by precheck mismatch.
+- Keeps performance and batching controls.
+- More explicit semantics for operators.
-- `.gitignore`
-- `.dockerignore`
-- `codecov.yml`
-- `Dockerfile`
+## 5. API/Schema Impact
-Planned changes: **None required** for this focused backend test-remediation
-scope.
+No API contract change required for minimal fix.
+No database migration required for minimal fix.
-## Acceptance Criteria
+Long-term fix may add one feature flag setting only.
-1. `TestProxyHostService_ValidateHostname` passes, including malformed URL
- subtests.
-2. `go test ./backend/internal/services -run TestProxyHostService -v` passes.
-3. `go test ./backend/internal/services -v` passes.
-4. `bash scripts/go-test-coverage.sh` passes final gate.
-5. Root cause is documented as expectation drift vs. service behavior drift, and
- chosen path is explicitly recorded.
+## 6. EARS Requirements
+
+### Ubiquitous
+- THE SYSTEM SHALL evaluate HTTP/HTTPS monitor availability using URL-level checks as the authoritative signal.
+
+### Event-driven
+- WHEN the scheduled uptime cycle runs, THE SYSTEM SHALL execute HTTP/HTTPS monitor checks regardless of internal host precheck state.
+- WHEN the scheduled uptime cycle runs and host precheck is down, THE SYSTEM SHALL apply host-level forced-down logic only to TCP monitors.
+
+### State-driven
+- WHILE a monitor type is `http` or `https`, THE SYSTEM SHALL NOT force monitor status to `down` solely from internal host precheck failure.
+- WHILE a monitor type is `tcp`, THE SYSTEM SHALL evaluate status using endpoint socket reachability semantics.
+
+### Unwanted behavior
+- IF internal host precheck is unreachable AND URL-level HTTP/HTTPS check returns success, THEN THE SYSTEM SHALL set monitor status to `up`.
+- IF internal host precheck is reachable AND URL-level HTTP/HTTPS check fails, THEN THE SYSTEM SHALL set monitor status to `down`.
+
+### Optional
+- WHERE host precheck telemetry is enabled, THE SYSTEM SHALL record host-level reachability for diagnostics and grouping without overriding HTTP/HTTPS monitor final state.
+
+## 7. Implementation Plan
+
+### Phase 1: Reproduction Lock-In (Tests First)
+- Add backend service test proving current regression:
+ - host precheck fails
+ - monitor URL check would succeed
+ - scheduled `CheckAll()` currently writes down (existing behavior)
+- File: `backend/internal/services/uptime_service_test.go` (new test block)
+
+### Phase 2: Minimal Backend Fix
+- Update `CheckAll()` branch logic to run HTTP/HTTPS monitors even when host is down.
+- Make monitor partitioning explicit and mandatory in `CheckAll()` host-down branch.
+- Add an implementation guard before partitioning: normalize monitor type using
+ `strings.TrimSpace` + `strings.ToLower` to prevent `HTTP`/`HTTPS` case
+ regressions and whitespace-related misclassification.
+- Ensure `markHostMonitorsDown` is called only for TCP monitor partitions.
+- File: `backend/internal/services/uptime_service.go`
+
+### Phase 3: Backend Validation
+- Add/adjust tests:
+ - scheduled path no longer forces down when HTTP succeeds
+ - manual and scheduled reach same final state for HTTP monitors
+ - internal host unreachable + public URL HTTP 200 => monitor is `UP`
+ - internal host reachable + public URL failure => monitor is `DOWN`
+ - TCP monitor behavior unchanged under host-down conditions
+- Files:
+ - `backend/internal/services/uptime_service_test.go`
+ - `backend/internal/services/uptime_service_race_test.go` (if needed for concurrency side-effects)
+
+### Phase 4: Integration/E2E Coverage
+- Add targeted API-level integration test for scheduler vs manual parity.
+- Add Playwright scenario for:
+ - monitor set UP by manual check
+ - remains UP after scheduled cycle when URL is reachable
+- Add parity scenario for:
+ - internal TCP precheck unreachable + URL returns 200 => `UP`
+ - internal TCP precheck reachable + URL failure => `DOWN`
+- Files:
+ - `backend/internal/api/routes/routes_test.go` (or uptime handler integration suite)
+ - `tests/monitoring/uptime-monitoring.spec.ts` (or equivalent uptime spec file)
+
+Scope note:
+- This hotfix plan is intentionally limited to backend behavior correction and
+ regression tests (unit/integration/E2E).
+- Dedicated documentation-phase work is deferred and out of scope for this
+ hotfix PR.
+
+## 8. Test Plan (Unit / Integration / E2E)
+
+Duplicate notification definition (hotfix acceptance/testing):
+- A duplicate notification means the same `(monitor_id, status,
+ scheduler_tick_id)` is emitted more than once within a single scheduler run.
+
+## Unit Tests
+1. `CheckAll_HostDown_DoesNotForceDown_HTTPMonitor_WhenHTTPCheckSucceeds`
+2. `CheckAll_HostDown_StillHandles_TCPMonitor_Conservatively`
+3. `CheckAll_ManualAndScheduledParity_HTTPMonitor`
+4. `CheckAll_InternalHostUnreachable_PublicURL200_HTTPMonitorEndsUp` (blocking)
+5. `CheckAll_InternalHostReachable_PublicURLFail_HTTPMonitorEndsDown` (blocking)
+
+## Integration Tests
+1. Scheduler endpoint (`/api/v1/system/uptime/check`) parity with monitor check endpoint.
+2. Verify DB heartbeat message is real HTTP result (not `Host unreachable`) for HTTP monitors where URL is reachable.
+3. Verify when host precheck is down, HTTP monitor heartbeat/notification output is derived from `checkMonitor()` (not synthetic host-path `Host unreachable`).
+4. Verify no duplicate notifications are emitted from host+monitor paths for the same scheduler run, where duplicate is defined as repeated `(monitor_id, status, scheduler_tick_id)`.
+5. Verify internal host precheck unreachable + public URL 200 still resolves monitor `UP`.
+6. Verify internal host precheck reachable + public URL failure resolves monitor `DOWN`.
+
+## E2E Tests
+1. Create/sync monitor scenario where manual refresh returns `UP`.
+2. Wait one scheduler interval.
+3. Assert monitor remains `UP` and latest heartbeat is not forced `Host unreachable` for reachable URL.
+4. Assert scenario: internal host precheck unreachable + public URL 200 => monitor remains `UP`.
+5. Assert scenario: internal host precheck reachable + public URL failure => monitor is `DOWN`.
+
+## Regression Guardrails
+- Add a test explicitly asserting that host precheck must not unconditionally override HTTP monitor checks.
+- Add explicit assertions that HTTP monitors under host-down precheck emit
+ check-derived heartbeat messages and do not produce duplicate notifications
+ under the `(monitor_id, status, scheduler_tick_id)` rule within a single
+ scheduler run.
+
+## 9. Risks and Rollback
+
+## Risks
+1. More HTTP checks under true host outage may increase check volume.
+2. Notification patterns may shift from single host-level event to monitor-level batched events.
+3. Edge cases for mixed-type monitor groups (HTTP + TCP) need deterministic behavior.
+
+## Mitigations
+1. Preserve batching (`queueDownNotification`) and existing retry thresholds.
+2. Keep TCP strict path unchanged in minimal fix.
+3. Add explicit log fields and targeted tests for mixed groups.
+
+## Rollback Plan
+1. Revert the `CheckAll()` branch change only (single-file rollback).
+2. Keep added tests; mark expected behavior as legacy if temporary rollback needed.
+3. If necessary, introduce temporary feature toggle to switch between strict and tolerant host gating.
+
+## 10. PR Slicing Strategy
+
+Decision: Single focused PR (hotfix + tests)
+
+Trigger reasons:
+- High-severity runtime behavior fix requiring minimal blast radius
+- Fast review/rollback with behavior-only delta plus regression coverage
+- Avoid scope creep into optional hardening/feature-flag work
+
+### PR-1 (Hotfix + Tests)
+Scope:
+- `CheckAll()` host-down branch adjustment for HTTP/HTTPS
+- Unit/integration/E2E regression tests for URL-truth semantics
+
+Files:
+- `backend/internal/services/uptime_service.go`
+- `backend/internal/services/uptime_service_test.go`
+- `backend/internal/api/routes/routes_test.go` (or equivalent)
+- `tests/monitoring/uptime-monitoring.spec.ts` (or equivalent)
+
+Validation gates:
+- backend unit tests pass
+- targeted uptime integration tests pass
+- targeted uptime E2E tests pass
+- no behavior regression in existing `CheckAll` tests
+
+Rollback:
+- single revert of PR-1 commit
+
+## 11. Acceptance Criteria (DoD)
+
+1. Scheduled and manual checks produce consistent status for HTTP/HTTPS monitors.
+2. A reachable monitor URL is not forced to `DOWN` solely by host precheck failure.
+3. New regression tests fail before fix and pass after fix.
+4. No break in TCP monitor behavior expectations.
+5. No new critical/high security findings in touched paths.
+6. Blocking parity case passes: internal host precheck unreachable + public URL 200 => scheduled result is `UP`.
+7. Blocking parity case passes: internal host precheck reachable + public URL failure => scheduled result is `DOWN`.
+8. Under host-down precheck, HTTP monitors produce check-derived heartbeat messages (not synthetic `Host unreachable` from host path).
+9. No duplicate notifications are produced by host+monitor paths within a
+ single scheduler run, where duplicate is defined as repeated
+ `(monitor_id, status, scheduler_tick_id)`.
+
+## 12. Implementation Risks
+
+1. Increased scheduler workload during host-precheck failures because HTTP/HTTPS checks continue to run.
+2. Notification cadence may change due to check-derived monitor outcomes replacing host-forced synthetic downs.
+3. Mixed monitor groups (TCP + HTTP/HTTPS) require strict ordering/partitioning to avoid regression.
+
+Mitigations:
+- Keep change localized to `CheckAll()` host-down branch decisioning.
+- Add explicit regression tests for both parity directions and mixed monitor types.
+- Keep rollback path as single-commit revert.
diff --git a/docs/reports/caddy-compatibility-matrix.md b/docs/reports/caddy-compatibility-matrix.md
new file mode 100644
index 00000000..15f104a4
--- /dev/null
+++ b/docs/reports/caddy-compatibility-matrix.md
@@ -0,0 +1,32 @@
+# PR-1 Caddy Compatibility Matrix Report
+
+- Generated at: 2026-02-23T13:52:26Z
+- Candidate Caddy version: 2.11.1
+- Plugin set: caddy-security,coraza-caddy,caddy-crowdsec-bouncer,caddy-geoip2,caddy-ratelimit
+- Smoke set: boot_caddy,plugin_modules,config_validate,admin_api_health
+- Matrix dimensions: patch scenario × platform/arch × checked plugin modules
+
+## Deterministic Pass/Fail
+
+A matrix cell is PASS only when every smoke check and module inventory extraction passes.
+
+Promotion gate semantics (spec-aligned):
+- Scenario A on linux/amd64 and linux/arm64 is promotion-gating.
+- Scenario B/C are evidence-only; failures in B/C do not fail the PR-1 promotion gate.
+
+## Matrix Output
+
+| Scenario | Platform | Plugins Checked | boot_caddy | plugin_modules | config_validate | admin_api_health | module_inventory | Status |
+| --- | --- | --- | --- | --- | --- | --- | --- | --- |
+| A | linux/amd64 | http.handlers.auth_portal, http.handlers.waf, http.handlers.crowdsec, http.handlers.geoip2, http.handlers.rate_limit | PASS | PASS | PASS | PASS | PASS | PASS |
+| A | linux/arm64 | http.handlers.auth_portal, http.handlers.waf, http.handlers.crowdsec, http.handlers.geoip2, http.handlers.rate_limit | PASS | PASS | PASS | PASS | PASS | PASS |
+| B | linux/amd64 | http.handlers.auth_portal, http.handlers.waf, http.handlers.crowdsec, http.handlers.geoip2, http.handlers.rate_limit | PASS | PASS | PASS | PASS | PASS | PASS |
+| B | linux/arm64 | http.handlers.auth_portal, http.handlers.waf, http.handlers.crowdsec, http.handlers.geoip2, http.handlers.rate_limit | PASS | PASS | PASS | PASS | PASS | PASS |
+| C | linux/amd64 | http.handlers.auth_portal, http.handlers.waf, http.handlers.crowdsec, http.handlers.geoip2, http.handlers.rate_limit | PASS | PASS | PASS | PASS | PASS | PASS |
+| C | linux/arm64 | http.handlers.auth_portal, http.handlers.waf, http.handlers.crowdsec, http.handlers.geoip2, http.handlers.rate_limit | PASS | PASS | PASS | PASS | PASS | PASS |
+
+## Artifacts
+
+- Matrix CSV: test-results/caddy-compat/matrix-summary.csv
+- Per-cell module inventories: test-results/caddy-compat/module-inventory-*-go-version-m.txt
+- Per-cell Caddy module listings: test-results/caddy-compat/module-inventory-*-modules.txt
diff --git a/docs/reports/caddy-security-posture.md b/docs/reports/caddy-security-posture.md
new file mode 100644
index 00000000..893e6d55
--- /dev/null
+++ b/docs/reports/caddy-security-posture.md
@@ -0,0 +1,65 @@
+## PR-2 Security Patch Posture and Advisory Disposition
+
+- Date: 2026-02-23
+- Scope: PR-2 only (security patch posture + xcaddy patch retirement decision)
+- Upstream target: Caddy 2.11.x line (`2.11.1` candidate in this repository)
+- Inputs:
+ - PR-1 compatibility matrix: `docs/reports/caddy-compatibility-matrix.md`
+ - Plan authority: `docs/plans/current_spec.md`
+ - Runtime and bootstrap assumptions: `.docker/docker-entrypoint.sh`, `.docker/compose/docker-compose.yml`
+
+### 1) Final patch disposition
+
+| Patch target | Decision | Rationale (evidence-backed) | Rollback path |
+| --- | --- | --- | --- |
+| `github.com/expr-lang/expr@v1.17.7` | Retain | Enforced by current builder patching and CI dependency checks. | Keep current pin. |
+| `github.com/hslatman/ipstore@v0.4.0` | Retain | No PR-2 evidence supports safe retirement. | Keep current pin. |
+| `github.com/slackhq/nebula@v1.9.7` | Retire by default | Matrix evidence supports scenario `B`/`C`; default moved to `B` with rollback preserved. | Set `CADDY_PATCH_SCENARIO=A`. |
+
+### 2) Caddy 2.11.x advisory disposition
+
+| Advisory | Component summary | Exploitability | Evidence source | Owner | Recheck cadence |
+| --- | --- | --- | --- | --- | --- |
+| `GHSA-5r3v-vc8m-m96g` (`CVE-2026-27590`) | FastCGI `split_path` confusion | Not affected | Upstream advisory + Charon runtime path review (no FastCGI transport in default generated config path) | QA_Security | weekly |
+| `GHSA-879p-475x-rqh2` (`CVE-2026-27589`) | Admin API cross-origin no-cors | Mitigated | Upstream advisory + local controls: `CHARON_CADDY_ADMIN_API` now validated against internal allowlist and expected port 2019; production compose does not publish 2019 by default | QA_Security | weekly |
+| `GHSA-x76f-jf84-rqj8` (`CVE-2026-27588`) | Host matcher case bypass | Mitigated | Upstream advisory + PR-1 Caddy 2.11.x matrix compatibility evidence and Charon route/security test reliance on upgraded line | QA_Security | release-candidate |
+| `GHSA-g7pc-pc7g-h8jh` (`CVE-2026-27587`) | Path matcher escaped-case bypass | Mitigated | Upstream advisory + PR-1 matrix evidence and maintained security enforcement suite coverage | QA_Security | release-candidate |
+| `GHSA-hffm-g8v7-wrv7` (`CVE-2026-27586`) | mTLS client-auth fail-open | Not affected | Upstream advisory + Charon default deployment model does not enable mTLS client-auth CA pool configuration by default | QA_Security | on-upstream-change |
+| `GHSA-4xrr-hq4w-6vf4` (`CVE-2026-27585`) | File matcher glob sanitization bypass | Not affected | Upstream advisory + no default Charon generated config dependency on vulnerable matcher pattern | QA_Security | on-upstream-change |
+
+### 3) Admin API exposure assumptions and hardening status
+
+- Assumption: only internal Caddy admin endpoints are valid management targets.
+- PR-2 enforcement:
+ - validate and normalize `CHARON_CADDY_ADMIN_API`/`CPM_CADDY_ADMIN_API`
+ - host allowlist + expected port `2019`
+ - fail-fast startup on invalid/non-allowlisted endpoint
+- Exposure check: production compose defaults do not publish port `2019`.
+
+### 4) Runtime safety and rollback preservation
+
+- Runtime defaults keep `expr` and `ipstore` pinned.
+- `nebula` pin retirement is controlled by scenario switch, not hard deletion.
+- Emergency rollback remains one-step: `CADDY_PATCH_SCENARIO=A`.
+
+### Validation executed for PR-2
+
+| Command / Task | Outcome |
+| --- | --- |
+| `cd /projects/Charon/backend && go test ./internal/config` | PASS |
+| VS Code task `Security: Caddy PR-1 Compatibility Matrix` | PASS (A/B/C scenarios pass on `linux/amd64` and `linux/arm64`; promotion gate PASS) |
+
+Relevant generated artifacts:
+- `docs/reports/caddy-compatibility-matrix.md`
+- `test-results/caddy-compat/matrix-summary.csv`
+- `test-results/caddy-compat/module-inventory-*-go-version-m.txt`
+- `test-results/caddy-compat/module-inventory-*-modules.txt`
+
+### Residual risks / follow-up watch
+
+1. Caddy advisories with reserved or evolving CVE enrichment may change exploitability interpretation; recheck cadence remains active.
+2. Caddy bootstrap still binds admin listener to container interface (`0.0.0.0:2019`) for compatibility, so operator misconfiguration that publishes port `2019` can expand attack surface; production compose defaults avoid publishing this port.
+
+### PR-2 closure statement
+
+PR-2 posture decisions are review-ready: patch disposition is explicit, admin API assumptions are enforced, and rollback remains deterministic. No PR-3 scope is included.
diff --git a/docs/reports/qa_report.md b/docs/reports/qa_report.md
index 9f5cdb21..b2dc9a57 100644
--- a/docs/reports/qa_report.md
+++ b/docs/reports/qa_report.md
@@ -1,143 +1,80 @@
-## QA/Security Validation Report - Governance Documentation Slice
+## QA Report - PR #779
-Date: 2026-02-20
-Repository: /projects/Charon
-Scope files:
-- `.github/instructions/copilot-instructions.md`
-- `.github/instructions/testing.instructions.md`
-- `.github/instructions/security-and-owasp.instructions.md`
-- `.github/agents/Management.agent.md`
-- `.github/agents/Backend_Dev.agent.md`
-- `.github/agents/QA_Security.agent.md`
-- `SECURITY.md`
-- `docs/security.md`
-- `docs/features/notifications.md`
+- Date: 2026-03-01
+- Scope: Post-remediation merge-readiness gates after Caddy Import E2E fix
-### Result Summary
+## E2E Status
-| Check | Status | Notes |
-|---|---|---|
-| 1) No secrets/tokens introduced in changed docs | PASS | No raw token values, API keys, or private credential material detected in scoped diffs; only policy/example strings were found. |
-| 2) Policy consistency verification | PASS | GORM conditional DoD gate, check-mode semantics, include/exclude trigger matrix, Gotify no-exposure + URL redaction, and precedence hierarchy are consistently present across canonical instructions and aligned agent/operator docs. |
-| 3) Markdown lint on scoped files | PASS | `markdownlint-cli2` reports baseline debt (`319` total), but intersection of lint hits with added hunk ranges for this governance slice returned no new lint hits in added sections. |
-| 4) Confirm governance-only scope for this slice | PASS | Scoped diff over the 9 target files confirms this implementation slice touches only those 9 governance files for evaluation. Unrelated branch changes were explicitly excluded by scope criteria. |
-| 5) QA report update for governance slice | PASS | This section added as the governance-slice QA record. |
+- Command status provided by current PR context:
+ `npx playwright test --project=chromium --project=firefox --project=webkit tests/core/caddy-import`
+- Result: `106 passed, 0 failed, 0 skipped`
+- Gate: PASS
-### Commands Executed
+## Patch Report Status
-```bash
-git diff --name-only -- .github/instructions/copilot-instructions.md .github/instructions/testing.instructions.md .github/instructions/security-and-owasp.instructions.md .github/agents/Management.agent.md .github/agents/Backend_Dev.agent.md .github/agents/QA_Security.agent.md SECURITY.md docs/security.md docs/features/notifications.md
+- Command: `bash scripts/local-patch-report.sh`
+- Artifacts:
+ - `test-results/local-patch-report.md` (present)
+ - `test-results/local-patch-report.json` (present)
+- Result: PASS (artifacts generated)
+- Notes:
+ - Warning: overall patch coverage `81.7%` below advisory threshold `90.0%`
+ - Warning: backend patch coverage `81.6%` below advisory threshold `85.0%`
-git diff -U0 -- | grep '^+[^+]' | grep -Ei '(token|secret|api[_-]?key|password|ghp_|sk_|AKIA|xox|BEGIN)'
+## Backend Coverage
-npx --yes markdownlint-cli2 \
- .github/instructions/copilot-instructions.md \
- .github/instructions/testing.instructions.md \
- .github/instructions/security-and-owasp.instructions.md \
- .github/agents/Management.agent.md \
- .github/agents/Backend_Dev.agent.md \
- .github/agents/QA_Security.agent.md \
- SECURITY.md docs/security.md docs/features/notifications.md
+- Command: `.github/skills/scripts/skill-runner.sh test-backend-coverage`
+- Result: PASS
+- Metrics:
+ - Statement coverage: `87.5%`
+ - Line coverage: `87.7%`
+ - Gate threshold observed in run: `87%`
-# Added-line lint intersection:
-# 1) build added hunk ranges from `git diff -U0 -- `
-# 2) run markdownlint output capture
-# 3) intersect (file,line) lint hits with added ranges
-# Result: no lint hits on added governance lines
-```
+## Frontend Coverage
-### Blockers
+- Command: `.github/skills/scripts/skill-runner.sh test-frontend-coverage`
+- Result: FAIL
+- Failure root cause:
+ - Test timeout at `frontend/src/components/__tests__/ProxyHostForm.test.tsx:1419`
+ - Failing test: `maps remote docker container to remote host and public port`
+ - Error: `Test timed out in 5000ms`
+- Coverage snapshot produced before failure:
+ - Statements: `88.95%`
+ - Lines: `89.62%`
+ - Functions: `86.05%`
+ - Branches: `81.3%`
-- None specific to this governance slice.
+## Typecheck
-### Baseline Notes (Non-Blocking for This Slice)
+- Command: `npm --prefix frontend run type-check`
+- Result: PASS
-- Markdownlint baseline debt remains in the 9 scoped files and broader repository, but no new critical regression was introduced in governance-added sections for this slice.
+## Pre-commit
-### Final Governance Slice Verdict
+- Command: `pre-commit run --all-files`
+- Result: PASS
+- Notable hooks: `golangci-lint (Fast Linters - BLOCKING)`, `Frontend TypeScript Check`, `Frontend Lint (Fix)` all passed
-**PASS** — All slice-scoped criteria passed under change-scope evaluation.
+## Security Scans
-## QA/Security Validation Report - PR-2 Frontend Slice
+- Trivy filesystem scan:
+ - Command: `.github/skills/scripts/skill-runner.sh security-scan-trivy`
+ - Result: PASS
+ - Critical/High findings: `0/0`
-Date: 2026-02-20
-Repository: /projects/Charon
-Scope: Final focused QA/security gate for notifications/security-event UX changes. Full E2E suite remains deferred to CI.
+- Docker image scan:
+ - Command: `.github/skills/scripts/skill-runner.sh security-scan-docker-image`
+ - Result: PASS
+ - Critical/High findings: `0/0`
+ - Additional findings: `10 medium`, `3 low` (non-blocking)
-### Gate Results
+## Remediation Required Before Merge
-| # | Required Check | Command(s) | Status | Evidence |
-|---|---|---|---|---|
-| 1 | Focused frontend tests for changed area | `cd frontend && npm run test -- src/pages/__tests__/Notifications.test.tsx src/pages/__tests__/Security.functional.test.tsx src/components/__tests__/SecurityNotificationSettingsModal.test.tsx src/api/__tests__/notifications.test.ts` | PASS | `4` files passed, `59` tests passed, `1` skipped. |
-| 2 | Frontend type-check | `cd frontend && npm run type-check` | PASS | `tsc --noEmit` completed with no errors. |
-| 3 | Frontend coverage gate | `.github/skills/scripts/skill-runner.sh test-frontend-coverage` | PASS | Coverage report: statements `87.86%`, lines `88.63%`; gate line threshold `85%` passed. |
-| 4 | Focused Playwright suite for notifications/security UX | `npx playwright test tests/settings/notifications.spec.ts --project=firefox` `npx playwright test tests/security-enforcement/zzz-security-ui/system-security-settings.spec.ts --project=security-tests` | PASS | Notifications suite (prior run): `27/27` passed. Security settings focused suite (latest): `21/21` passed. |
-| 5 | Pre-commit fast hooks | `pre-commit run --files $(git diff --name-only --diff-filter=ACMRTUXB)` | PASS | Fast hooks passed, including `golangci-lint (Fast Linters - BLOCKING)`, `Go Vet`, `dockerfile validation`, `Frontend TypeScript Check`, and `Frontend Lint (Fix)`. |
-| 6 | CodeQL findings gate status (CI-aligned outputs) | Task `Security: CodeQL Go Scan (CI-Aligned) [~60s]` Task `Security: CodeQL JS Scan (CI-Aligned) [~90s]` `pre-commit run --hook-stage manual codeql-check-findings --all-files` | PASS | Fresh SARIF artifacts present (`codeql-results-go.sarif`, `codeql-results-js.sarif`); manual findings gate reports no HIGH/CRITICAL findings. |
-| 7 | Dockerized Trivy + Docker image scan status | `.github/skills/scripts/skill-runner.sh security-scan-trivy vuln,secret,misconfig json` Task `Security: Scan Docker Image (Local)` | PASS | Existing Dockerized Trivy result remains passing from prior run. Latest local Docker image gate: `Critical: 0`, `High: 0` (effective gate pass). |
+1. Stabilize the timed-out frontend test at `frontend/src/components/__tests__/ProxyHostForm.test.tsx:1419`.
+2. Re-run `.github/skills/scripts/skill-runner.sh test-frontend-coverage` until the suite is fully green.
+3. Optional quality improvement: raise patch coverage warnings (`81.7%` overall, `81.6%` backend) with targeted tests on uncovered changed lines from `test-results/local-patch-report.md`.
-### Confirmation of Prior Passing Gates (No Re-run)
+## Final Merge Recommendation
-- Frontend tests/type-check/coverage remain confirmed PASS from prior validated run.
-- Pre-commit fast hooks remain confirmed PASS from prior validated run.
-- CodeQL Go + JS CI-aligned scans remain confirmed PASS from prior validated run.
-- Dockerized Trivy scan remains confirmed PASS from prior validated run.
-
-### Blocking Items
-
-- None for PR-2 focused QA/security scope.
-
-### Final Verdict
-
-- Overall Result: **PASS**
-- Full E2E regression remains deferred to CI as requested.
-- No remaining focused blockers identified.
-
-### Handoff References
-
-- Manual test plan (PR-1 + PR-2): `docs/issues/manual_test_provider_security_notifications_pr1_pr2.md`
-- Existing focused QA evidence in this report remains the baseline for automated validation.
-
-## QA/Security Validation Report - SMTP Flaky Test Fix (Test-Only Backend Change)
-
-Date: 2026-02-22
-Repository: /projects/Charon
-Scope: Validate SMTP STARTTLS test-stability fix without production behavior change.
-
-### Scope Verification
-
-| Check | Status | Evidence |
-|---|---|---|
-| Changed files are test-only (no production code changes) | PASS | `git status --short` shows only `backend/internal/services/mail_service_test.go` and `docs/plans/current_spec.md` modified. |
-| Production behavior unchanged by diff scope | PASS | No non-test backend/service implementation files modified. |
-
-### Required Validation Results
-
-| # | Command | Status | Evidence Snippet |
-|---|---|---|---|
-| 1 | `go test ./backend/internal/services -run TestMailService_TestConnection_StartTLSSuccessWithAuth -count=20` | PASS | `ok github.com/Wikid82/charon/backend/internal/services 1.403s` |
-| 2 | `go test -race ./backend/internal/services -run 'TestMailService_(TestConnection|Send)' -count=1` | PASS | `ok github.com/Wikid82/charon/backend/internal/services 1.270s` |
-| 3 | `bash scripts/go-test-coverage.sh` | PASS | `Statement coverage: 86.1%` / `Line coverage: 86.4%` / `Coverage requirement met` |
-| 4 | `pre-commit run --all-files` | PASS | All hooks passed, including `golangci-lint (Fast Linters - BLOCKING)`, `Go Vet`, `Frontend TypeScript Check`, `Frontend Lint (Fix)`. |
-
-### Additional QA Context
-
-| Check | Status | Evidence |
-|---|---|---|
-| Local patch coverage preflight artifacts generated | PASS | `bash scripts/local-patch-report.sh` produced `test-results/local-patch-report.md` and `test-results/local-patch-report.json`. |
-| Patch coverage threshold warning (advisory) | WARN (non-blocking) | Report output: `WARN: Overall patch coverage 53.8% ...` and `WARN: Backend patch coverage 52.0% ...`. |
-
-### Security Stance
-
-| Check | Status | Notes |
-|---|---|---|
-| New secret/token exposure risk introduced by test changes | PASS | Change scope is test helper logic only; no credentials/tokens were added to production paths, logs, or API outputs. |
-| Gotify token leakage pattern introduced | PASS | No Gotify tokenized URLs or token fields were added in the changed test file. |
-
-### Blockers
-
-- None.
-
-### Verdict
-
-**PASS** — SMTP flaky test fix validates as test-only, stable under repetition/race checks, meets backend coverage gate, passes full pre-commit, and introduces no new secret/token exposure risk.
+- Recommendation: **NO-GO**
+- Reason: Required frontend coverage gate did not pass due to a deterministic test timeout.
diff --git a/docs/reports/qa_report_pr754.md b/docs/reports/qa_report_pr754.md
new file mode 100644
index 00000000..93aafc59
--- /dev/null
+++ b/docs/reports/qa_report_pr754.md
@@ -0,0 +1,138 @@
+# QA Report — PR #754: Enable and Test Gotify and Custom Webhook Notifications
+
+**Branch:** `feature/beta-release`
+**Date:** 2026-02-25
+**Auditor:** QA Security Agent
+
+---
+
+## Summary
+
+| # | Check | Result | Details |
+|---|-------|--------|---------|
+| 1 | Local Patch Coverage Preflight | **WARN** | 79.5% overall (threshold 90%), 78.3% backend (threshold 85%) — advisory only |
+| 2 | Backend Coverage ≥ 85% | **PASS** | 87.0% statement / 87.3% line (threshold 87%) |
+| 3 | Frontend Coverage ≥ 85% | **PASS** | 88.21% statement / 88.97% line (threshold 85%) |
+| 4 | TypeScript Type Check | **PASS** | Zero errors |
+| 5 | Pre-commit Hooks | **PASS** | All 15 hooks passed |
+| 6a | Trivy Filesystem Scan | **PASS** | 0 CRITICAL/HIGH in project code (findings only in Go module cache) |
+| 6b | Docker Image Scan | **WARN** | 1 HIGH in Caddy transitive dep (CVE-2026-25793, nebula v1.9.7 → fixed 1.10.3) |
+| 6c | CodeQL (Go + JavaScript) | **PASS** | 0 errors, 0 warnings across both languages |
+| 7 | GORM Security Scan | **PASS** | 0 CRITICAL/HIGH (2 INFO suggestions: missing indexes on UserPermittedHost) |
+| 8 | Go Vulnerability Check | **PASS** | No vulnerabilities found |
+
+---
+
+## Detailed Findings
+
+### 1. Local Patch Coverage Preflight
+
+- **Status:** WARN (advisory, not blocking per policy)
+- Overall patch coverage: **79.5%** (threshold: 90%)
+- Backend patch coverage: **78.3%** (threshold: 85%)
+- Artifacts generated but `test-results/` directory was not persisted at repo root
+- **Action:** Consider adding targeted tests for uncovered changed lines in notification service/handler
+
+### 2. Backend Unit Test Coverage
+
+- **Status:** PASS
+- Statement coverage: **87.0%**
+- Line coverage: **87.3%**
+- All tests passed (0 failures)
+
+### 3. Frontend Unit Test Coverage
+
+- **Status:** PASS
+- Statement coverage: **88.21%**
+- Branch coverage: **80.58%**
+- Function coverage: **85.20%**
+- Line coverage: **88.97%**
+- All tests passed (0 failures)
+- Coverage files generated: `lcov.info`, `coverage-summary.json`, `coverage-final.json`
+
+### 4. TypeScript Type Check
+
+- **Status:** PASS
+- `tsc --noEmit` completed with zero errors
+
+### 5. Pre-commit Hooks
+
+- **Status:** PASS
+- All hooks passed:
+ - fix end of files
+ - trim trailing whitespace
+ - check yaml
+ - check for added large files
+ - shellcheck
+ - actionlint (GitHub Actions)
+ - dockerfile validation
+ - Go Vet
+ - golangci-lint (Fast Linters - BLOCKING)
+ - Check .version matches latest Git tag
+ - Prevent large files not tracked by LFS
+ - Prevent committing CodeQL DB artifacts
+ - Prevent committing data/backups files
+ - Frontend TypeScript Check
+ - Frontend Lint (Fix)
+
+### 6a. Trivy Filesystem Scan
+
+- **Status:** PASS
+- Scanned `backend/` and `frontend/` directories: **0 CRITICAL, 0 HIGH**
+- Full workspace scan found 3 CRITICAL + 14 HIGH across Go module cache dependencies (not project code)
+- Trivy misconfig scanner crashed (known Trivy bug in ansible parser — nil pointer dereference in `discovery.go:82`). Vuln scanner completed successfully.
+
+### 6b. Docker Image Scan
+
+- **Status:** WARN (not blocking — upstream dependency)
+- Image: `charon:local`
+- **1 HIGH finding:**
+ - **CVE-2026-25793** — `github.com/slackhq/nebula` v1.9.7 (in `usr/bin/caddy` binary)
+ - Description: Blocklist evasion via ECDSA Signature Malleability
+ - Fixed in: v1.10.3
+ - Impact: Caddy transitive dependency, not Charon code
+- **Remediation:** Upgrade Caddy to a version that pulls nebula ≥ 1.10.3 when available
+
+### 6c. CodeQL Scans
+
+- **Status:** PASS
+- **Go:** 0 errors, 0 warnings
+- **JavaScript:** 0 errors, 0 warnings (347/347 files scanned)
+- SARIF outputs: `codeql-results-go.sarif`, `codeql-results-javascript.sarif`
+
+### 7. GORM Security Scan
+
+- **Status:** PASS
+- Scanned: 41 Go files (2207 lines), 2 seconds
+- **0 CRITICAL, 0 HIGH, 0 MEDIUM**
+- 2 INFO suggestions:
+ - `backend/internal/models/user.go:109` — `UserPermittedHost.UserID` missing index
+ - `backend/internal/models/user.go:110` — `UserPermittedHost.ProxyHostID` missing index
+
+### 8. Go Vulnerability Check
+
+- **Status:** PASS
+- `govulncheck ./...` — No vulnerabilities found
+
+---
+
+## Gotify Token Security Review
+
+- No Gotify tokens found in logs, test artifacts, or API examples
+- No tokenized URL query parameters exposed in diagnostics or output
+- Token handling follows `json:"-"` pattern (verified via `HasToken` computed field approach in PR)
+
+---
+
+## Recommendation
+
+### GO / NO-GO: **GO** (conditional)
+
+All blocking gates pass. Two advisory warnings exist:
+
+1. **Patch coverage** (79.5% overall, 78.3% backend) is below advisory thresholds but not a blocking gate per current policy
+2. **Docker image** has 1 HIGH CVE in Caddy's transitive dependency (nebula) — upstream fix required, not actionable in Charon code
+
+**Conditions:**
+- Track nebula CVE-2026-25793 remediation as a follow-up issue when a Caddy update incorporates the fix
+- Consider adding targeted tests for uncovered changed lines in notification service/handler to improve patch coverage
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index 1a9af5e2..b8d9823b 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -15,7 +15,7 @@
"@radix-ui/react-tabs": "^1.1.13",
"@radix-ui/react-tooltip": "^1.2.8",
"@tanstack/react-query": "^5.90.21",
- "axios": "^1.13.5",
+ "axios": "^1.13.6",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"date-fns": "^4.1.0",
@@ -27,36 +27,39 @@
"react-hook-form": "^7.71.2",
"react-hot-toast": "^2.6.0",
"react-i18next": "^16.5.4",
- "react-router-dom": "^7.13.0",
+ "react-router-dom": "^7.13.1",
"tailwind-merge": "^3.5.0",
"tldts": "^7.0.23"
},
"devDependencies": {
+ "@eslint/css": "^0.14.1",
"@eslint/js": "^9.39.3 <10.0.0",
+ "@eslint/json": "^1.0.1",
+ "@eslint/markdown": "^7.5.1",
"@playwright/test": "^1.58.2",
- "@tailwindcss/postcss": "^4.2.0",
+ "@tailwindcss/postcss": "^4.2.1",
"@testing-library/jest-dom": "^6.9.1",
"@testing-library/react": "^16.3.2",
"@testing-library/user-event": "^14.6.1",
- "@types/node": "^25.3.0",
+ "@types/node": "^25.3.3",
"@types/react": "^19.2.14",
"@types/react-dom": "^19.2.3",
- "@typescript-eslint/eslint-plugin": "^8.56.0",
- "@typescript-eslint/parser": "^8.56.0",
+ "@typescript-eslint/eslint-plugin": "^8.56.1",
+ "@typescript-eslint/parser": "^8.56.1",
"@vitejs/plugin-react": "^5.1.4",
"@vitest/coverage-istanbul": "^4.0.18",
"@vitest/coverage-v8": "^4.0.18",
"@vitest/ui": "^4.0.18",
- "autoprefixer": "^10.4.24",
+ "autoprefixer": "^10.4.27",
"eslint": "^9.39.3 <10.0.0",
"eslint-plugin-react-hooks": "^7.0.1",
- "eslint-plugin-react-refresh": "^0.5.0",
+ "eslint-plugin-react-refresh": "^0.5.2",
"jsdom": "28.1.0",
"knip": "^5.85.0",
"postcss": "^8.5.6",
- "tailwindcss": "^4.2.0",
+ "tailwindcss": "^4.2.1",
"typescript": "^5.9.3",
- "typescript-eslint": "^8.56.0",
+ "typescript-eslint": "^8.56.1",
"vite": "^7.3.1",
"vitest": "^4.0.18"
}
@@ -89,17 +92,20 @@
}
},
"node_modules/@asamuzakjp/css-color": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.1.2.tgz",
- "integrity": "sha512-NfBUvBaYgKIuq6E/RBLY1m0IohzNHAYyaJGuTK79Z23uNwmz2jl1mPsC5ZxCCxylinKhT1Amn5oNTlx1wN8cQg==",
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-5.0.1.tgz",
+ "integrity": "sha512-2SZFvqMyvboVV1d15lMf7XiI3m7SDqXUuKaTymJYLN6dSGadqp+fVojqJlVoMlbZnlTmu3S0TLwLTJpvBMO1Aw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@csstools/css-calc": "^3.0.0",
- "@csstools/css-color-parser": "^4.0.1",
+ "@csstools/css-calc": "^3.1.1",
+ "@csstools/css-color-parser": "^4.0.2",
"@csstools/css-parser-algorithms": "^4.0.0",
"@csstools/css-tokenizer": "^4.0.0",
- "lru-cache": "^11.2.5"
+ "lru-cache": "^11.2.6"
+ },
+ "engines": {
+ "node": "^20.19.0 || ^22.12.0 || >=24.0.0"
}
},
"node_modules/@asamuzakjp/css-color/node_modules/lru-cache": {
@@ -1129,6 +1135,13 @@
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
+ "node_modules/@eslint/config-array/node_modules/balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/@eslint/config-array/node_modules/brace-expansion": {
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
@@ -1141,9 +1154,9 @@
}
},
"node_modules/@eslint/config-array/node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+ "version": "3.1.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
+ "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -1179,21 +1192,50 @@
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
- "node_modules/@eslint/eslintrc": {
- "version": "3.3.3",
- "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.3.tgz",
- "integrity": "sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==",
+ "node_modules/@eslint/css": {
+ "version": "0.14.1",
+ "resolved": "https://registry.npmjs.org/@eslint/css/-/css-0.14.1.tgz",
+ "integrity": "sha512-NXiteSacmpaXqgyIW3+GcNzexXyfC0kd+gig6WTjD4A74kBGJeNx1tV0Hxa0v7x0+mnIyKfGPhGNs1uhRFdh+w==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@eslint/core": "^0.17.0",
+ "@eslint/css-tree": "^3.6.6",
+ "@eslint/plugin-kit": "^0.4.1"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ }
+ },
+ "node_modules/@eslint/css-tree": {
+ "version": "3.6.9",
+ "resolved": "https://registry.npmjs.org/@eslint/css-tree/-/css-tree-3.6.9.tgz",
+ "integrity": "sha512-3D5/OHibNEGk+wKwNwMbz63NMf367EoR4mVNNpxddCHKEb2Nez7z62J2U6YjtErSsZDoY0CsccmoUpdEbkogNA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "ajv": "^6.12.4",
+ "mdn-data": "2.23.0",
+ "source-map-js": "^1.0.1"
+ },
+ "engines": {
+ "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0"
+ }
+ },
+ "node_modules/@eslint/eslintrc": {
+ "version": "3.3.4",
+ "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.4.tgz",
+ "integrity": "sha512-4h4MVF8pmBsncB60r0wSJiIeUKTSD4m7FmTFThG8RHlsg9ajqckLm9OraguFGZE4vVdpiI1Q4+hFnisopmG6gQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ajv": "^6.14.0",
"debug": "^4.3.2",
"espree": "^10.0.1",
"globals": "^14.0.0",
"ignore": "^5.2.0",
"import-fresh": "^3.2.1",
"js-yaml": "^4.1.1",
- "minimatch": "^3.1.2",
+ "minimatch": "^3.1.3",
"strip-json-comments": "^3.1.1"
},
"engines": {
@@ -1203,6 +1245,13 @@
"url": "https://opencollective.com/eslint"
}
},
+ "node_modules/@eslint/eslintrc/node_modules/balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/@eslint/eslintrc/node_modules/brace-expansion": {
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
@@ -1225,9 +1274,9 @@
}
},
"node_modules/@eslint/eslintrc/node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+ "version": "3.1.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
+ "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -1250,6 +1299,73 @@
"url": "https://eslint.org/donate"
}
},
+ "node_modules/@eslint/json": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/@eslint/json/-/json-1.0.1.tgz",
+ "integrity": "sha512-bE2nGv8/U+uRvQEJWOgCsZCa65XsCBgxyyx/sXtTHVv0kqdauACLzyp7A1C3yNn7pRaWjIt5acxY+TAbSyIJXw==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@eslint/core": "^1.1.0",
+ "@eslint/plugin-kit": "^0.6.0",
+ "@humanwhocodes/momoa": "^3.3.10",
+ "natural-compare": "^1.4.0"
+ },
+ "engines": {
+ "node": "^20.19.0 || ^22.13.0 || >=24"
+ }
+ },
+ "node_modules/@eslint/json/node_modules/@eslint/core": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@eslint/core/-/core-1.1.0.tgz",
+ "integrity": "sha512-/nr9K9wkr3P1EzFTdFdMoLuo1PmIxjmwvPozwoSodjNBdefGujXQUF93u1DDZpEaTuDvMsIQddsd35BwtrW9Xw==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@types/json-schema": "^7.0.15"
+ },
+ "engines": {
+ "node": "^20.19.0 || ^22.13.0 || >=24"
+ }
+ },
+ "node_modules/@eslint/json/node_modules/@eslint/plugin-kit": {
+ "version": "0.6.0",
+ "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.6.0.tgz",
+ "integrity": "sha512-bIZEUzOI1jkhviX2cp5vNyXQc6olzb2ohewQubuYlMXZ2Q/XjBO0x0XhGPvc9fjSIiUN0vw+0hq53BJ4eQSJKQ==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@eslint/core": "^1.1.0",
+ "levn": "^0.4.1"
+ },
+ "engines": {
+ "node": "^20.19.0 || ^22.13.0 || >=24"
+ }
+ },
+ "node_modules/@eslint/markdown": {
+ "version": "7.5.1",
+ "resolved": "https://registry.npmjs.org/@eslint/markdown/-/markdown-7.5.1.tgz",
+ "integrity": "sha512-R8uZemG9dKTbru/DQRPblbJyXpObwKzo8rv1KYGGuPUPtjM4LXBYM9q5CIZAComzZupws3tWbDwam5AFpPLyJQ==",
+ "dev": true,
+ "license": "MIT",
+ "workspaces": [
+ "examples/*"
+ ],
+ "dependencies": {
+ "@eslint/core": "^0.17.0",
+ "@eslint/plugin-kit": "^0.4.1",
+ "github-slugger": "^2.0.0",
+ "mdast-util-from-markdown": "^2.0.2",
+ "mdast-util-frontmatter": "^2.0.1",
+ "mdast-util-gfm": "^3.1.0",
+ "micromark-extension-frontmatter": "^2.0.0",
+ "micromark-extension-gfm": "^3.0.0",
+ "micromark-util-normalize-identifier": "^2.0.1"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ }
+ },
"node_modules/@eslint/object-schema": {
"version": "2.1.7",
"resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz",
@@ -1368,6 +1484,16 @@
"url": "https://github.com/sponsors/nzakas"
}
},
+ "node_modules/@humanwhocodes/momoa": {
+ "version": "3.3.10",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/momoa/-/momoa-3.3.10.tgz",
+ "integrity": "sha512-KWiFQpSAqEIyrTXko3hFNLeQvSK8zXlJQzhhxsyVn58WFRYXST99b3Nqnu+ttOtjds2Pl2grUHGpe2NzhPynuQ==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/@humanwhocodes/retry": {
"version": "0.4.3",
"resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz",
@@ -1498,9 +1624,9 @@
}
},
"node_modules/@oxc-resolver/binding-android-arm-eabi": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-android-arm-eabi/-/binding-android-arm-eabi-11.18.0.tgz",
- "integrity": "sha512-EhwJNzbfLwQQIeyak3n08EB3UHknMnjy1dFyL98r3xlorje2uzHOT2vkB5nB1zqtTtzT31uSot3oGZFfODbGUg==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-android-arm-eabi/-/binding-android-arm-eabi-11.19.1.tgz",
+ "integrity": "sha512-aUs47y+xyXHUKlbhqHUjBABjvycq6YSD7bpxSW7vplUmdzAlJ93yXY6ZR0c1o1x5A/QKbENCvs3+NlY8IpIVzg==",
"cpu": [
"arm"
],
@@ -1512,9 +1638,9 @@
]
},
"node_modules/@oxc-resolver/binding-android-arm64": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-android-arm64/-/binding-android-arm64-11.18.0.tgz",
- "integrity": "sha512-esOPsT9S9B6vEMMp1qR9Yz5UepQXljoWRJYoyp7GV/4SYQOSTpN0+V2fTruxbMmzqLK+fjCEU2x3SVhc96LQLQ==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-android-arm64/-/binding-android-arm64-11.19.1.tgz",
+ "integrity": "sha512-oolbkRX+m7Pq2LNjr/kKgYeC7bRDMVTWPgxBGMjSpZi/+UskVo4jsMU3MLheZV55jL6c3rNelPl4oD60ggYmqA==",
"cpu": [
"arm64"
],
@@ -1526,9 +1652,9 @@
]
},
"node_modules/@oxc-resolver/binding-darwin-arm64": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-darwin-arm64/-/binding-darwin-arm64-11.18.0.tgz",
- "integrity": "sha512-iJknScn8fRLRhGR6VHG31bzOoyLihSDmsJHRjHwRUL0yF1MkLlvzmZ+liKl9MGl+WZkZHaOFT5T1jNlLSWTowQ==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-darwin-arm64/-/binding-darwin-arm64-11.19.1.tgz",
+ "integrity": "sha512-nUC6d2i3R5B12sUW4O646qD5cnMXf2oBGPLIIeaRfU9doJRORAbE2SGv4eW6rMqhD+G7nf2Y8TTJTLiiO3Q/dQ==",
"cpu": [
"arm64"
],
@@ -1540,9 +1666,9 @@
]
},
"node_modules/@oxc-resolver/binding-darwin-x64": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-darwin-x64/-/binding-darwin-x64-11.18.0.tgz",
- "integrity": "sha512-3rMweF2GQLzkaUoWgFKy1fRtk0dpj4JDqucoZLJN9IZG+TC+RZg7QMwG5WKMvmEjzdYmOTw1L1XqZDVXF2ksaQ==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-darwin-x64/-/binding-darwin-x64-11.19.1.tgz",
+ "integrity": "sha512-cV50vE5+uAgNcFa3QY1JOeKDSkM/9ReIcc/9wn4TavhW/itkDGrXhw9jaKnkQnGbjJ198Yh5nbX/Gr2mr4Z5jQ==",
"cpu": [
"x64"
],
@@ -1554,9 +1680,9 @@
]
},
"node_modules/@oxc-resolver/binding-freebsd-x64": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-freebsd-x64/-/binding-freebsd-x64-11.18.0.tgz",
- "integrity": "sha512-TfXsFby4QvpGwmUP66+X+XXQsycddZe9ZUUu/vHhq2XGI1EkparCSzjpYW1Nz5fFncbI5oLymQLln/qR+qxyOw==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-freebsd-x64/-/binding-freebsd-x64-11.19.1.tgz",
+ "integrity": "sha512-xZOQiYGFxtk48PBKff+Zwoym7ScPAIVp4c14lfLxizO2LTTTJe5sx9vQNGrBymrf/vatSPNMD4FgsaaRigPkqw==",
"cpu": [
"x64"
],
@@ -1568,9 +1694,9 @@
]
},
"node_modules/@oxc-resolver/binding-linux-arm-gnueabihf": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-11.18.0.tgz",
- "integrity": "sha512-WolOILquy9DJsHcfFMHeA5EjTCI9A7JoERFJru4UI2zKZcnfNPo5GApzYwiloscEp/s+fALPmyRntswUns0qHg==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-11.19.1.tgz",
+ "integrity": "sha512-lXZYWAC6kaGe/ky2su94e9jN9t6M0/6c+GrSlCqL//XO1cxi5lpAhnJYdyrKfm0ZEr/c7RNyAx3P7FSBcBd5+A==",
"cpu": [
"arm"
],
@@ -1582,9 +1708,9 @@
]
},
"node_modules/@oxc-resolver/binding-linux-arm-musleabihf": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-11.18.0.tgz",
- "integrity": "sha512-r+5nHJyPdiBqOGTYAFyuq5RtuAQbm4y69GYWNG/uup9Cqr7RG9Ak0YZgGEbkQsc+XBs00ougu/D1+w3UAYIWHA==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-11.19.1.tgz",
+ "integrity": "sha512-veG1kKsuK5+t2IsO9q0DErYVSw2azvCVvWHnfTOS73WE0STdLLB7Q1bB9WR+yHPQM76ASkFyRbogWo1GR1+WbQ==",
"cpu": [
"arm"
],
@@ -1596,9 +1722,9 @@
]
},
"node_modules/@oxc-resolver/binding-linux-arm64-gnu": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-11.18.0.tgz",
- "integrity": "sha512-bUzg6QxljqMLLwsxYajAQEHW1LYRLdKOg/aykt14PSqUUOmfnOJjPdSLTiHIZCluVzPCQxv1LjoyRcoTAXfQaQ==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-11.19.1.tgz",
+ "integrity": "sha512-heV2+jmXyYnUrpUXSPugqWDRpnsQcDm2AX4wzTuvgdlZfoNYO0O3W2AVpJYaDn9AG4JdM6Kxom8+foE7/BcSig==",
"cpu": [
"arm64"
],
@@ -1610,9 +1736,9 @@
]
},
"node_modules/@oxc-resolver/binding-linux-arm64-musl": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm64-musl/-/binding-linux-arm64-musl-11.18.0.tgz",
- "integrity": "sha512-l43GVwls5+YR8WXOIez5x7Pp/MfhdkMOZOOjFUSWC/9qMnSLX1kd95j9oxDrkWdD321JdHTyd4eau5KQPxZM9w==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm64-musl/-/binding-linux-arm64-musl-11.19.1.tgz",
+ "integrity": "sha512-jvo2Pjs1c9KPxMuMPIeQsgu0mOJF9rEb3y3TdpsrqwxRM+AN6/nDDwv45n5ZrUnQMsdBy5gIabioMKnQfWo9ew==",
"cpu": [
"arm64"
],
@@ -1624,9 +1750,9 @@
]
},
"node_modules/@oxc-resolver/binding-linux-ppc64-gnu": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-11.18.0.tgz",
- "integrity": "sha512-ayj7TweYWi/azxWmRpUZGz41kKNvfkXam20UrFhaQDrSNGNqefQRODxhJn0iv6jt4qChh7TUxDIoavR6ftRsjw==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-11.19.1.tgz",
+ "integrity": "sha512-vLmdNxWCdN7Uo5suays6A/+ywBby2PWBBPXctWPg5V0+eVuzsJxgAn6MMB4mPlshskYbppjpN2Zg83ArHze9gQ==",
"cpu": [
"ppc64"
],
@@ -1638,9 +1764,9 @@
]
},
"node_modules/@oxc-resolver/binding-linux-riscv64-gnu": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-11.18.0.tgz",
- "integrity": "sha512-2Jz7jpq6BBNlBBup3usZB6sZWEZOBbjWn++/bKC2lpAT+sTEwdTonnf3rNcb+XY7+v53jYB9pM8LEKVXZfr8BA==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-11.19.1.tgz",
+ "integrity": "sha512-/b+WgR+VTSBxzgOhDO7TlMXC1ufPIMR6Vj1zN+/x+MnyXGW7prTLzU9eW85Aj7Th7CCEG9ArCbTeqxCzFWdg2w==",
"cpu": [
"riscv64"
],
@@ -1652,9 +1778,9 @@
]
},
"node_modules/@oxc-resolver/binding-linux-riscv64-musl": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-11.18.0.tgz",
- "integrity": "sha512-omw8/ISOc6ubR247iEMma4/JRfbY2I+nGJC59oKBhCIEZoyqEg/NmDSBc4ToMH+AsZDucqQUDOCku3k7pBiEag==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-11.19.1.tgz",
+ "integrity": "sha512-YlRdeWb9j42p29ROh+h4eg/OQ3dTJlpHSa+84pUM9+p6i3djtPz1q55yLJhgW9XfDch7FN1pQ/Vd6YP+xfRIuw==",
"cpu": [
"riscv64"
],
@@ -1666,9 +1792,9 @@
]
},
"node_modules/@oxc-resolver/binding-linux-s390x-gnu": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-11.18.0.tgz",
- "integrity": "sha512-uFipBXaS+honSL5r5G/rlvVrkffUjpKwD3S/aIiwp64bylK3+RztgV+mM1blk+OT5gBRG864auhH6jCfrOo3ZA==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-11.19.1.tgz",
+ "integrity": "sha512-EDpafVOQWF8/MJynsjOGFThcqhRHy417sRyLfQmeiamJ8qVhSKAn2Dn2VVKUGCjVB9C46VGjhNo7nOPUi1x6uA==",
"cpu": [
"s390x"
],
@@ -1680,9 +1806,9 @@
]
},
"node_modules/@oxc-resolver/binding-linux-x64-gnu": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-x64-gnu/-/binding-linux-x64-gnu-11.18.0.tgz",
- "integrity": "sha512-bY4uMIoKRv8Ine3UiKLFPWRZ+fPCDamTHZFf5pNOjlfmTJIANtJo0mzWDUdFZLYhVgQdegrDL9etZbTMR8qieg==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-x64-gnu/-/binding-linux-x64-gnu-11.19.1.tgz",
+ "integrity": "sha512-NxjZe+rqWhr+RT8/Ik+5ptA3oz7tUw361Wa5RWQXKnfqwSSHdHyrw6IdcTfYuml9dM856AlKWZIUXDmA9kkiBQ==",
"cpu": [
"x64"
],
@@ -1694,9 +1820,9 @@
]
},
"node_modules/@oxc-resolver/binding-linux-x64-musl": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-x64-musl/-/binding-linux-x64-musl-11.18.0.tgz",
- "integrity": "sha512-40IicL/aitfNOWur06x7Do41WcqFJ9VUNAciFjZCXzF6wR2i6uVsi6N19ecqgSRoLYFCAoRYi9F50QteIxCwKQ==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-x64-musl/-/binding-linux-x64-musl-11.19.1.tgz",
+ "integrity": "sha512-cM/hQwsO3ReJg5kR+SpI69DMfvNCp+A/eVR4b4YClE5bVZwz8rh2Nh05InhwI5HR/9cArbEkzMjcKgTHS6UaNw==",
"cpu": [
"x64"
],
@@ -1708,9 +1834,9 @@
]
},
"node_modules/@oxc-resolver/binding-openharmony-arm64": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-openharmony-arm64/-/binding-openharmony-arm64-11.18.0.tgz",
- "integrity": "sha512-DJIzYjUnSJtz4Trs/J9TnzivtPcUKn9AeL3YjHlM5+RvK27ZL9xISs3gg2VAo2nWU7ThuadC1jSYkWaZyONMwg==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-openharmony-arm64/-/binding-openharmony-arm64-11.19.1.tgz",
+ "integrity": "sha512-QF080IowFB0+9Rh6RcD19bdgh49BpQHUW5TajG1qvWHvmrQznTZZjYlgE2ltLXyKY+qs4F/v5xuX1XS7Is+3qA==",
"cpu": [
"arm64"
],
@@ -1722,9 +1848,9 @@
]
},
"node_modules/@oxc-resolver/binding-wasm32-wasi": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-wasm32-wasi/-/binding-wasm32-wasi-11.18.0.tgz",
- "integrity": "sha512-57+R8Ioqc8g9k80WovoupOoyIOfLEceHTizkUcwOXspXLhiZ67ScM7Q8OuvhDoRRSZzH6yI0qML3WZwMFR3s7g==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-wasm32-wasi/-/binding-wasm32-wasi-11.19.1.tgz",
+ "integrity": "sha512-w8UCKhX826cP/ZLokXDS6+milN8y4X7zidsAttEdWlVoamTNf6lhBJldaWr3ukTDiye7s4HRcuPEPOXNC432Vg==",
"cpu": [
"wasm32"
],
@@ -1739,9 +1865,9 @@
}
},
"node_modules/@oxc-resolver/binding-win32-arm64-msvc": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-11.18.0.tgz",
- "integrity": "sha512-t9Oa4BPptJqVlHTT1cV1frs+LY/vjsKhHI6ltj2EwoGM1TykJ0WW43UlQaU4SC8N+oTY8JRbAywVMNkfqjSu9w==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-11.19.1.tgz",
+ "integrity": "sha512-nJ4AsUVZrVKwnU/QRdzPCCrO0TrabBqgJ8pJhXITdZGYOV28TIYystV1VFLbQ7DtAcaBHpocT5/ZJnF78YJPtQ==",
"cpu": [
"arm64"
],
@@ -1753,9 +1879,9 @@
]
},
"node_modules/@oxc-resolver/binding-win32-ia32-msvc": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-11.18.0.tgz",
- "integrity": "sha512-4maf/f6ea5IEtIXqGwSw38srRtVHTre9iKShG4gjzat7c3Iq6B1OppXMj8gNmTuM4n8Xh1hQM9z2hBELccJr1g==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-11.19.1.tgz",
+ "integrity": "sha512-EW+ND5q2Tl+a3pH81l1QbfgbF3HmqgwLfDfVithRFheac8OTcnbXt/JxqD2GbDkb7xYEqy1zNaVFRr3oeG8npA==",
"cpu": [
"ia32"
],
@@ -1767,9 +1893,9 @@
]
},
"node_modules/@oxc-resolver/binding-win32-x64-msvc": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-win32-x64-msvc/-/binding-win32-x64-msvc-11.18.0.tgz",
- "integrity": "sha512-EhW8Su3AEACSw5HfzKMmyCtV0oArNrVViPdeOfvVYL9TrkL+/4c8fWHFTBtxUMUyCjhSG5xYNdwty1D/TAgL0Q==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-win32-x64-msvc/-/binding-win32-x64-msvc-11.19.1.tgz",
+ "integrity": "sha512-6hIU3RQu45B+VNTY4Ru8ppFwjVS/S5qwYyGhBotmjxfEKk41I2DlGtRfGJndZ5+6lneE2pwloqunlOyZuX/XAw==",
"cpu": [
"x64"
],
@@ -2572,9 +2698,9 @@
"license": "MIT"
},
"node_modules/@rollup/rollup-android-arm-eabi": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.58.0.tgz",
- "integrity": "sha512-mr0tmS/4FoVk1cnaeN244A/wjvGDNItZKR8hRhnmCzygyRXYtKF5jVDSIILR1U97CTzAYmbgIj/Dukg62ggG5w==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz",
+ "integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==",
"cpu": [
"arm"
],
@@ -2586,9 +2712,9 @@
]
},
"node_modules/@rollup/rollup-android-arm64": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.58.0.tgz",
- "integrity": "sha512-+s++dbp+/RTte62mQD9wLSbiMTV+xr/PeRJEc/sFZFSBRlHPNPVaf5FXlzAL77Mr8FtSfQqCN+I598M8U41ccQ==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz",
+ "integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==",
"cpu": [
"arm64"
],
@@ -2600,9 +2726,9 @@
]
},
"node_modules/@rollup/rollup-darwin-arm64": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.58.0.tgz",
- "integrity": "sha512-MFWBwTcYs0jZbINQBXHfSrpSQJq3IUOakcKPzfeSznONop14Pxuqa0Kg19GD0rNBMPQI2tFtu3UzapZpH0Uc1Q==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz",
+ "integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==",
"cpu": [
"arm64"
],
@@ -2614,9 +2740,9 @@
]
},
"node_modules/@rollup/rollup-darwin-x64": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.58.0.tgz",
- "integrity": "sha512-yiKJY7pj9c9JwzuKYLFaDZw5gma3fI9bkPEIyofvVfsPqjCWPglSHdpdwXpKGvDeYDms3Qal8qGMEHZ1M/4Udg==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz",
+ "integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==",
"cpu": [
"x64"
],
@@ -2628,9 +2754,9 @@
]
},
"node_modules/@rollup/rollup-freebsd-arm64": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.58.0.tgz",
- "integrity": "sha512-x97kCoBh5MOevpn/CNK9W1x8BEzO238541BGWBc315uOlN0AD/ifZ1msg+ZQB05Ux+VF6EcYqpiagfLJ8U3LvQ==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz",
+ "integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==",
"cpu": [
"arm64"
],
@@ -2642,9 +2768,9 @@
]
},
"node_modules/@rollup/rollup-freebsd-x64": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.58.0.tgz",
- "integrity": "sha512-Aa8jPoZ6IQAG2eIrcXPpjRcMjROMFxCt1UYPZZtCxRV68WkuSigYtQ/7Zwrcr2IvtNJo7T2JfDXyMLxq5L4Jlg==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz",
+ "integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==",
"cpu": [
"x64"
],
@@ -2656,9 +2782,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.58.0.tgz",
- "integrity": "sha512-Ob8YgT5kD/lSIYW2Rcngs5kNB/44Q2RzBSPz9brf2WEtcGR7/f/E9HeHn1wYaAwKBni+bdXEwgHvUd0x12lQSA==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz",
+ "integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==",
"cpu": [
"arm"
],
@@ -2670,9 +2796,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.58.0.tgz",
- "integrity": "sha512-K+RI5oP1ceqoadvNt1FecL17Qtw/n9BgRSzxif3rTL2QlIu88ccvY+Y9nnHe/cmT5zbH9+bpiJuG1mGHRVwF4Q==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz",
+ "integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==",
"cpu": [
"arm"
],
@@ -2684,9 +2810,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.58.0.tgz",
- "integrity": "sha512-T+17JAsCKUjmbopcKepJjHWHXSjeW7O5PL7lEFaeQmiVyw4kkc5/lyYKzrv6ElWRX/MrEWfPiJWqbTvfIvjM1Q==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz",
+ "integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==",
"cpu": [
"arm64"
],
@@ -2698,9 +2824,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.58.0.tgz",
- "integrity": "sha512-cCePktb9+6R9itIJdeCFF9txPU7pQeEHB5AbHu/MKsfH/k70ZtOeq1k4YAtBv9Z7mmKI5/wOLYjQ+B9QdxR6LA==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz",
+ "integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==",
"cpu": [
"arm64"
],
@@ -2712,9 +2838,9 @@
]
},
"node_modules/@rollup/rollup-linux-loong64-gnu": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.58.0.tgz",
- "integrity": "sha512-iekUaLkfliAsDl4/xSdoCJ1gnnIXvoNz85C8U8+ZxknM5pBStfZjeXgB8lXobDQvvPRCN8FPmmuTtH+z95HTmg==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz",
+ "integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==",
"cpu": [
"loong64"
],
@@ -2726,9 +2852,9 @@
]
},
"node_modules/@rollup/rollup-linux-loong64-musl": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.58.0.tgz",
- "integrity": "sha512-68ofRgJNl/jYJbxFjCKE7IwhbfxOl1muPN4KbIqAIe32lm22KmU7E8OPvyy68HTNkI2iV/c8y2kSPSm2mW/Q9Q==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz",
+ "integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==",
"cpu": [
"loong64"
],
@@ -2740,9 +2866,9 @@
]
},
"node_modules/@rollup/rollup-linux-ppc64-gnu": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.58.0.tgz",
- "integrity": "sha512-dpz8vT0i+JqUKuSNPCP5SYyIV2Lh0sNL1+FhM7eLC457d5B9/BC3kDPp5BBftMmTNsBarcPcoz5UGSsnCiw4XQ==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz",
+ "integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==",
"cpu": [
"ppc64"
],
@@ -2754,9 +2880,9 @@
]
},
"node_modules/@rollup/rollup-linux-ppc64-musl": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.58.0.tgz",
- "integrity": "sha512-4gdkkf9UJ7tafnweBCR/mk4jf3Jfl0cKX9Np80t5i78kjIH0ZdezUv/JDI2VtruE5lunfACqftJ8dIMGN4oHew==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz",
+ "integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==",
"cpu": [
"ppc64"
],
@@ -2768,9 +2894,9 @@
]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.58.0.tgz",
- "integrity": "sha512-YFS4vPnOkDTD/JriUeeZurFYoJhPf9GQQEF/v4lltp3mVcBmnsAdjEWhr2cjUCZzZNzxCG0HZOvJU44UGHSdzw==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz",
+ "integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==",
"cpu": [
"riscv64"
],
@@ -2782,9 +2908,9 @@
]
},
"node_modules/@rollup/rollup-linux-riscv64-musl": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.58.0.tgz",
- "integrity": "sha512-x2xgZlFne+QVNKV8b4wwaCS8pwq3y14zedZ5DqLzjdRITvreBk//4Knbcvm7+lWmms9V9qFp60MtUd0/t/PXPw==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz",
+ "integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==",
"cpu": [
"riscv64"
],
@@ -2796,9 +2922,9 @@
]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.58.0.tgz",
- "integrity": "sha512-jIhrujyn4UnWF8S+DHSkAkDEO3hLX0cjzxJZPLF80xFyzyUIYgSMRcYQ3+uqEoyDD2beGq7Dj7edi8OnJcS/hg==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz",
+ "integrity": "sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==",
"cpu": [
"s390x"
],
@@ -2810,9 +2936,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.58.0.tgz",
- "integrity": "sha512-+410Srdoh78MKSJxTQ+hZ/Mx+ajd6RjjPwBPNd0R3J9FtL6ZA0GqiiyNjCO9In0IzZkCNrpGymSfn+kgyPQocg==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz",
+ "integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==",
"cpu": [
"x64"
],
@@ -2824,9 +2950,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.58.0.tgz",
- "integrity": "sha512-ZjMyby5SICi227y1MTR3VYBpFTdZs823Rs/hpakufleBoufoOIB6jtm9FEoxn/cgO7l6PM2rCEl5Kre5vX0QrQ==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz",
+ "integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==",
"cpu": [
"x64"
],
@@ -2838,9 +2964,9 @@
]
},
"node_modules/@rollup/rollup-openbsd-x64": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.58.0.tgz",
- "integrity": "sha512-ds4iwfYkSQ0k1nb8LTcyXw//ToHOnNTJtceySpL3fa7tc/AsE+UpUFphW126A6fKBGJD5dhRvg8zw1rvoGFxmw==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz",
+ "integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==",
"cpu": [
"x64"
],
@@ -2852,9 +2978,9 @@
]
},
"node_modules/@rollup/rollup-openharmony-arm64": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.58.0.tgz",
- "integrity": "sha512-fd/zpJniln4ICdPkjWFhZYeY/bpnaN9pGa6ko+5WD38I0tTqk9lXMgXZg09MNdhpARngmxiCg0B0XUamNw/5BQ==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz",
+ "integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==",
"cpu": [
"arm64"
],
@@ -2866,9 +2992,9 @@
]
},
"node_modules/@rollup/rollup-win32-arm64-msvc": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.58.0.tgz",
- "integrity": "sha512-YpG8dUOip7DCz3nr/JUfPbIUo+2d/dy++5bFzgi4ugOGBIox+qMbbqt/JoORwvI/C9Kn2tz6+Bieoqd5+B1CjA==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz",
+ "integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==",
"cpu": [
"arm64"
],
@@ -2880,9 +3006,9 @@
]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.58.0.tgz",
- "integrity": "sha512-b9DI8jpFQVh4hIXFr0/+N/TzLdpBIoPzjt0Rt4xJbW3mzguV3mduR9cNgiuFcuL/TeORejJhCWiAXe3E/6PxWA==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz",
+ "integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==",
"cpu": [
"ia32"
],
@@ -2894,9 +3020,9 @@
]
},
"node_modules/@rollup/rollup-win32-x64-gnu": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.58.0.tgz",
- "integrity": "sha512-CSrVpmoRJFN06LL9xhkitkwUcTZtIotYAF5p6XOR2zW0Zz5mzb3IPpcoPhB02frzMHFNo1reQ9xSF5fFm3hUsQ==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz",
+ "integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==",
"cpu": [
"x64"
],
@@ -2908,9 +3034,9 @@
]
},
"node_modules/@rollup/rollup-win32-x64-msvc": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.58.0.tgz",
- "integrity": "sha512-QFsBgQNTnh5K0t/sBsjJLq24YVqEIVkGpfN2VHsnN90soZyhaiA9UUHufcctVNL4ypJY0wrwad0wslx2KJQ1/w==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz",
+ "integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==",
"cpu": [
"x64"
],
@@ -2929,9 +3055,9 @@
"license": "MIT"
},
"node_modules/@tailwindcss/node": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.2.0.tgz",
- "integrity": "sha512-Yv+fn/o2OmL5fh/Ir62VXItdShnUxfpkMA4Y7jdeC8O81WPB8Kf6TT6GSHvnqgSwDzlB5iT7kDpeXxLsUS0T6Q==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.2.1.tgz",
+ "integrity": "sha512-jlx6sLk4EOwO6hHe1oCGm1Q4AN/s0rSrTTPBGPM0/RQ6Uylwq17FuU8IeJJKEjtc6K6O07zsvP+gDO6MMWo7pg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -2941,37 +3067,37 @@
"lightningcss": "1.31.1",
"magic-string": "^0.30.21",
"source-map-js": "^1.2.1",
- "tailwindcss": "4.2.0"
+ "tailwindcss": "4.2.1"
}
},
"node_modules/@tailwindcss/oxide": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.2.0.tgz",
- "integrity": "sha512-AZqQzADaj742oqn2xjl5JbIOzZB/DGCYF/7bpvhA8KvjUj9HJkag6bBuwZvH1ps6dfgxNHyuJVlzSr2VpMgdTQ==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.2.1.tgz",
+ "integrity": "sha512-yv9jeEFWnjKCI6/T3Oq50yQEOqmpmpfzG1hcZsAOaXFQPfzWprWrlHSdGPEF3WQTi8zu8ohC9Mh9J470nT5pUw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 20"
},
"optionalDependencies": {
- "@tailwindcss/oxide-android-arm64": "4.2.0",
- "@tailwindcss/oxide-darwin-arm64": "4.2.0",
- "@tailwindcss/oxide-darwin-x64": "4.2.0",
- "@tailwindcss/oxide-freebsd-x64": "4.2.0",
- "@tailwindcss/oxide-linux-arm-gnueabihf": "4.2.0",
- "@tailwindcss/oxide-linux-arm64-gnu": "4.2.0",
- "@tailwindcss/oxide-linux-arm64-musl": "4.2.0",
- "@tailwindcss/oxide-linux-x64-gnu": "4.2.0",
- "@tailwindcss/oxide-linux-x64-musl": "4.2.0",
- "@tailwindcss/oxide-wasm32-wasi": "4.2.0",
- "@tailwindcss/oxide-win32-arm64-msvc": "4.2.0",
- "@tailwindcss/oxide-win32-x64-msvc": "4.2.0"
+ "@tailwindcss/oxide-android-arm64": "4.2.1",
+ "@tailwindcss/oxide-darwin-arm64": "4.2.1",
+ "@tailwindcss/oxide-darwin-x64": "4.2.1",
+ "@tailwindcss/oxide-freebsd-x64": "4.2.1",
+ "@tailwindcss/oxide-linux-arm-gnueabihf": "4.2.1",
+ "@tailwindcss/oxide-linux-arm64-gnu": "4.2.1",
+ "@tailwindcss/oxide-linux-arm64-musl": "4.2.1",
+ "@tailwindcss/oxide-linux-x64-gnu": "4.2.1",
+ "@tailwindcss/oxide-linux-x64-musl": "4.2.1",
+ "@tailwindcss/oxide-wasm32-wasi": "4.2.1",
+ "@tailwindcss/oxide-win32-arm64-msvc": "4.2.1",
+ "@tailwindcss/oxide-win32-x64-msvc": "4.2.1"
}
},
"node_modules/@tailwindcss/oxide-android-arm64": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.2.0.tgz",
- "integrity": "sha512-F0QkHAVaW/JNBWl4CEKWdZ9PMb0khw5DCELAOnu+RtjAfx5Zgw+gqCHFvqg3AirU1IAd181fwOtJQ5I8Yx5wtw==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.2.1.tgz",
+ "integrity": "sha512-eZ7G1Zm5EC8OOKaesIKuw77jw++QJ2lL9N+dDpdQiAB/c/B2wDh0QPFHbkBVrXnwNugvrbJFk1gK2SsVjwWReg==",
"cpu": [
"arm64"
],
@@ -2986,9 +3112,9 @@
}
},
"node_modules/@tailwindcss/oxide-darwin-arm64": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.2.0.tgz",
- "integrity": "sha512-I0QylkXsBsJMZ4nkUNSR04p6+UptjcwhcVo3Zu828ikiEqHjVmQL9RuQ6uT/cVIiKpvtVA25msu/eRV97JeNSA==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.2.1.tgz",
+ "integrity": "sha512-q/LHkOstoJ7pI1J0q6djesLzRvQSIfEto148ppAd+BVQK0JYjQIFSK3JgYZJa+Yzi0DDa52ZsQx2rqytBnf8Hw==",
"cpu": [
"arm64"
],
@@ -3003,9 +3129,9 @@
}
},
"node_modules/@tailwindcss/oxide-darwin-x64": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.2.0.tgz",
- "integrity": "sha512-6TmQIn4p09PBrmnkvbYQ0wbZhLtbaksCDx7Y7R3FYYx0yxNA7xg5KP7dowmQ3d2JVdabIHvs3Hx4K3d5uCf8xg==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.2.1.tgz",
+ "integrity": "sha512-/f/ozlaXGY6QLbpvd/kFTro2l18f7dHKpB+ieXz+Cijl4Mt9AI2rTrpq7V+t04nK+j9XBQHnSMdeQRhbGyt6fw==",
"cpu": [
"x64"
],
@@ -3020,9 +3146,9 @@
}
},
"node_modules/@tailwindcss/oxide-freebsd-x64": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.2.0.tgz",
- "integrity": "sha512-qBudxDvAa2QwGlq9y7VIzhTvp2mLJ6nD/G8/tI70DCDoneaUeLWBJaPcbfzqRIWraj+o969aDQKvKW9dvkUizw==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.2.1.tgz",
+ "integrity": "sha512-5e/AkgYJT/cpbkys/OU2Ei2jdETCLlifwm7ogMC7/hksI2fC3iiq6OcXwjibcIjPung0kRtR3TxEITkqgn0TcA==",
"cpu": [
"x64"
],
@@ -3037,9 +3163,9 @@
}
},
"node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.2.0.tgz",
- "integrity": "sha512-7XKkitpy5NIjFZNUQPeUyNJNJn1CJeV7rmMR+exHfTuOsg8rxIO9eNV5TSEnqRcaOK77zQpsyUkBWmPy8FgdSg==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.2.1.tgz",
+ "integrity": "sha512-Uny1EcVTTmerCKt/1ZuKTkb0x8ZaiuYucg2/kImO5A5Y/kBz41/+j0gxUZl+hTF3xkWpDmHX+TaWhOtba2Fyuw==",
"cpu": [
"arm"
],
@@ -3054,9 +3180,9 @@
}
},
"node_modules/@tailwindcss/oxide-linux-arm64-gnu": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.2.0.tgz",
- "integrity": "sha512-Mff5a5Q3WoQR01pGU1gr29hHM1N93xYrKkGXfPw/aRtK4bOc331Ho4Tgfsm5WDGvpevqMpdlkCojT3qlCQbCpA==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.2.1.tgz",
+ "integrity": "sha512-CTrwomI+c7n6aSSQlsPL0roRiNMDQ/YzMD9EjcR+H4f0I1SQ8QqIuPnsVp7QgMkC1Qi8rtkekLkOFjo7OlEFRQ==",
"cpu": [
"arm64"
],
@@ -3071,9 +3197,9 @@
}
},
"node_modules/@tailwindcss/oxide-linux-arm64-musl": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.2.0.tgz",
- "integrity": "sha512-XKcSStleEVnbH6W/9DHzZv1YhjE4eSS6zOu2eRtYAIh7aV4o3vIBs+t/B15xlqoxt6ef/0uiqJVB6hkHjWD/0A==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.2.1.tgz",
+ "integrity": "sha512-WZA0CHRL/SP1TRbA5mp9htsppSEkWuQ4KsSUumYQnyl8ZdT39ntwqmz4IUHGN6p4XdSlYfJwM4rRzZLShHsGAQ==",
"cpu": [
"arm64"
],
@@ -3088,9 +3214,9 @@
}
},
"node_modules/@tailwindcss/oxide-linux-x64-gnu": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.2.0.tgz",
- "integrity": "sha512-/hlXCBqn9K6fi7eAM0RsobHwJYa5V/xzWspVTzxnX+Ft9v6n+30Pz8+RxCn7sQL/vRHHLS30iQPrHQunu6/vJA==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.2.1.tgz",
+ "integrity": "sha512-qMFzxI2YlBOLW5PhblzuSWlWfwLHaneBE0xHzLrBgNtqN6mWfs+qYbhryGSXQjFYB1Dzf5w+LN5qbUTPhW7Y5g==",
"cpu": [
"x64"
],
@@ -3105,9 +3231,9 @@
}
},
"node_modules/@tailwindcss/oxide-linux-x64-musl": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.2.0.tgz",
- "integrity": "sha512-lKUaygq4G7sWkhQbfdRRBkaq4LY39IriqBQ+Gk6l5nKq6Ay2M2ZZb1tlIyRNgZKS8cbErTwuYSor0IIULC0SHw==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.2.1.tgz",
+ "integrity": "sha512-5r1X2FKnCMUPlXTWRYpHdPYUY6a1Ar/t7P24OuiEdEOmms5lyqjDRvVY1yy9Rmioh+AunQ0rWiOTPE8F9A3v5g==",
"cpu": [
"x64"
],
@@ -3122,9 +3248,9 @@
}
},
"node_modules/@tailwindcss/oxide-wasm32-wasi": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.2.0.tgz",
- "integrity": "sha512-xuDjhAsFdUuFP5W9Ze4k/o4AskUtI8bcAGU4puTYprr89QaYFmhYOPfP+d1pH+k9ets6RoE23BXZM1X1jJqoyw==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.2.1.tgz",
+ "integrity": "sha512-MGFB5cVPvshR85MTJkEvqDUnuNoysrsRxd6vnk1Lf2tbiqNlXpHYZqkqOQalydienEWOHHFyyuTSYRsLfxFJ2Q==",
"bundleDependencies": [
"@napi-rs/wasm-runtime",
"@emnapi/core",
@@ -3152,9 +3278,9 @@
}
},
"node_modules/@tailwindcss/oxide-win32-arm64-msvc": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.2.0.tgz",
- "integrity": "sha512-2UU/15y1sWDEDNJXxEIrfWKC2Yb4YgIW5Xz2fKFqGzFWfoMHWFlfa1EJlGO2Xzjkq/tvSarh9ZTjvbxqWvLLXA==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.2.1.tgz",
+ "integrity": "sha512-YlUEHRHBGnCMh4Nj4GnqQyBtsshUPdiNroZj8VPkvTZSoHsilRCwXcVKnG9kyi0ZFAS/3u+qKHBdDc81SADTRA==",
"cpu": [
"arm64"
],
@@ -3169,9 +3295,9 @@
}
},
"node_modules/@tailwindcss/oxide-win32-x64-msvc": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.2.0.tgz",
- "integrity": "sha512-CrFadmFoc+z76EV6LPG1jx6XceDsaCG3lFhyLNo/bV9ByPrE+FnBPckXQVP4XRkN76h3Fjt/a+5Er/oA/nCBvQ==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.2.1.tgz",
+ "integrity": "sha512-rbO34G5sMWWyrN/idLeVxAZgAKWrn5LiR3/I90Q9MkA67s6T1oB0xtTe+0heoBvHSpbU9Mk7i6uwJnpo4u21XQ==",
"cpu": [
"x64"
],
@@ -3186,17 +3312,17 @@
}
},
"node_modules/@tailwindcss/postcss": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/@tailwindcss/postcss/-/postcss-4.2.0.tgz",
- "integrity": "sha512-u6YBacGpOm/ixPfKqfgrJEjMfrYmPD7gEFRoygS/hnQaRtV0VCBdpkx5Ouw9pnaLRwwlgGCuJw8xLpaR0hOrQg==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/postcss/-/postcss-4.2.1.tgz",
+ "integrity": "sha512-OEwGIBnXnj7zJeonOh6ZG9woofIjGrd2BORfvE5p9USYKDCZoQmfqLcfNiRWoJlRWLdNPn2IgVZuWAOM4iTYMw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@alloc/quick-lru": "^5.2.0",
- "@tailwindcss/node": "4.2.0",
- "@tailwindcss/oxide": "4.2.0",
+ "@tailwindcss/node": "4.2.1",
+ "@tailwindcss/oxide": "4.2.1",
"postcss": "^8.5.6",
- "tailwindcss": "4.2.0"
+ "tailwindcss": "4.2.1"
}
},
"node_modules/@tanstack/query-core": {
@@ -3390,6 +3516,16 @@
"assertion-error": "^2.0.1"
}
},
+ "node_modules/@types/debug": {
+ "version": "4.1.12",
+ "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
+ "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/ms": "*"
+ }
+ },
"node_modules/@types/deep-eql": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz",
@@ -3411,10 +3547,27 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/@types/mdast": {
+ "version": "4.0.4",
+ "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+ "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/unist": "*"
+ }
+ },
+ "node_modules/@types/ms": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz",
+ "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/@types/node": {
- "version": "25.3.0",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-25.3.0.tgz",
- "integrity": "sha512-4K3bqJpXpqfg2XKGK9bpDTc6xO/xoUP/RBWS7AtRMug6zZFaRekiLzjVtAoZMquxoAbzBvy5nxQ7veS5eYzf8A==",
+ "version": "25.3.3",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-25.3.3.tgz",
+ "integrity": "sha512-DpzbrH7wIcBaJibpKo9nnSQL0MTRdnWttGyE5haGwK86xgMOkFLp7vEyfQPGLOJh5wNYiJ3V9PmUMDhV9u8kkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -3441,18 +3594,25 @@
"@types/react": "^19.2.0"
}
},
+ "node_modules/@types/unist": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+ "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/@typescript-eslint/eslint-plugin": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.0.tgz",
- "integrity": "sha512-lRyPDLzNCuae71A3t9NEINBiTn7swyOhvUj3MyUOxb8x6g6vPEFoOU+ZRmGMusNC3X3YMhqMIX7i8ShqhT74Pw==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.1.tgz",
+ "integrity": "sha512-Jz9ZztpB37dNC+HU2HI28Bs9QXpzCz+y/twHOwhyrIRdbuVDxSytJNDl6z/aAKlaRIwC7y8wJdkBv7FxYGgi0A==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.12.2",
- "@typescript-eslint/scope-manager": "8.56.0",
- "@typescript-eslint/type-utils": "8.56.0",
- "@typescript-eslint/utils": "8.56.0",
- "@typescript-eslint/visitor-keys": "8.56.0",
+ "@typescript-eslint/scope-manager": "8.56.1",
+ "@typescript-eslint/type-utils": "8.56.1",
+ "@typescript-eslint/utils": "8.56.1",
+ "@typescript-eslint/visitor-keys": "8.56.1",
"ignore": "^7.0.5",
"natural-compare": "^1.4.0",
"ts-api-utils": "^2.4.0"
@@ -3465,22 +3625,22 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
- "@typescript-eslint/parser": "^8.56.0",
+ "@typescript-eslint/parser": "^8.56.1",
"eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
"typescript": ">=4.8.4 <6.0.0"
}
},
"node_modules/@typescript-eslint/parser": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.0.tgz",
- "integrity": "sha512-IgSWvLobTDOjnaxAfDTIHaECbkNlAlKv2j5SjpB2v7QHKv1FIfjwMy8FsDbVfDX/KjmCmYICcw7uGaXLhtsLNg==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.1.tgz",
+ "integrity": "sha512-klQbnPAAiGYFyI02+znpBRLyjL4/BrBd0nyWkdC0s/6xFLkXYQ8OoRrSkqacS1ddVxf/LDyODIKbQ5TgKAf/Fg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/scope-manager": "8.56.0",
- "@typescript-eslint/types": "8.56.0",
- "@typescript-eslint/typescript-estree": "8.56.0",
- "@typescript-eslint/visitor-keys": "8.56.0",
+ "@typescript-eslint/scope-manager": "8.56.1",
+ "@typescript-eslint/types": "8.56.1",
+ "@typescript-eslint/typescript-estree": "8.56.1",
+ "@typescript-eslint/visitor-keys": "8.56.1",
"debug": "^4.4.3"
},
"engines": {
@@ -3496,14 +3656,14 @@
}
},
"node_modules/@typescript-eslint/project-service": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.0.tgz",
- "integrity": "sha512-M3rnyL1vIQOMeWxTWIW096/TtVP+8W3p/XnaFflhmcFp+U4zlxUxWj4XwNs6HbDeTtN4yun0GNTTDBw/SvufKg==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.1.tgz",
+ "integrity": "sha512-TAdqQTzHNNvlVFfR+hu2PDJrURiwKsUvxFn1M0h95BB8ah5jejas08jUWG4dBA68jDMI988IvtfdAI53JzEHOQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/tsconfig-utils": "^8.56.0",
- "@typescript-eslint/types": "^8.56.0",
+ "@typescript-eslint/tsconfig-utils": "^8.56.1",
+ "@typescript-eslint/types": "^8.56.1",
"debug": "^4.4.3"
},
"engines": {
@@ -3518,14 +3678,14 @@
}
},
"node_modules/@typescript-eslint/scope-manager": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.0.tgz",
- "integrity": "sha512-7UiO/XwMHquH+ZzfVCfUNkIXlp/yQjjnlYUyYz7pfvlK3/EyyN6BK+emDmGNyQLBtLGaYrTAI6KOw8tFucWL2w==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.1.tgz",
+ "integrity": "sha512-YAi4VDKcIZp0O4tz/haYKhmIDZFEUPOreKbfdAN3SzUDMcPhJ8QI99xQXqX+HoUVq8cs85eRKnD+rne2UAnj2w==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.56.0",
- "@typescript-eslint/visitor-keys": "8.56.0"
+ "@typescript-eslint/types": "8.56.1",
+ "@typescript-eslint/visitor-keys": "8.56.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -3536,9 +3696,9 @@
}
},
"node_modules/@typescript-eslint/tsconfig-utils": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.0.tgz",
- "integrity": "sha512-bSJoIIt4o3lKXD3xmDh9chZcjCz5Lk8xS7Rxn+6l5/pKrDpkCwtQNQQwZ2qRPk7TkUYhrq3WPIHXOXlbXP0itg==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.1.tgz",
+ "integrity": "sha512-qOtCYzKEeyr3aR9f28mPJqBty7+DBqsdd63eO0yyDwc6vgThj2UjWfJIcsFeSucYydqcuudMOprZ+x1SpF3ZuQ==",
"dev": true,
"license": "MIT",
"engines": {
@@ -3553,15 +3713,15 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.0.tgz",
- "integrity": "sha512-qX2L3HWOU2nuDs6GzglBeuFXviDODreS58tLY/BALPC7iu3Fa+J7EOTwnX9PdNBxUI7Uh0ntP0YWGnxCkXzmfA==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.1.tgz",
+ "integrity": "sha512-yB/7dxi7MgTtGhZdaHCemf7PuwrHMenHjmzgUW1aJpO+bBU43OycnM3Wn+DdvDO/8zzA9HlhaJ0AUGuvri4oGg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.56.0",
- "@typescript-eslint/typescript-estree": "8.56.0",
- "@typescript-eslint/utils": "8.56.0",
+ "@typescript-eslint/types": "8.56.1",
+ "@typescript-eslint/typescript-estree": "8.56.1",
+ "@typescript-eslint/utils": "8.56.1",
"debug": "^4.4.3",
"ts-api-utils": "^2.4.0"
},
@@ -3578,9 +3738,9 @@
}
},
"node_modules/@typescript-eslint/types": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.0.tgz",
- "integrity": "sha512-DBsLPs3GsWhX5HylbP9HNG15U0bnwut55Lx12bHB9MpXxQ+R5GC8MwQe+N1UFXxAeQDvEsEDY6ZYwX03K7Z6HQ==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.1.tgz",
+ "integrity": "sha512-dbMkdIUkIkchgGDIv7KLUpa0Mda4IYjo4IAMJUZ+3xNoUXxMsk9YtKpTHSChRS85o+H9ftm51gsK1dZReY9CVw==",
"dev": true,
"license": "MIT",
"engines": {
@@ -3592,18 +3752,18 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.0.tgz",
- "integrity": "sha512-ex1nTUMWrseMltXUHmR2GAQ4d+WjkZCT4f+4bVsps8QEdh0vlBsaCokKTPlnqBFqqGaxilDNJG7b8dolW2m43Q==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.1.tgz",
+ "integrity": "sha512-qzUL1qgalIvKWAf9C1HpvBjif+Vm6rcT5wZd4VoMb9+Km3iS3Cv9DY6dMRMDtPnwRAFyAi7YXJpTIEXLvdfPxg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/project-service": "8.56.0",
- "@typescript-eslint/tsconfig-utils": "8.56.0",
- "@typescript-eslint/types": "8.56.0",
- "@typescript-eslint/visitor-keys": "8.56.0",
+ "@typescript-eslint/project-service": "8.56.1",
+ "@typescript-eslint/tsconfig-utils": "8.56.1",
+ "@typescript-eslint/types": "8.56.1",
+ "@typescript-eslint/visitor-keys": "8.56.1",
"debug": "^4.4.3",
- "minimatch": "^9.0.5",
+ "minimatch": "^10.2.2",
"semver": "^7.7.3",
"tinyglobby": "^0.2.15",
"ts-api-utils": "^2.4.0"
@@ -3620,16 +3780,16 @@
}
},
"node_modules/@typescript-eslint/utils": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.0.tgz",
- "integrity": "sha512-RZ3Qsmi2nFGsS+n+kjLAYDPVlrzf7UhTffrDIKr+h2yzAlYP/y5ZulU0yeDEPItos2Ph46JAL5P/On3pe7kDIQ==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.1.tgz",
+ "integrity": "sha512-HPAVNIME3tABJ61siYlHzSWCGtOoeP2RTIaHXFMPqjrQKCGB9OgUVdiNgH7TJS2JNIQ5qQ4RsAUDuGaGme/KOA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.9.1",
- "@typescript-eslint/scope-manager": "8.56.0",
- "@typescript-eslint/types": "8.56.0",
- "@typescript-eslint/typescript-estree": "8.56.0"
+ "@typescript-eslint/scope-manager": "8.56.1",
+ "@typescript-eslint/types": "8.56.1",
+ "@typescript-eslint/typescript-estree": "8.56.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -3644,13 +3804,13 @@
}
},
"node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.0.tgz",
- "integrity": "sha512-q+SL+b+05Ud6LbEE35qe4A99P+htKTKVbyiNEe45eCbJFyh/HVK9QXwlrbz+Q4L8SOW4roxSVwXYj4DMBT7Ieg==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.1.tgz",
+ "integrity": "sha512-KiROIzYdEV85YygXw6BI/Dx4fnBlFQu6Mq4QE4MOH9fFnhohw6wX/OAvDY2/C+ut0I3RSPKenvZJIVYqJNkhEw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.56.0",
+ "@typescript-eslint/types": "8.56.1",
"eslint-visitor-keys": "^5.0.0"
},
"engines": {
@@ -4001,9 +4161,9 @@
}
},
"node_modules/ast-v8-to-istanbul": {
- "version": "0.3.11",
- "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.11.tgz",
- "integrity": "sha512-Qya9fkoofMjCBNVdWINMjB5KZvkYfaO9/anwkWnjxibpWUxo5iHl2sOdP7/uAqaRuUYuoo8rDwnbaaKVFxoUvw==",
+ "version": "0.3.12",
+ "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.12.tgz",
+ "integrity": "sha512-BRRC8VRZY2R4Z4lFIL35MwNXmwVqBityvOIwETtsCSwvjl0IdgFsy9NhdaA6j74nUdtJJlIypeRhpDam19Wq3g==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -4026,9 +4186,9 @@
"license": "MIT"
},
"node_modules/autoprefixer": {
- "version": "10.4.24",
- "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.24.tgz",
- "integrity": "sha512-uHZg7N9ULTVbutaIsDRoUkoS8/h3bdsmVJYZ5l3wv8Cp/6UIIoRDm90hZ+BwxUj/hGBEzLxdHNSKuFpn8WOyZw==",
+ "version": "10.4.27",
+ "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.27.tgz",
+ "integrity": "sha512-NP9APE+tO+LuJGn7/9+cohklunJsXWiaWEfV3si4Gi/XHDwVNgkwr1J3RQYFIvPy76GmJ9/bW8vyoU1LcxwKHA==",
"dev": true,
"funding": [
{
@@ -4047,7 +4207,7 @@
"license": "MIT",
"dependencies": {
"browserslist": "^4.28.1",
- "caniuse-lite": "^1.0.30001766",
+ "caniuse-lite": "^1.0.30001774",
"fraction.js": "^5.3.4",
"picocolors": "^1.1.1",
"postcss-value-parser": "^4.2.0"
@@ -4063,9 +4223,9 @@
}
},
"node_modules/axios": {
- "version": "1.13.5",
- "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz",
- "integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==",
+ "version": "1.13.6",
+ "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.6.tgz",
+ "integrity": "sha512-ChTCHMouEe2kn713WHbQGcuYrr6fXTBiu460OTwWrWob16g1bXn4vtz07Ope7ewMozJAnEquLk5lWQWtBig9DQ==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.11",
@@ -4074,11 +4234,14 @@
}
},
"node_modules/balanced-match": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
- "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "version": "4.0.4",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz",
+ "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==",
"dev": true,
- "license": "MIT"
+ "license": "MIT",
+ "engines": {
+ "node": "18 || 20 || >=22"
+ }
},
"node_modules/baseline-browser-mapping": {
"version": "2.10.0",
@@ -4104,13 +4267,16 @@
}
},
"node_modules/brace-expansion": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
- "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
+ "version": "5.0.4",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.4.tgz",
+ "integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "balanced-match": "^1.0.0"
+ "balanced-match": "^4.0.2"
+ },
+ "engines": {
+ "node": "18 || 20 || >=22"
}
},
"node_modules/braces": {
@@ -4184,9 +4350,9 @@
}
},
"node_modules/caniuse-lite": {
- "version": "1.0.30001770",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001770.tgz",
- "integrity": "sha512-x/2CLQ1jHENRbHg5PSId2sXq1CIO1CISvwWAj027ltMVG2UNgW+w9oH2+HzgEIRFembL8bUlXtfbBHR1fCg2xw==",
+ "version": "1.0.30001775",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001775.tgz",
+ "integrity": "sha512-s3Qv7Lht9zbVKE9XoTyRG6wVDCKdtOFIjBGg3+Yhn6JaytuNKPIjBMTMIY1AnOH3seL5mvF+x33oGAyK3hVt3A==",
"dev": true,
"funding": [
{
@@ -4204,6 +4370,17 @@
],
"license": "CC-BY-4.0"
},
+ "node_modules/ccount": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz",
+ "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==",
+ "dev": true,
+ "license": "MIT",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/chai": {
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz",
@@ -4231,6 +4408,17 @@
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
+ "node_modules/character-entities": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz",
+ "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==",
+ "dev": true,
+ "license": "MIT",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/class-variance-authority": {
"version": "0.7.1",
"resolved": "https://registry.npmjs.org/class-variance-authority/-/class-variance-authority-0.7.1.tgz",
@@ -4340,6 +4528,13 @@
"node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0"
}
},
+ "node_modules/css-tree/node_modules/mdn-data": {
+ "version": "2.12.2",
+ "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz",
+ "integrity": "sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==",
+ "dev": true,
+ "license": "CC0-1.0"
+ },
"node_modules/css.escape": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz",
@@ -4348,16 +4543,16 @@
"license": "MIT"
},
"node_modules/cssstyle": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-6.0.1.tgz",
- "integrity": "sha512-IoJs7La+oFp/AB033wBStxNOJt4+9hHMxsXUPANcoXL2b3W4DZKghlJ2cI/eyeRZIQ9ysvYEorVhjrcYctWbog==",
+ "version": "6.1.0",
+ "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-6.1.0.tgz",
+ "integrity": "sha512-Ml4fP2UT2K3CUBQnVlbdV/8aFDdlY69E+YnwJM+3VUWl08S3J8c8aRuJqCkD9Py8DHZ7zNNvsfKl8psocHZEFg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@asamuzakjp/css-color": "^4.1.2",
- "@csstools/css-syntax-patches-for-csstree": "^1.0.26",
+ "@asamuzakjp/css-color": "^5.0.0",
+ "@csstools/css-syntax-patches-for-csstree": "^1.0.28",
"css-tree": "^3.1.0",
- "lru-cache": "^11.2.5"
+ "lru-cache": "^11.2.6"
},
"engines": {
"node": ">=20"
@@ -4428,6 +4623,20 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/decode-named-character-reference": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.3.0.tgz",
+ "integrity": "sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "character-entities": "^2.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/deep-is": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
@@ -4470,6 +4679,20 @@
"integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==",
"license": "MIT"
},
+ "node_modules/devlop": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz",
+ "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "dequal": "^2.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/dom-accessibility-api": {
"version": "0.5.16",
"resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz",
@@ -4500,9 +4723,9 @@
"license": "ISC"
},
"node_modules/enhanced-resolve": {
- "version": "5.19.0",
- "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.19.0.tgz",
- "integrity": "sha512-phv3E1Xl4tQOShqSte26C7Fl84EwUdZsyOuSSk9qtAGyyQs2s3jJzComh+Abf4g187lUUAvH+H26omrqia2aGg==",
+ "version": "5.20.0",
+ "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.20.0.tgz",
+ "integrity": "sha512-/ce7+jQ1PQ6rVXwe+jKEg5hW5ciicHwIQUagZkp6IufBoY3YDgdTTY1azVs0qoRgVmvsNB+rbjLJxDAeHHtwsQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -4724,13 +4947,13 @@
}
},
"node_modules/eslint-plugin-react-refresh": {
- "version": "0.5.0",
- "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.5.0.tgz",
- "integrity": "sha512-ZYvmh7VfVgqR/7wR71I3Zl6hK/C5CcxdWYKZSpHawS5JCNgE4efhQWg/+/WPpgGAp9Ngp/rRZYyaIwmPQBq/lA==",
+ "version": "0.5.2",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.5.2.tgz",
+ "integrity": "sha512-hmgTH57GfzoTFjVN0yBwTggnsVUF2tcqi7RJZHqi9lIezSs4eFyAMktA68YD4r5kNw1mxyY4dmkyoFDb3FIqrA==",
"dev": true,
"license": "MIT",
"peerDependencies": {
- "eslint": ">=9"
+ "eslint": "^9 || ^10"
}
},
"node_modules/eslint-scope": {
@@ -4763,6 +4986,13 @@
"url": "https://opencollective.com/eslint"
}
},
+ "node_modules/eslint/node_modules/balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/eslint/node_modules/brace-expansion": {
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
@@ -4798,9 +5028,9 @@
}
},
"node_modules/eslint/node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+ "version": "3.1.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
+ "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -4968,6 +5198,20 @@
"reusify": "^1.0.4"
}
},
+ "node_modules/fault": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/fault/-/fault-2.0.1.tgz",
+ "integrity": "sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "format": "^0.2.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/fd-package-json": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/fd-package-json/-/fd-package-json-2.0.0.tgz",
@@ -5103,6 +5347,15 @@
"node": ">= 6"
}
},
+ "node_modules/format": {
+ "version": "0.2.2",
+ "resolved": "https://registry.npmjs.org/format/-/format-0.2.2.tgz",
+ "integrity": "sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.4.x"
+ }
+ },
"node_modules/formatly": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/formatly/-/formatly-0.3.0.tgz",
@@ -5213,6 +5466,13 @@
"node": ">= 0.4"
}
},
+ "node_modules/github-slugger": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-2.0.0.tgz",
+ "integrity": "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==",
+ "dev": true,
+ "license": "ISC"
+ },
"node_modules/glob-parent": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
@@ -6061,6 +6321,17 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/longest-streak": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz",
+ "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==",
+ "dev": true,
+ "license": "MIT",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/lru-cache": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
@@ -6129,6 +6400,17 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/markdown-table": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz",
+ "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==",
+ "dev": true,
+ "license": "MIT",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/math-intrinsics": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
@@ -6138,10 +6420,255 @@
"node": ">= 0.4"
}
},
+ "node_modules/mdast-util-find-and-replace": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz",
+ "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "escape-string-regexp": "^5.0.0",
+ "unist-util-is": "^6.0.0",
+ "unist-util-visit-parents": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
+ "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/mdast-util-from-markdown": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.3.tgz",
+ "integrity": "sha512-W4mAWTvSlKvf8L6J+VN9yLSqQ9AOAAvHuoDAmPkz4dHf553m5gVj2ejadHJhoJmcmxEnOv6Pa8XJhpxE93kb8Q==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "@types/unist": "^3.0.0",
+ "decode-named-character-reference": "^1.0.0",
+ "devlop": "^1.0.0",
+ "mdast-util-to-string": "^4.0.0",
+ "micromark": "^4.0.0",
+ "micromark-util-decode-numeric-character-reference": "^2.0.0",
+ "micromark-util-decode-string": "^2.0.0",
+ "micromark-util-normalize-identifier": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0",
+ "unist-util-stringify-position": "^4.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-frontmatter": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-2.0.1.tgz",
+ "integrity": "sha512-LRqI9+wdgC25P0URIJY9vwocIzCcksduHQ9OF2joxQoyTNVduwLAFUzjoopuRJbJAReaKrNQKAZKL3uCMugWJA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "devlop": "^1.0.0",
+ "escape-string-regexp": "^5.0.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0",
+ "micromark-extension-frontmatter": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-frontmatter/node_modules/escape-string-regexp": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
+ "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/mdast-util-gfm": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz",
+ "integrity": "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-gfm-autolink-literal": "^2.0.0",
+ "mdast-util-gfm-footnote": "^2.0.0",
+ "mdast-util-gfm-strikethrough": "^2.0.0",
+ "mdast-util-gfm-table": "^2.0.0",
+ "mdast-util-gfm-task-list-item": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-autolink-literal": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz",
+ "integrity": "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "ccount": "^2.0.0",
+ "devlop": "^1.0.0",
+ "mdast-util-find-and-replace": "^3.0.0",
+ "micromark-util-character": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-footnote": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz",
+ "integrity": "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "devlop": "^1.1.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0",
+ "micromark-util-normalize-identifier": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-strikethrough": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz",
+ "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-table": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz",
+ "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "devlop": "^1.0.0",
+ "markdown-table": "^3.0.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-task-list-item": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz",
+ "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "devlop": "^1.0.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-phrasing": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz",
+ "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "unist-util-is": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-to-markdown": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz",
+ "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "@types/unist": "^3.0.0",
+ "longest-streak": "^3.0.0",
+ "mdast-util-phrasing": "^4.0.0",
+ "mdast-util-to-string": "^4.0.0",
+ "micromark-util-classify-character": "^2.0.0",
+ "micromark-util-decode-string": "^2.0.0",
+ "unist-util-visit": "^5.0.0",
+ "zwitch": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-to-string": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
+ "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/mdast": "^4.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/mdn-data": {
- "version": "2.12.2",
- "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz",
- "integrity": "sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==",
+ "version": "2.23.0",
+ "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.23.0.tgz",
+ "integrity": "sha512-786vq1+4079JSeu2XdcDjrhi/Ry7BWtjDl9WtGPWLiIHb2T66GvIVflZTBoSNZ5JqTtJGYEVMuFA/lbQlMOyDQ==",
"dev": true,
"license": "CC0-1.0"
},
@@ -6155,6 +6682,614 @@
"node": ">= 8"
}
},
+ "node_modules/micromark": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz",
+ "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "@types/debug": "^4.0.0",
+ "debug": "^4.0.0",
+ "decode-named-character-reference": "^1.0.0",
+ "devlop": "^1.0.0",
+ "micromark-core-commonmark": "^2.0.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-chunked": "^2.0.0",
+ "micromark-util-combine-extensions": "^2.0.0",
+ "micromark-util-decode-numeric-character-reference": "^2.0.0",
+ "micromark-util-encode": "^2.0.0",
+ "micromark-util-normalize-identifier": "^2.0.0",
+ "micromark-util-resolve-all": "^2.0.0",
+ "micromark-util-sanitize-uri": "^2.0.0",
+ "micromark-util-subtokenize": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-core-commonmark": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz",
+ "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "decode-named-character-reference": "^1.0.0",
+ "devlop": "^1.0.0",
+ "micromark-factory-destination": "^2.0.0",
+ "micromark-factory-label": "^2.0.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-factory-title": "^2.0.0",
+ "micromark-factory-whitespace": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-chunked": "^2.0.0",
+ "micromark-util-classify-character": "^2.0.0",
+ "micromark-util-html-tag-name": "^2.0.0",
+ "micromark-util-normalize-identifier": "^2.0.0",
+ "micromark-util-resolve-all": "^2.0.0",
+ "micromark-util-subtokenize": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-extension-frontmatter": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-frontmatter/-/micromark-extension-frontmatter-2.0.0.tgz",
+ "integrity": "sha512-C4AkuM3dA58cgZha7zVnuVxBhDsbttIMiytjgsM2XbHAB2faRVaHRle40558FBN+DJcrLNCoqG5mlrpdU4cRtg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "fault": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz",
+ "integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "micromark-extension-gfm-autolink-literal": "^2.0.0",
+ "micromark-extension-gfm-footnote": "^2.0.0",
+ "micromark-extension-gfm-strikethrough": "^2.0.0",
+ "micromark-extension-gfm-table": "^2.0.0",
+ "micromark-extension-gfm-tagfilter": "^2.0.0",
+ "micromark-extension-gfm-task-list-item": "^2.0.0",
+ "micromark-util-combine-extensions": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-autolink-literal": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz",
+ "integrity": "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-sanitize-uri": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-footnote": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz",
+ "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-core-commonmark": "^2.0.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-normalize-identifier": "^2.0.0",
+ "micromark-util-sanitize-uri": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-strikethrough": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz",
+ "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-util-chunked": "^2.0.0",
+ "micromark-util-classify-character": "^2.0.0",
+ "micromark-util-resolve-all": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-table": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz",
+ "integrity": "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-tagfilter": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz",
+ "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-task-list-item": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz",
+ "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-factory-destination": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz",
+ "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-factory-label": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz",
+ "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-factory-space": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz",
+ "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-factory-title": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz",
+ "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-factory-whitespace": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz",
+ "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-character": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+ "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-chunked": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz",
+ "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-symbol": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-classify-character": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz",
+ "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-combine-extensions": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz",
+ "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-chunked": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-decode-numeric-character-reference": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz",
+ "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-symbol": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-decode-string": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz",
+ "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "decode-named-character-reference": "^1.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-decode-numeric-character-reference": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-encode": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
+ "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT"
+ },
+ "node_modules/micromark-util-html-tag-name": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz",
+ "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT"
+ },
+ "node_modules/micromark-util-normalize-identifier": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz",
+ "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-symbol": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-resolve-all": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz",
+ "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-sanitize-uri": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
+ "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-encode": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-subtokenize": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz",
+ "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-util-chunked": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-symbol": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+ "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT"
+ },
+ "node_modules/micromark-util-types": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+ "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT"
+ },
"node_modules/micromatch": {
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
@@ -6214,16 +7349,16 @@
}
},
"node_modules/minimatch": {
- "version": "9.0.5",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
- "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+ "version": "10.2.4",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz",
+ "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==",
"dev": true,
- "license": "ISC",
+ "license": "BlueOak-1.0.0",
"dependencies": {
- "brace-expansion": "^2.0.1"
+ "brace-expansion": "^5.0.2"
},
"engines": {
- "node": ">=16 || 14 >=14.17"
+ "node": "18 || 20 || >=22"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
@@ -6319,35 +7454,35 @@
}
},
"node_modules/oxc-resolver": {
- "version": "11.18.0",
- "resolved": "https://registry.npmjs.org/oxc-resolver/-/oxc-resolver-11.18.0.tgz",
- "integrity": "sha512-Fv/b05AfhpYoCDvsog6tgsDm2yIwIeJafpMFLncNwKHRYu+Y1xQu5Q/rgUn7xBfuhNgjtPO7C0jCf7p2fLDj1g==",
+ "version": "11.19.1",
+ "resolved": "https://registry.npmjs.org/oxc-resolver/-/oxc-resolver-11.19.1.tgz",
+ "integrity": "sha512-qE/CIg/spwrTBFt5aKmwe3ifeDdLfA2NESN30E42X/lII5ClF8V7Wt6WIJhcGZjp0/Q+nQ+9vgxGk//xZNX2hg==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/Boshen"
},
"optionalDependencies": {
- "@oxc-resolver/binding-android-arm-eabi": "11.18.0",
- "@oxc-resolver/binding-android-arm64": "11.18.0",
- "@oxc-resolver/binding-darwin-arm64": "11.18.0",
- "@oxc-resolver/binding-darwin-x64": "11.18.0",
- "@oxc-resolver/binding-freebsd-x64": "11.18.0",
- "@oxc-resolver/binding-linux-arm-gnueabihf": "11.18.0",
- "@oxc-resolver/binding-linux-arm-musleabihf": "11.18.0",
- "@oxc-resolver/binding-linux-arm64-gnu": "11.18.0",
- "@oxc-resolver/binding-linux-arm64-musl": "11.18.0",
- "@oxc-resolver/binding-linux-ppc64-gnu": "11.18.0",
- "@oxc-resolver/binding-linux-riscv64-gnu": "11.18.0",
- "@oxc-resolver/binding-linux-riscv64-musl": "11.18.0",
- "@oxc-resolver/binding-linux-s390x-gnu": "11.18.0",
- "@oxc-resolver/binding-linux-x64-gnu": "11.18.0",
- "@oxc-resolver/binding-linux-x64-musl": "11.18.0",
- "@oxc-resolver/binding-openharmony-arm64": "11.18.0",
- "@oxc-resolver/binding-wasm32-wasi": "11.18.0",
- "@oxc-resolver/binding-win32-arm64-msvc": "11.18.0",
- "@oxc-resolver/binding-win32-ia32-msvc": "11.18.0",
- "@oxc-resolver/binding-win32-x64-msvc": "11.18.0"
+ "@oxc-resolver/binding-android-arm-eabi": "11.19.1",
+ "@oxc-resolver/binding-android-arm64": "11.19.1",
+ "@oxc-resolver/binding-darwin-arm64": "11.19.1",
+ "@oxc-resolver/binding-darwin-x64": "11.19.1",
+ "@oxc-resolver/binding-freebsd-x64": "11.19.1",
+ "@oxc-resolver/binding-linux-arm-gnueabihf": "11.19.1",
+ "@oxc-resolver/binding-linux-arm-musleabihf": "11.19.1",
+ "@oxc-resolver/binding-linux-arm64-gnu": "11.19.1",
+ "@oxc-resolver/binding-linux-arm64-musl": "11.19.1",
+ "@oxc-resolver/binding-linux-ppc64-gnu": "11.19.1",
+ "@oxc-resolver/binding-linux-riscv64-gnu": "11.19.1",
+ "@oxc-resolver/binding-linux-riscv64-musl": "11.19.1",
+ "@oxc-resolver/binding-linux-s390x-gnu": "11.19.1",
+ "@oxc-resolver/binding-linux-x64-gnu": "11.19.1",
+ "@oxc-resolver/binding-linux-x64-musl": "11.19.1",
+ "@oxc-resolver/binding-openharmony-arm64": "11.19.1",
+ "@oxc-resolver/binding-wasm32-wasi": "11.19.1",
+ "@oxc-resolver/binding-win32-arm64-msvc": "11.19.1",
+ "@oxc-resolver/binding-win32-ia32-msvc": "11.19.1",
+ "@oxc-resolver/binding-win32-x64-msvc": "11.19.1"
}
},
"node_modules/p-limit": {
@@ -6747,9 +7882,9 @@
}
},
"node_modules/react-router": {
- "version": "7.13.0",
- "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.13.0.tgz",
- "integrity": "sha512-PZgus8ETambRT17BUm/LL8lX3Of+oiLaPuVTRH3l1eLvSPpKO3AvhAEb5N7ihAFZQrYDqkvvWfFh9p0z9VsjLw==",
+ "version": "7.13.1",
+ "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.13.1.tgz",
+ "integrity": "sha512-td+xP4X2/6BJvZoX6xw++A2DdEi++YypA69bJUV5oVvqf6/9/9nNlD70YO1e9d3MyamJEBQFEzk6mbfDYbqrSA==",
"license": "MIT",
"dependencies": {
"cookie": "^1.0.1",
@@ -6769,12 +7904,12 @@
}
},
"node_modules/react-router-dom": {
- "version": "7.13.0",
- "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.13.0.tgz",
- "integrity": "sha512-5CO/l5Yahi2SKC6rGZ+HDEjpjkGaG/ncEP7eWFTvFxbHP8yeeI0PxTDjimtpXYlR3b3i9/WIL4VJttPrESIf2g==",
+ "version": "7.13.1",
+ "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.13.1.tgz",
+ "integrity": "sha512-UJnV3Rxc5TgUPJt2KJpo1Jpy0OKQr0AjgbZzBFjaPJcFOb2Y8jA5H3LT8HUJAiRLlWrEXWHbF1Z4SCZaQjWDHw==",
"license": "MIT",
"dependencies": {
- "react-router": "7.13.0"
+ "react-router": "7.13.1"
},
"engines": {
"node": ">=20.0.0"
@@ -6852,9 +7987,9 @@
}
},
"node_modules/rollup": {
- "version": "4.58.0",
- "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.58.0.tgz",
- "integrity": "sha512-wbT0mBmWbIvvq8NeEYWWvevvxnOyhKChir47S66WCxw1SXqhw7ssIYejnQEVt7XYQpsj2y8F9PM+Cr3SNEa0gw==",
+ "version": "4.59.0",
+ "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.59.0.tgz",
+ "integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -6868,31 +8003,31 @@
"npm": ">=8.0.0"
},
"optionalDependencies": {
- "@rollup/rollup-android-arm-eabi": "4.58.0",
- "@rollup/rollup-android-arm64": "4.58.0",
- "@rollup/rollup-darwin-arm64": "4.58.0",
- "@rollup/rollup-darwin-x64": "4.58.0",
- "@rollup/rollup-freebsd-arm64": "4.58.0",
- "@rollup/rollup-freebsd-x64": "4.58.0",
- "@rollup/rollup-linux-arm-gnueabihf": "4.58.0",
- "@rollup/rollup-linux-arm-musleabihf": "4.58.0",
- "@rollup/rollup-linux-arm64-gnu": "4.58.0",
- "@rollup/rollup-linux-arm64-musl": "4.58.0",
- "@rollup/rollup-linux-loong64-gnu": "4.58.0",
- "@rollup/rollup-linux-loong64-musl": "4.58.0",
- "@rollup/rollup-linux-ppc64-gnu": "4.58.0",
- "@rollup/rollup-linux-ppc64-musl": "4.58.0",
- "@rollup/rollup-linux-riscv64-gnu": "4.58.0",
- "@rollup/rollup-linux-riscv64-musl": "4.58.0",
- "@rollup/rollup-linux-s390x-gnu": "4.58.0",
- "@rollup/rollup-linux-x64-gnu": "4.58.0",
- "@rollup/rollup-linux-x64-musl": "4.58.0",
- "@rollup/rollup-openbsd-x64": "4.58.0",
- "@rollup/rollup-openharmony-arm64": "4.58.0",
- "@rollup/rollup-win32-arm64-msvc": "4.58.0",
- "@rollup/rollup-win32-ia32-msvc": "4.58.0",
- "@rollup/rollup-win32-x64-gnu": "4.58.0",
- "@rollup/rollup-win32-x64-msvc": "4.58.0",
+ "@rollup/rollup-android-arm-eabi": "4.59.0",
+ "@rollup/rollup-android-arm64": "4.59.0",
+ "@rollup/rollup-darwin-arm64": "4.59.0",
+ "@rollup/rollup-darwin-x64": "4.59.0",
+ "@rollup/rollup-freebsd-arm64": "4.59.0",
+ "@rollup/rollup-freebsd-x64": "4.59.0",
+ "@rollup/rollup-linux-arm-gnueabihf": "4.59.0",
+ "@rollup/rollup-linux-arm-musleabihf": "4.59.0",
+ "@rollup/rollup-linux-arm64-gnu": "4.59.0",
+ "@rollup/rollup-linux-arm64-musl": "4.59.0",
+ "@rollup/rollup-linux-loong64-gnu": "4.59.0",
+ "@rollup/rollup-linux-loong64-musl": "4.59.0",
+ "@rollup/rollup-linux-ppc64-gnu": "4.59.0",
+ "@rollup/rollup-linux-ppc64-musl": "4.59.0",
+ "@rollup/rollup-linux-riscv64-gnu": "4.59.0",
+ "@rollup/rollup-linux-riscv64-musl": "4.59.0",
+ "@rollup/rollup-linux-s390x-gnu": "4.59.0",
+ "@rollup/rollup-linux-x64-gnu": "4.59.0",
+ "@rollup/rollup-linux-x64-musl": "4.59.0",
+ "@rollup/rollup-openbsd-x64": "4.59.0",
+ "@rollup/rollup-openharmony-arm64": "4.59.0",
+ "@rollup/rollup-win32-arm64-msvc": "4.59.0",
+ "@rollup/rollup-win32-ia32-msvc": "4.59.0",
+ "@rollup/rollup-win32-x64-gnu": "4.59.0",
+ "@rollup/rollup-win32-x64-msvc": "4.59.0",
"fsevents": "~2.3.2"
}
},
@@ -7097,9 +8232,9 @@
}
},
"node_modules/tailwindcss": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.2.0.tgz",
- "integrity": "sha512-yYzTZ4++b7fNYxFfpnberEEKu43w44aqDMNM9MHMmcKuCH7lL8jJ4yJ7LGHv7rSwiqM0nkiobF9I6cLlpS2P7Q==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.2.1.tgz",
+ "integrity": "sha512-/tBrSQ36vCleJkAOsy9kbNTgaxvGbyOamC30PRePTQe/o1MFwEKHQk4Cn7BNGaPtjp+PuUrByJehM1hgxfq4sw==",
"dev": true,
"license": "MIT"
},
@@ -7275,16 +8410,16 @@
}
},
"node_modules/typescript-eslint": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.56.0.tgz",
- "integrity": "sha512-c7toRLrotJ9oixgdW7liukZpsnq5CZ7PuKztubGYlNppuTqhIoWfhgHo/7EU0v06gS2l/x0i2NEFK1qMIf0rIg==",
+ "version": "8.56.1",
+ "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.56.1.tgz",
+ "integrity": "sha512-U4lM6pjmBX7J5wk4szltF7I1cGBHXZopnAXCMXb3+fZ3B/0Z3hq3wS/CCUB2NZBNAExK92mCU2tEohWuwVMsDQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/eslint-plugin": "8.56.0",
- "@typescript-eslint/parser": "8.56.0",
- "@typescript-eslint/typescript-estree": "8.56.0",
- "@typescript-eslint/utils": "8.56.0"
+ "@typescript-eslint/eslint-plugin": "8.56.1",
+ "@typescript-eslint/parser": "8.56.1",
+ "@typescript-eslint/typescript-estree": "8.56.1",
+ "@typescript-eslint/utils": "8.56.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -7315,6 +8450,65 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/unist-util-is": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz",
+ "integrity": "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/unist": "^3.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/unist-util-stringify-position": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+ "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/unist": "^3.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/unist-util-visit": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.1.0.tgz",
+ "integrity": "sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/unist": "^3.0.0",
+ "unist-util-is": "^6.0.0",
+ "unist-util-visit-parents": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/unist-util-visit-parents": {
+ "version": "6.0.2",
+ "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz",
+ "integrity": "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/unist": "^3.0.0",
+ "unist-util-is": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/update-browserslist-db": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz",
@@ -7745,6 +8939,17 @@
"peerDependencies": {
"zod": "^3.25.0 || ^4.0.0"
}
+ },
+ "node_modules/zwitch": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz",
+ "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==",
+ "dev": true,
+ "license": "MIT",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
}
}
}
diff --git a/frontend/package.json b/frontend/package.json
index 047b39b7..79ec151e 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -34,7 +34,7 @@
"@radix-ui/react-tabs": "^1.1.13",
"@radix-ui/react-tooltip": "^1.2.8",
"@tanstack/react-query": "^5.90.21",
- "axios": "^1.13.5",
+ "axios": "^1.13.6",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"date-fns": "^4.1.0",
@@ -46,36 +46,39 @@
"react-hook-form": "^7.71.2",
"react-hot-toast": "^2.6.0",
"react-i18next": "^16.5.4",
- "react-router-dom": "^7.13.0",
+ "react-router-dom": "^7.13.1",
"tailwind-merge": "^3.5.0",
"tldts": "^7.0.23"
},
"devDependencies": {
+ "@eslint/css": "^0.14.1",
"@eslint/js": "^9.39.3 <10.0.0",
+ "@eslint/json": "^1.0.1",
+ "@eslint/markdown": "^7.5.1",
"@playwright/test": "^1.58.2",
- "@tailwindcss/postcss": "^4.2.0",
+ "@tailwindcss/postcss": "^4.2.1",
"@testing-library/jest-dom": "^6.9.1",
"@testing-library/react": "^16.3.2",
"@testing-library/user-event": "^14.6.1",
- "@types/node": "^25.3.0",
+ "@types/node": "^25.3.3",
"@types/react": "^19.2.14",
"@types/react-dom": "^19.2.3",
- "@typescript-eslint/eslint-plugin": "^8.56.0",
- "@typescript-eslint/parser": "^8.56.0",
+ "@typescript-eslint/eslint-plugin": "^8.56.1",
+ "@typescript-eslint/parser": "^8.56.1",
"@vitejs/plugin-react": "^5.1.4",
"@vitest/coverage-istanbul": "^4.0.18",
"@vitest/coverage-v8": "^4.0.18",
"@vitest/ui": "^4.0.18",
- "autoprefixer": "^10.4.24",
+ "autoprefixer": "^10.4.27",
"eslint": "^9.39.3 <10.0.0",
"eslint-plugin-react-hooks": "^7.0.1",
- "eslint-plugin-react-refresh": "^0.5.0",
+ "eslint-plugin-react-refresh": "^0.5.2",
"jsdom": "28.1.0",
"knip": "^5.85.0",
"postcss": "^8.5.6",
- "tailwindcss": "^4.2.0",
+ "tailwindcss": "^4.2.1",
"typescript": "^5.9.3",
- "typescript-eslint": "^8.56.0",
+ "typescript-eslint": "^8.56.1",
"vite": "^7.3.1",
"vitest": "^4.0.18"
}
diff --git a/frontend/src/api/__tests__/notifications.test.ts b/frontend/src/api/__tests__/notifications.test.ts
index 3a3eb73e..5339161a 100644
--- a/frontend/src/api/__tests__/notifications.test.ts
+++ b/frontend/src/api/__tests__/notifications.test.ts
@@ -52,9 +52,9 @@ describe('notifications api', () => {
await testProvider({ id: '2', name: 'test', type: 'discord' })
expect(client.post).toHaveBeenCalledWith('/notifications/providers/test', { id: '2', name: 'test', type: 'discord' })
- await expect(createProvider({ name: 'x', type: 'slack' })).rejects.toThrow('Only discord notification providers are supported')
- await expect(updateProvider('2', { name: 'updated', type: 'generic' })).rejects.toThrow('Only discord notification providers are supported')
- await expect(testProvider({ id: '2', name: 'test', type: 'telegram' })).rejects.toThrow('Only discord notification providers are supported')
+ await expect(createProvider({ name: 'x', type: 'slack' })).rejects.toThrow('Unsupported notification provider type: slack')
+ await expect(updateProvider('2', { name: 'updated', type: 'generic' })).rejects.toThrow('Unsupported notification provider type: generic')
+ await expect(testProvider({ id: '2', name: 'test', type: 'telegram' })).rejects.toThrow('Unsupported notification provider type: telegram')
})
it('templates and previews use merged payloads', async () => {
@@ -68,7 +68,10 @@ describe('notifications api', () => {
expect(preview).toEqual({ preview: 'ok' })
expect(client.post).toHaveBeenCalledWith('/notifications/providers/preview', { name: 'provider', type: 'discord', data: { user: 'alice' } })
- await expect(previewProvider({ name: 'provider', type: 'webhook' }, { user: 'alice' })).rejects.toThrow('Only discord notification providers are supported')
+ vi.mocked(client.post).mockResolvedValueOnce({ data: { preview: 'webhook-ok' } })
+ const webhookPreview = await previewProvider({ name: 'provider', type: 'webhook' }, { user: 'alice' })
+ expect(webhookPreview).toEqual({ preview: 'webhook-ok' })
+ expect(client.post).toHaveBeenCalledWith('/notifications/providers/preview', { name: 'provider', type: 'webhook', data: { user: 'alice' } })
})
it('external template endpoints shape payloads', async () => {
diff --git a/frontend/src/api/__tests__/user.test.ts b/frontend/src/api/__tests__/user.test.ts
new file mode 100644
index 00000000..ee43f501
--- /dev/null
+++ b/frontend/src/api/__tests__/user.test.ts
@@ -0,0 +1,69 @@
+import { beforeEach, describe, expect, it, vi } from 'vitest'
+import client from '../client'
+import { getProfile, regenerateApiKey, updateProfile } from '../user'
+
+vi.mock('../client', () => ({
+ default: {
+ get: vi.fn(),
+ post: vi.fn(),
+ },
+}))
+
+describe('user api', () => {
+ beforeEach(() => {
+ vi.clearAllMocks()
+ })
+
+ it('fetches profile using masked API key fields', async () => {
+ vi.mocked(client.get).mockResolvedValueOnce({
+ data: {
+ id: 1,
+ email: 'admin@example.com',
+ name: 'Admin',
+ role: 'admin',
+ has_api_key: true,
+ api_key_masked: '********',
+ },
+ })
+
+ const profile = await getProfile()
+
+ expect(client.get).toHaveBeenCalledWith('/user/profile')
+ expect(profile.has_api_key).toBe(true)
+ expect(profile.api_key_masked).toBe('********')
+ })
+
+ it('regenerates API key and returns metadata-only response', async () => {
+ vi.mocked(client.post).mockResolvedValueOnce({
+ data: {
+ message: 'API key regenerated successfully',
+ has_api_key: true,
+ api_key_masked: '********',
+ api_key_updated: '2026-02-25T00:00:00Z',
+ },
+ })
+
+ const result = await regenerateApiKey()
+
+ expect(client.post).toHaveBeenCalledWith('/user/api-key')
+ expect(result.has_api_key).toBe(true)
+ expect(result.api_key_masked).toBe('********')
+ expect(result.api_key_updated).toBe('2026-02-25T00:00:00Z')
+ })
+
+ it('updates profile with optional current password', async () => {
+ vi.mocked(client.post).mockResolvedValueOnce({ data: { message: 'ok' } })
+
+ await updateProfile({
+ name: 'Updated Name',
+ email: 'updated@example.com',
+ current_password: 'current-password',
+ })
+
+ expect(client.post).toHaveBeenCalledWith('/user/profile', {
+ name: 'Updated Name',
+ email: 'updated@example.com',
+ current_password: 'current-password',
+ })
+ })
+})
diff --git a/frontend/src/api/__tests__/users.test.ts b/frontend/src/api/__tests__/users.test.ts
index ab4b3f81..bab06a01 100644
--- a/frontend/src/api/__tests__/users.test.ts
+++ b/frontend/src/api/__tests__/users.test.ts
@@ -50,7 +50,7 @@ describe('users api', () => {
})
it('invites users and updates permissions', async () => {
- vi.mocked(client.post).mockResolvedValueOnce({ data: { invite_token: 't', invite_url: 'https://charon.example.com/accept-invite?token=t' } })
+ vi.mocked(client.post).mockResolvedValueOnce({ data: { invite_token_masked: '********', invite_url: '[REDACTED]' } })
await inviteUser({ email: 'i', permission_mode: 'allow_all' })
expect(client.post).toHaveBeenCalledWith('/users/invite', { email: 'i', permission_mode: 'allow_all' })
diff --git a/frontend/src/api/notifications.test.ts b/frontend/src/api/notifications.test.ts
index 59d4861c..36a01b60 100644
--- a/frontend/src/api/notifications.test.ts
+++ b/frontend/src/api/notifications.test.ts
@@ -88,14 +88,38 @@ describe('notifications api', () => {
expect(mockedClient.delete).toHaveBeenCalledWith('/notifications/providers/new')
})
- it('rejects non-discord type before submit for provider mutations and preview', async () => {
- await expect(createProvider({ name: 'Bad', type: 'slack' })).rejects.toThrow('Only discord notification providers are supported')
- await expect(updateProvider('bad', { type: 'generic' })).rejects.toThrow('Only discord notification providers are supported')
- await expect(testProvider({ id: 'bad', type: 'email' })).rejects.toThrow('Only discord notification providers are supported')
- await expect(previewProvider({ id: 'bad', type: 'gotify' })).rejects.toThrow('Only discord notification providers are supported')
+ it('supports discord, gotify, and webhook while enforcing token payload contract', async () => {
+ mockedClient.post.mockResolvedValue({ data: { id: 'ok' } })
+ mockedClient.put.mockResolvedValue({ data: { id: 'ok' } })
- expect(mockedClient.post).not.toHaveBeenCalled()
- expect(mockedClient.put).not.toHaveBeenCalled()
+ await createProvider({ name: 'Gotify', type: 'gotify', gotify_token: 'secret-token' })
+ expect(mockedClient.post).toHaveBeenCalledWith('/notifications/providers', {
+ name: 'Gotify',
+ type: 'gotify',
+ token: 'secret-token',
+ })
+
+ await updateProvider('ok', { type: 'webhook', url: 'https://example.com/webhook', gotify_token: 'should-not-send' })
+ expect(mockedClient.put).toHaveBeenCalledWith('/notifications/providers/ok', {
+ type: 'webhook',
+ url: 'https://example.com/webhook',
+ })
+
+ await testProvider({ id: 'ok', type: 'gotify', gotify_token: 'should-not-send' })
+ expect(mockedClient.post).toHaveBeenCalledWith('/notifications/providers/test', {
+ id: 'ok',
+ type: 'gotify',
+ })
+
+ await previewProvider({ id: 'ok', type: 'gotify', gotify_token: 'should-not-send' })
+ expect(mockedClient.post).toHaveBeenCalledWith('/notifications/providers/preview', {
+ id: 'ok',
+ type: 'gotify',
+ })
+
+ await expect(createProvider({ name: 'Bad', type: 'slack' })).rejects.toThrow('Unsupported notification provider type: slack')
+ await expect(updateProvider('bad', { type: 'generic' })).rejects.toThrow('Unsupported notification provider type: generic')
+ await expect(testProvider({ id: 'bad', type: 'email' })).rejects.toThrow('Unsupported notification provider type: email')
})
it('fetches templates and previews provider payloads with data', async () => {
diff --git a/frontend/src/api/notifications.ts b/frontend/src/api/notifications.ts
index ab2dcd59..2490c243 100644
--- a/frontend/src/api/notifications.ts
+++ b/frontend/src/api/notifications.ts
@@ -1,6 +1,24 @@
import client from './client';
-const DISCORD_PROVIDER_TYPE = 'discord' as const;
+export const SUPPORTED_NOTIFICATION_PROVIDER_TYPES = ['discord', 'gotify', 'webhook'] as const;
+export type SupportedNotificationProviderType = (typeof SUPPORTED_NOTIFICATION_PROVIDER_TYPES)[number];
+const DEFAULT_PROVIDER_TYPE: SupportedNotificationProviderType = 'discord';
+
+const isSupportedNotificationProviderType = (type: string | undefined): type is SupportedNotificationProviderType =>
+ typeof type === 'string' && SUPPORTED_NOTIFICATION_PROVIDER_TYPES.includes(type.toLowerCase() as SupportedNotificationProviderType);
+
+const resolveProviderTypeOrThrow = (type: string | undefined): SupportedNotificationProviderType => {
+ if (typeof type === 'undefined') {
+ return DEFAULT_PROVIDER_TYPE;
+ }
+
+ const normalizedType = type.toLowerCase();
+ if (isSupportedNotificationProviderType(normalizedType)) {
+ return normalizedType;
+ }
+
+ throw new Error(`Unsupported notification provider type: ${type}`);
+};
/** Notification provider configuration. */
export interface NotificationProvider {
@@ -10,6 +28,9 @@ export interface NotificationProvider {
url: string;
config?: string;
template?: string;
+ gotify_token?: string;
+ token?: string;
+ has_token?: boolean;
enabled: boolean;
notify_proxy_hosts: boolean;
notify_remote_servers: boolean;
@@ -23,19 +44,39 @@ export interface NotificationProvider {
created_at: string;
}
-const withDiscordType = (data: Partial): Partial => {
- const normalizedType = typeof data.type === 'string' ? data.type.toLowerCase() : undefined;
- if (normalizedType !== DISCORD_PROVIDER_TYPE) {
- return { ...data, type: DISCORD_PROVIDER_TYPE };
+const sanitizeProviderForWriteAction = (data: Partial): Partial => {
+ const type = resolveProviderTypeOrThrow(data.type);
+ const payload: Partial = {
+ ...data,
+ type,
+ };
+
+ const normalizedToken = typeof payload.gotify_token === 'string' && payload.gotify_token.trim().length > 0
+ ? payload.gotify_token.trim()
+ : typeof payload.token === 'string' && payload.token.trim().length > 0
+ ? payload.token.trim()
+ : undefined;
+
+ delete payload.gotify_token;
+
+ if (type !== 'gotify') {
+ delete payload.token;
+ return payload;
}
- return { ...data, type: DISCORD_PROVIDER_TYPE };
+ if (normalizedToken) {
+ payload.token = normalizedToken;
+ } else {
+ delete payload.token;
+ }
+
+ return payload;
};
-const assertDiscordOnlyInput = (data: Partial): void => {
- if (typeof data.type === 'string' && data.type.toLowerCase() !== DISCORD_PROVIDER_TYPE) {
- throw new Error('Only discord notification providers are supported');
- }
+const sanitizeProviderForReadLikeAction = (data: Partial): Partial => {
+ const payload = sanitizeProviderForWriteAction(data);
+ delete payload.token;
+ return payload;
};
/**
@@ -55,8 +96,7 @@ export const getProviders = async () => {
* @throws {AxiosError} If creation fails
*/
export const createProvider = async (data: Partial) => {
- assertDiscordOnlyInput(data);
- const response = await client.post('/notifications/providers', withDiscordType(data));
+ const response = await client.post('/notifications/providers', sanitizeProviderForWriteAction(data));
return response.data;
};
@@ -68,8 +108,7 @@ export const createProvider = async (data: Partial) => {
* @throws {AxiosError} If update fails or provider not found
*/
export const updateProvider = async (id: string, data: Partial) => {
- assertDiscordOnlyInput(data);
- const response = await client.put(`/notifications/providers/${id}`, withDiscordType(data));
+ const response = await client.put(`/notifications/providers/${id}`, sanitizeProviderForWriteAction(data));
return response.data;
};
@@ -88,8 +127,7 @@ export const deleteProvider = async (id: string) => {
* @throws {AxiosError} If test fails
*/
export const testProvider = async (provider: Partial) => {
- assertDiscordOnlyInput(provider);
- await client.post('/notifications/providers/test', withDiscordType(provider));
+ await client.post('/notifications/providers/test', sanitizeProviderForReadLikeAction(provider));
};
/**
@@ -116,8 +154,7 @@ export interface NotificationTemplate {
* @throws {AxiosError} If preview fails
*/
export const previewProvider = async (provider: Partial, data?: Record) => {
- assertDiscordOnlyInput(provider);
- const payload: Record = withDiscordType(provider) as Record;
+ const payload: Record = sanitizeProviderForReadLikeAction(provider) as Record;
if (data) payload.data = data;
const response = await client.post('/notifications/providers/preview', payload);
return response.data;
diff --git a/frontend/src/api/proxyHosts.ts b/frontend/src/api/proxyHosts.ts
index 70ea6e06..d8a3dd23 100644
--- a/frontend/src/api/proxyHosts.ts
+++ b/frontend/src/api/proxyHosts.ts
@@ -42,11 +42,17 @@ export interface ProxyHost {
enabled: boolean;
certificate_id?: number | null;
certificate?: Certificate | null;
- access_list_id?: number | null;
- security_header_profile_id?: number | null;
+ access_list_id?: number | string | null;
+ access_list?: {
+ uuid: string;
+ name: string;
+ description: string;
+ type: string;
+ } | null;
+ security_header_profile_id?: number | string | null;
dns_provider_id?: number | null;
security_header_profile?: {
- id: number;
+ id?: number;
uuid: string;
name: string;
description: string;
diff --git a/frontend/src/api/user.ts b/frontend/src/api/user.ts
index d3cd3f11..0477d6c5 100644
--- a/frontend/src/api/user.ts
+++ b/frontend/src/api/user.ts
@@ -6,7 +6,8 @@ export interface UserProfile {
email: string
name: string
role: string
- api_key: string
+ has_api_key: boolean
+ api_key_masked: string
}
/**
@@ -24,8 +25,15 @@ export const getProfile = async (): Promise => {
* @returns Promise resolving to object containing the new API key
* @throws {AxiosError} If regeneration fails
*/
-export const regenerateApiKey = async (): Promise<{ api_key: string }> => {
- const response = await client.post('/user/api-key')
+export interface RegenerateApiKeyResponse {
+ message: string
+ has_api_key: boolean
+ api_key_masked: string
+ api_key_updated: string
+}
+
+export const regenerateApiKey = async (): Promise => {
+ const response = await client.post('/user/api-key')
return response.data
}
diff --git a/frontend/src/api/users.test.ts b/frontend/src/api/users.test.ts
index 6ff9baa8..09f014de 100644
--- a/frontend/src/api/users.test.ts
+++ b/frontend/src/api/users.test.ts
@@ -50,7 +50,7 @@ describe('users api', () => {
it('creates, invites, updates, and deletes users', async () => {
mockedClient.post
.mockResolvedValueOnce({ data: { id: 3, uuid: 'u3', email: 'c@example.com', name: 'C', role: 'user', enabled: true, permission_mode: 'allow_all', created_at: '', updated_at: '' } })
- .mockResolvedValueOnce({ data: { id: 4, uuid: 'u4', email: 'invite@example.com', role: 'user', invite_token: 'token', invite_url: 'https://charon.example.com/accept-invite?token=token', email_sent: true, expires_at: '' } })
+ .mockResolvedValueOnce({ data: { id: 4, uuid: 'u4', email: 'invite@example.com', role: 'user', invite_token_masked: '********', invite_url: '[REDACTED]', email_sent: true, expires_at: '' } })
mockedClient.put.mockResolvedValueOnce({ data: { message: 'updated' } })
mockedClient.delete.mockResolvedValueOnce({ data: { message: 'deleted' } })
@@ -61,7 +61,7 @@ describe('users api', () => {
const invite = await inviteUser({ email: 'invite@example.com', role: 'user' })
expect(mockedClient.post).toHaveBeenCalledWith('/users/invite', { email: 'invite@example.com', role: 'user' })
- expect(invite.invite_token).toBe('token')
+ expect(invite.invite_token_masked).toBe('********')
await updateUser(3, { enabled: false })
expect(mockedClient.put).toHaveBeenCalledWith('/users/3', { enabled: false })
diff --git a/frontend/src/api/users.ts b/frontend/src/api/users.ts
index 12d708e7..e9aebc27 100644
--- a/frontend/src/api/users.ts
+++ b/frontend/src/api/users.ts
@@ -44,8 +44,8 @@ export interface InviteUserResponse {
uuid: string
email: string
role: string
- invite_token: string
- invite_url: string
+ invite_token_masked: string
+ invite_url?: string
email_sent: boolean
expires_at: string
}
diff --git a/frontend/src/components/AccessListSelector.tsx b/frontend/src/components/AccessListSelector.tsx
index 960b3157..282bde45 100644
--- a/frontend/src/components/AccessListSelector.tsx
+++ b/frontend/src/components/AccessListSelector.tsx
@@ -9,27 +9,117 @@ import {
} from './ui/Select';
interface AccessListSelectorProps {
- value: number | null;
- onChange: (id: number | null) => void;
+ value: number | string | null;
+ onChange: (id: number | string | null) => void;
+}
+
+function resolveAccessListToken(
+ value: number | string | null | undefined,
+ accessLists?: Array<{ id?: number | string; uuid?: string }>
+): string {
+ if (value === null || value === undefined) {
+ return 'none';
+ }
+
+ if (typeof value === 'number') {
+ return `id:${value}`;
+ }
+
+ const trimmed = value.trim();
+ if (trimmed === '') {
+ return 'none';
+ }
+
+ if (trimmed.startsWith('id:')) {
+ return trimmed;
+ }
+
+ if (trimmed.startsWith('uuid:')) {
+ const uuid = trimmed.slice(5);
+ const matchingACL = accessLists?.find((acl) => acl.uuid === uuid);
+ const matchingToken = matchingACL ? getOptionToken(matchingACL) : null;
+ return matchingToken ?? trimmed;
+ }
+
+ if (/^\d+$/.test(trimmed)) {
+ const parsed = Number.parseInt(trimmed, 10);
+ return `id:${parsed}`;
+ }
+
+ const matchingACL = accessLists?.find((acl) => acl.uuid === trimmed);
+ const matchingToken = matchingACL ? getOptionToken(matchingACL) : null;
+ return matchingToken ?? `uuid:${trimmed}`;
+}
+
+function getOptionToken(acl: { id?: number | string; uuid?: string }): string | null {
+ if (typeof acl.id === 'number' && Number.isFinite(acl.id)) {
+ return `id:${acl.id}`;
+ }
+
+ if (typeof acl.id === 'string') {
+ const trimmed = acl.id.trim();
+ if (trimmed !== '' && /^\d+$/.test(trimmed)) {
+ const parsed = Number.parseInt(trimmed, 10);
+ if (!Number.isNaN(parsed)) {
+ return `id:${parsed}`;
+ }
+ }
+ }
+
+ if (acl.uuid) {
+ return `uuid:${acl.uuid}`;
+ }
+
+ return null;
}
export default function AccessListSelector({ value, onChange }: AccessListSelectorProps) {
const { data: accessLists } = useAccessLists();
- const selectedACL = accessLists?.find((acl) => acl.id === value);
+ const selectedToken = resolveAccessListToken(value, accessLists);
+ const selectedACL = accessLists?.find((acl) => getOptionToken(acl) === selectedToken);
- // Convert between component's string-based value and the prop's number|null
- const selectValue = value === null || value === undefined ? 'none' : String(value);
+ // Keep select value stable for both numeric-ID and UUID-only payload shapes.
+ const selectValue = selectedToken;
const handleValueChange = (newValue: string) => {
if (newValue === 'none') {
onChange(null);
- } else {
- const numericId = parseInt(newValue, 10);
- if (!isNaN(numericId)) {
+ return;
+ }
+
+ if (newValue.startsWith('id:')) {
+ const numericId = Number.parseInt(newValue.slice(3), 10);
+ if (!Number.isNaN(numericId)) {
onChange(numericId);
}
+ return;
}
+
+ if (newValue.startsWith('uuid:')) {
+ const selectedUUID = newValue.slice(5);
+ const matchingACL = accessLists?.find((acl) => acl.uuid === selectedUUID);
+ const matchingToken = matchingACL ? getOptionToken(matchingACL) : null;
+
+ if (matchingToken?.startsWith('id:')) {
+ const numericId = Number.parseInt(matchingToken.slice(3), 10);
+ if (!Number.isNaN(numericId)) {
+ onChange(numericId);
+ return;
+ }
+ }
+
+ onChange(selectedUUID);
+ return;
+ }
+
+ if (/^\d+$/.test(newValue)) {
+ const numericId = Number.parseInt(newValue, 10);
+ onChange(numericId);
+ return;
+ }
+
+ onChange(newValue);
};
return (
@@ -49,11 +139,18 @@ export default function AccessListSelector({ value, onChange }: AccessListSelect
No Access Control (Public)
{accessLists
?.filter((acl) => acl.enabled)
- .map((acl) => (
-
- {acl.name} ({acl.type.replace('_', ' ')})
-
- ))}
+ .map((acl) => {
+ const optionToken = getOptionToken(acl);
+ if (!optionToken) {
+ return null;
+ }
+
+ return (
+
+ {acl.name} ({acl.type.replace('_', ' ')})
+
+ );
+ })}
diff --git a/frontend/src/components/CredentialManager.tsx b/frontend/src/components/CredentialManager.tsx
index becfcfb4..1e2c4c5f 100644
--- a/frontend/src/components/CredentialManager.tsx
+++ b/frontend/src/components/CredentialManager.tsx
@@ -271,7 +271,7 @@ export default function CredentialManager({
{/* Delete Confirmation Dialog */}
{deleteConfirm !== null && (
- setDeleteConfirm(null)}>
+ setDeleteConfirm(null)}>
{t('credentials.deleteConfirm', 'Delete Credential?')}
diff --git a/frontend/src/components/ProxyHostForm.tsx b/frontend/src/components/ProxyHostForm.tsx
index 86eee761..cfdbeb28 100644
--- a/frontend/src/components/ProxyHostForm.tsx
+++ b/frontend/src/components/ProxyHostForm.tsx
@@ -101,9 +101,12 @@ interface ProxyHostFormProps {
onCancel: () => void
}
-export default function ProxyHostForm({ host, onSubmit, onCancel }: ProxyHostFormProps) {
- type ProxyHostFormState = Partial & { addUptime?: boolean; uptimeInterval?: number; uptimeMaxRetries?: number }
- const [formData, setFormData] = useState({
+function buildInitialFormData(host?: ProxyHost): Partial & {
+ addUptime?: boolean
+ uptimeInterval?: number
+ uptimeMaxRetries?: number
+} {
+ return {
name: host?.name || '',
domain_names: host?.domain_names || '',
forward_scheme: host?.forward_scheme || 'http',
@@ -120,10 +123,143 @@ export default function ProxyHostForm({ host, onSubmit, onCancel }: ProxyHostFor
advanced_config: host?.advanced_config || '',
enabled: host?.enabled ?? true,
certificate_id: host?.certificate_id,
- access_list_id: host?.access_list_id,
- security_header_profile_id: host?.security_header_profile_id,
+ access_list_id: host?.access_list?.uuid ?? host?.access_list_id,
+ security_header_profile_id: host?.security_header_profile?.uuid ?? host?.security_header_profile_id,
dns_provider_id: host?.dns_provider_id || null,
- })
+ }
+}
+
+function normalizeNullableID(value: unknown): number | null | undefined {
+ if (value === undefined) {
+ return undefined
+ }
+
+ if (value === null) {
+ return null
+ }
+
+ if (typeof value === 'number') {
+ return Number.isFinite(value) ? value : null
+ }
+
+ if (typeof value === 'string') {
+ const trimmed = value.trim()
+ if (trimmed === '') {
+ return null
+ }
+
+ if (!/^\d+$/.test(trimmed)) {
+ return undefined
+ }
+
+ const parsed = Number.parseInt(trimmed, 10)
+ return Number.isNaN(parsed) ? undefined : parsed
+ }
+
+ return undefined
+}
+
+function normalizeAccessListReference(value: unknown): number | string | null | undefined {
+ const numericValue = normalizeNullableID(value)
+ if (numericValue !== undefined) {
+ return numericValue
+ }
+
+ if (typeof value !== 'string') {
+ return undefined
+ }
+
+ const trimmed = value.trim()
+ return trimmed === '' ? null : trimmed
+}
+
+function normalizeSecurityHeaderReference(value: unknown): number | string | null | undefined {
+ const numericValue = normalizeNullableID(value)
+ if (numericValue !== undefined) {
+ return numericValue
+ }
+
+ if (typeof value !== 'string') {
+ return undefined
+ }
+
+ const trimmed = value.trim()
+ return trimmed === '' ? null : trimmed
+}
+
+function resolveSelectToken(value: number | string | null | undefined): string {
+ if (value === null || value === undefined) {
+ return 'none'
+ }
+
+ if (typeof value === 'number') {
+ return `id:${value}`
+ }
+
+ const trimmed = value.trim()
+ if (trimmed === '') {
+ return 'none'
+ }
+
+ if (trimmed.startsWith('id:') || trimmed.startsWith('uuid:')) {
+ return trimmed
+ }
+
+ if (/^\d+$/.test(trimmed)) {
+ const parsed = Number.parseInt(trimmed, 10)
+ return `id:${parsed}`
+ }
+
+ return `uuid:${trimmed}`
+}
+
+function resolveTokenToFormValue(value: string): number | string | null {
+ if (value === 'none') {
+ return null
+ }
+
+ if (value.startsWith('id:')) {
+ const parsed = Number.parseInt(value.slice(3), 10)
+ return Number.isNaN(parsed) ? null : parsed
+ }
+
+ if (value.startsWith('uuid:')) {
+ return value.slice(5)
+ }
+
+ if (/^\d+$/.test(value)) {
+ const parsed = Number.parseInt(value, 10)
+ return Number.isNaN(parsed) ? value : parsed
+ }
+
+ return value
+}
+
+function getEntityToken(entity: { id?: number; uuid?: string }): string | null {
+ if (typeof entity.id === 'number' && Number.isFinite(entity.id)) {
+ return `id:${entity.id}`
+ }
+
+ if (entity.uuid) {
+ return `uuid:${entity.uuid}`
+ }
+
+ return null
+}
+
+export default function ProxyHostForm({ host, onSubmit, onCancel }: ProxyHostFormProps) {
+ type ProxyHostFormState = Omit, 'access_list_id' | 'security_header_profile_id'> & {
+ access_list_id?: number | string | null
+ security_header_profile_id?: number | string | null
+ addUptime?: boolean
+ uptimeInterval?: number
+ uptimeMaxRetries?: number
+ }
+ const [formData, setFormData] = useState(buildInitialFormData(host))
+
+ useEffect(() => {
+ setFormData(buildInitialFormData(host))
+ }, [host?.uuid])
// Charon internal IP for config helpers (previously CPMP internal IP)
const [charonInternalIP, setCharonInternalIP] = useState('')
@@ -420,7 +556,14 @@ export default function ProxyHostForm({ host, onSubmit, onCancel }: ProxyHostFor
// strip temporary uptime-only flags from payload by destructuring
const { addUptime: _addUptime, uptimeInterval: _uptimeInterval, uptimeMaxRetries: _uptimeMaxRetries, ...payloadWithoutUptime } = payload as ProxyHostFormState
void _addUptime; void _uptimeInterval; void _uptimeMaxRetries;
- const res = await onSubmit(payloadWithoutUptime)
+
+ const submitPayload: Partial = {
+ ...payloadWithoutUptime,
+ access_list_id: normalizeAccessListReference(payloadWithoutUptime.access_list_id),
+ security_header_profile_id: normalizeSecurityHeaderReference(payloadWithoutUptime.security_header_profile_id),
+ }
+
+ const res = await onSubmit(submitPayload)
// if user asked to add uptime, request server to sync monitors
if (addUptime) {
@@ -508,15 +651,15 @@ export default function ProxyHostForm({ host, onSubmit, onCancel }: ProxyHostFor
// Try to apply the preset logic (auto-populate or prompt)
tryApplyPreset(detectedPreset)
- setFormData({
- ...formData,
+ setFormData(prev => ({
+ ...prev,
forward_host: host,
forward_port: port,
forward_scheme: 'http',
domain_names: newDomainNames,
application: detectedPreset,
- websocket_support: needsWebsockets || formData.websocket_support,
- })
+ websocket_support: needsWebsockets || prev.websocket_support,
+ }))
}
}
@@ -651,7 +794,11 @@ export default function ProxyHostForm({ host, onSubmit, onCancel }: ProxyHostFor
Troubleshooting: Ensure Docker is running and the socket is accessible.
- If running in a container, mount /var/run/docker.sock.
+ If running in a container, mount /var/run/docker.sock and
+ ensure the container has access to the Docker socket group
+ (e.g., group_add in
+ Compose or --group-add with
+ Docker CLI).
@@ -820,7 +967,7 @@ export default function ProxyHostForm({ host, onSubmit, onCancel }: ProxyHostFor
{/* Access Control List */}
setFormData(prev => ({ ...prev, access_list_id: id }))}
/>
@@ -832,41 +979,58 @@ export default function ProxyHostForm({ host, onSubmit, onCancel }: ProxyHostFor
{
- const value = e === "0" ? null : parseInt(e) || null
- setFormData(prev => ({ ...prev, security_header_profile_id: value }))
+ value={resolveSelectToken(formData.security_header_profile_id as number | string | null | undefined)}
+ onValueChange={(value) => {
+ setFormData(prev => ({
+ ...prev,
+ security_header_profile_id: resolveTokenToFormValue(value),
+ }))
}}
>
- None (No Security Headers)
+ None (No Security Headers)
{securityProfiles
?.filter(p => p.is_preset)
.sort((a, b) => a.security_score - b.security_score)
- .map(profile => (
-
- {profile.name} (Score: {profile.security_score}/100)
-
- ))}
+ .map(profile => {
+ const optionToken = getEntityToken(profile)
+ if (!optionToken) {
+ return null
+ }
+
+ return (
+
+ {profile.name} (Score: {profile.security_score}/100)
+
+ )
+ })}
{(securityProfiles?.filter(p => !p.is_preset) || []).length > 0 && (
<>
{(securityProfiles || [])
.filter(p => !p.is_preset)
- .map(profile => (
-
- {profile.name} (Score: {profile.security_score}/100)
-
- ))}
+ .map(profile => {
+ const optionToken = getEntityToken(profile)
+ if (!optionToken) {
+ return null
+ }
+
+ return (
+
+ {profile.name} (Score: {profile.security_score}/100)
+
+ )
+ })}
>
)}
{formData.security_header_profile_id && (() => {
- const selected = securityProfiles?.find(p => p.id === formData.security_header_profile_id)
+ const selectedToken = resolveSelectToken(formData.security_header_profile_id)
+ const selected = securityProfiles?.find(p => getEntityToken(p) === selectedToken)
if (!selected) return null
return (
@@ -885,7 +1049,8 @@ export default function ProxyHostForm({ host, onSubmit, onCancel }: ProxyHostFor
{/* Mobile App Compatibility Warning for Strict/Paranoid profiles */}
{formData.security_header_profile_id && (() => {
- const selected = securityProfiles?.find(p => p.id === formData.security_header_profile_id)
+ const selectedToken = resolveSelectToken(formData.security_header_profile_id)
+ const selected = securityProfiles?.find(p => getEntityToken(p) === selectedToken)
if (!selected) return null
const isRestrictive = selected.preset_type === 'strict' || selected.preset_type === 'paranoid'
diff --git a/frontend/src/components/__tests__/AccessListSelector-token-coverage.test.tsx b/frontend/src/components/__tests__/AccessListSelector-token-coverage.test.tsx
new file mode 100644
index 00000000..fdb48b3b
--- /dev/null
+++ b/frontend/src/components/__tests__/AccessListSelector-token-coverage.test.tsx
@@ -0,0 +1,100 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import { render, screen } from '@testing-library/react';
+import userEvent from '@testing-library/user-event';
+import AccessListSelector from '../AccessListSelector';
+import * as useAccessListsHook from '../../hooks/useAccessLists';
+
+vi.mock('../../hooks/useAccessLists');
+
+vi.mock('../ui/Select', () => {
+ const findText = (children: React.ReactNode): string => {
+ if (typeof children === 'string') {
+ return children;
+ }
+
+ if (Array.isArray(children)) {
+ return children.map((child) => findText(child)).join(' ');
+ }
+
+ if (children && typeof children === 'object' && 'props' in children) {
+ const node = children as { props?: { children?: React.ReactNode } };
+ return findText(node.props?.children);
+ }
+
+ return '';
+ };
+
+ const Select = ({ value, onValueChange, children }: { value?: string; onValueChange?: (value: string) => void; children?: React.ReactNode }) => {
+ const text = findText(children);
+ const isAccessList = text.includes('No Access Control (Public)');
+
+ return (
+
+ {isAccessList && (
+ <>
+
{value}
+
onValueChange?.('uuid:acl-uuid-7')}>emit-uuid-token
+
onValueChange?.('123')}>emit-numeric-token
+
onValueChange?.('custom-token')}>emit-custom-token
+ >
+ )}
+ {children}
+
+ );
+ };
+
+ const SelectTrigger = ({ children, ...rest }: React.ComponentProps<'button'>) => {children} ;
+ const SelectContent = ({ children }: { children?: React.ReactNode }) => {children}
;
+ const SelectItem = ({ children }: { value: string; children?: React.ReactNode }) => {children}
;
+ const SelectValue = ({ placeholder }: { placeholder?: string }) => {placeholder} ;
+
+ return {
+ Select,
+ SelectTrigger,
+ SelectContent,
+ SelectItem,
+ SelectValue,
+ };
+});
+
+describe('AccessListSelector token coverage branches', () => {
+ beforeEach(() => {
+ vi.mocked(useAccessListsHook.useAccessLists).mockReturnValue({
+ data: [
+ {
+ id: 7,
+ uuid: 'acl-uuid-7',
+ name: 'ACL Seven',
+ description: 'Coverage ACL',
+ type: 'whitelist',
+ enabled: true,
+ },
+ ],
+ } as unknown as ReturnType);
+ });
+
+ it('normalizes whitespace and prefixed UUID values in resolver', () => {
+ const onChange = vi.fn();
+ const { rerender } = render( );
+
+ expect(screen.getByTestId('access-list-select-value')).toHaveTextContent('none');
+
+ rerender( );
+ expect(screen.getByTestId('access-list-select-value')).toHaveTextContent('id:7');
+ });
+
+ it('maps emitted UUID, numeric, and fallback tokens through handleValueChange', async () => {
+ const onChange = vi.fn();
+ const user = userEvent.setup();
+
+ render( );
+
+ await user.click(screen.getByRole('button', { name: 'emit-uuid-token' }));
+ await user.click(screen.getByRole('button', { name: 'emit-numeric-token' }));
+ await user.click(screen.getByRole('button', { name: 'emit-custom-token' }));
+
+ expect(onChange).toHaveBeenNthCalledWith(1, 7);
+ expect(onChange).toHaveBeenNthCalledWith(2, 123);
+ expect(onChange).toHaveBeenNthCalledWith(3, 'custom-token');
+ });
+});
diff --git a/frontend/src/components/__tests__/AccessListSelector.test.tsx b/frontend/src/components/__tests__/AccessListSelector.test.tsx
index 4ba93d3d..15c06316 100644
--- a/frontend/src/components/__tests__/AccessListSelector.test.tsx
+++ b/frontend/src/components/__tests__/AccessListSelector.test.tsx
@@ -126,4 +126,312 @@ describe('AccessListSelector', () => {
expect(screen.getByText('This is selected')).toBeInTheDocument();
expect(screen.getByText(/Countries: US,CA/)).toBeInTheDocument();
});
+
+ it('should normalize string numeric ACL ids to numeric selection values', async () => {
+ const mockLists = [
+ {
+ id: '7',
+ uuid: 'uuid-7',
+ name: 'String ID ACL',
+ description: 'String-based ID shape from API',
+ type: 'whitelist',
+ ip_rules: '[]',
+ country_codes: '',
+ local_network_only: false,
+ enabled: true,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ },
+ ];
+
+ vi.mocked(useAccessListsHook.useAccessLists).mockReturnValue({
+ data: mockLists as unknown as AccessList[],
+ } as unknown as ReturnType);
+
+ const mockOnChange = vi.fn();
+ const Wrapper = createWrapper();
+ const user = userEvent.setup();
+
+ render(
+
+
+
+ );
+
+ await user.click(screen.getByRole('combobox', { name: /Access Control List/i }));
+ await user.click(await screen.findByRole('option', { name: 'String ID ACL (whitelist)' }));
+
+ expect(mockOnChange).toHaveBeenCalledWith(7);
+ });
+
+ it('keeps a UUID-leading-digit selection stable in the trigger', () => {
+ const uuid = '9f63b8c9-1d26-4b2f-a2c8-001122334455';
+ const mockLists = [
+ {
+ id: undefined,
+ uuid,
+ name: 'UUID Digit Prefix ACL',
+ description: 'UUID-only ACL payload',
+ type: 'whitelist',
+ ip_rules: '[]',
+ country_codes: '',
+ local_network_only: false,
+ enabled: true,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ },
+ ];
+
+ vi.mocked(useAccessListsHook.useAccessLists).mockReturnValue({
+ data: mockLists as unknown as AccessList[],
+ } as unknown as ReturnType);
+
+ const mockOnChange = vi.fn();
+ const Wrapper = createWrapper();
+
+ render(
+
+
+
+ );
+
+ expect(screen.getByRole('combobox', { name: /Access Control List/i })).toHaveTextContent('UUID Digit Prefix ACL');
+ });
+
+ it('maps UUID form values to ID-backed option tokens when available', () => {
+ const uuid = 'acl-uuid-42';
+ const mockLists = [
+ {
+ id: 42,
+ uuid,
+ name: 'Hybrid ACL',
+ description: 'Includes UUID and numeric ID',
+ type: 'whitelist',
+ ip_rules: '[]',
+ country_codes: '',
+ local_network_only: false,
+ enabled: true,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ },
+ ];
+
+ vi.mocked(useAccessListsHook.useAccessLists).mockReturnValue({
+ data: mockLists as unknown as AccessList[],
+ } as unknown as ReturnType);
+
+ const mockOnChange = vi.fn();
+ const Wrapper = createWrapper();
+
+ render(
+
+
+
+ );
+
+ expect(screen.getByRole('combobox', { name: /Access Control List/i })).toHaveTextContent('Hybrid ACL');
+ });
+
+ it('handles prefixed and numeric-string form values as stable selections', () => {
+ const mockLists = [
+ {
+ id: 7,
+ uuid: 'uuid-7',
+ name: 'ACL Seven',
+ description: 'Has both ID and UUID',
+ type: 'whitelist',
+ ip_rules: '[]',
+ country_codes: '',
+ local_network_only: false,
+ enabled: true,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ },
+ ];
+
+ vi.mocked(useAccessListsHook.useAccessLists).mockReturnValue({
+ data: mockLists as unknown as AccessList[],
+ } as unknown as ReturnType);
+
+ const Wrapper = createWrapper();
+ const mockOnChange = vi.fn();
+
+ const { rerender } = render(
+
+
+
+ );
+
+ expect(screen.getByRole('combobox', { name: /Access Control List/i })).toHaveTextContent('ACL Seven');
+
+ rerender(
+
+
+
+ );
+
+ expect(screen.getByRole('combobox', { name: /Access Control List/i })).toHaveTextContent('ACL Seven');
+ });
+
+ it('treats whitespace-only values as no selection', () => {
+ const mockLists = [
+ {
+ id: 1,
+ uuid: 'uuid-1',
+ name: 'ACL One',
+ description: 'Baseline ACL',
+ type: 'whitelist',
+ ip_rules: '[]',
+ country_codes: '',
+ local_network_only: false,
+ enabled: true,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ },
+ ];
+
+ vi.mocked(useAccessListsHook.useAccessLists).mockReturnValue({
+ data: mockLists as unknown as AccessList[],
+ } as unknown as ReturnType);
+
+ const Wrapper = createWrapper();
+ const mockOnChange = vi.fn();
+
+ render(
+
+
+
+ );
+
+ expect(screen.getByRole('combobox', { name: /Access Control List/i })).toHaveTextContent('No Access Control (Public)');
+ });
+
+ it('resolves prefixed uuid values to matching id-backed ACL tokens', () => {
+ const mockLists = [
+ {
+ id: 42,
+ uuid: 'acl-uuid-42',
+ name: 'Resolved ACL',
+ description: 'UUID maps to numeric token',
+ type: 'whitelist',
+ ip_rules: '[]',
+ country_codes: '',
+ local_network_only: false,
+ enabled: true,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ },
+ ];
+
+ vi.mocked(useAccessListsHook.useAccessLists).mockReturnValue({
+ data: mockLists as unknown as AccessList[],
+ } as unknown as ReturnType);
+
+ const Wrapper = createWrapper();
+ const mockOnChange = vi.fn();
+
+ render(
+
+
+
+ );
+
+ expect(screen.getByRole('combobox', { name: /Access Control List/i })).toHaveTextContent('Resolved ACL');
+ });
+
+ it('supports UUID-only ACL selection and local-network details', async () => {
+ const uuidOnly = '9f63b8c9-1d26-4b2f-a2c8-001122334455';
+ const mockLists = [
+ {
+ id: undefined,
+ uuid: uuidOnly,
+ name: 'Local UUID ACL',
+ description: 'Only internal network',
+ type: 'whitelist',
+ ip_rules: '[]',
+ country_codes: '',
+ local_network_only: true,
+ enabled: true,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ },
+ ];
+
+ vi.mocked(useAccessListsHook.useAccessLists).mockReturnValue({
+ data: mockLists as unknown as AccessList[],
+ } as unknown as ReturnType);
+
+ const mockOnChange = vi.fn();
+ const Wrapper = createWrapper();
+ const user = userEvent.setup();
+
+ const { rerender } = render(
+
+
+
+ );
+
+ await user.click(screen.getByRole('combobox', { name: /Access Control List/i }));
+ await user.click(await screen.findByRole('option', { name: 'Local UUID ACL (whitelist)' }));
+
+ expect(mockOnChange).toHaveBeenCalledWith(uuidOnly);
+
+ rerender(
+
+
+
+ );
+
+ expect(screen.getByText(/Local Network Only \(RFC1918\)/)).toBeInTheDocument();
+ });
+
+ it('skips malformed ACL entries without id or uuid tokens', async () => {
+ const mockLists = [
+ {
+ id: 4,
+ uuid: 'valid-uuid-4',
+ name: 'Valid ACL',
+ description: 'valid option',
+ type: 'whitelist',
+ ip_rules: '[]',
+ country_codes: '',
+ local_network_only: false,
+ enabled: true,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ },
+ {
+ id: undefined,
+ uuid: undefined,
+ name: 'Malformed ACL',
+ description: 'should be ignored',
+ type: 'whitelist',
+ ip_rules: '[]',
+ country_codes: '',
+ local_network_only: false,
+ enabled: true,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ },
+ ];
+
+ vi.mocked(useAccessListsHook.useAccessLists).mockReturnValue({
+ data: mockLists as unknown as AccessList[],
+ } as unknown as ReturnType);
+
+ const mockOnChange = vi.fn();
+ const Wrapper = createWrapper();
+ const user = userEvent.setup();
+
+ render(
+
+
+
+ );
+
+ await user.click(screen.getByRole('combobox', { name: /Access Control List/i }));
+
+ expect(screen.getByRole('option', { name: 'Valid ACL (whitelist)' })).toBeInTheDocument();
+ expect(screen.queryByRole('option', { name: 'Malformed ACL (whitelist)' })).not.toBeInTheDocument();
+ });
});
diff --git a/frontend/src/components/__tests__/ProxyHostForm-dns.test.tsx b/frontend/src/components/__tests__/ProxyHostForm-dns.test.tsx
index 30c0aead..77bb92a5 100644
--- a/frontend/src/components/__tests__/ProxyHostForm-dns.test.tsx
+++ b/frontend/src/components/__tests__/ProxyHostForm-dns.test.tsx
@@ -5,6 +5,7 @@ import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import ProxyHostForm from '../ProxyHostForm'
import type { ProxyHost } from '../../api/proxyHosts'
import { mockRemoteServers } from '../../test/mockData'
+import { toast } from 'react-hot-toast'
// Mock the hooks
vi.mock('../../hooks/useRemoteServers', () => ({
@@ -103,6 +104,36 @@ vi.mock('../../hooks/useDNSDetection', () => ({
})),
}))
+vi.mock('../DNSDetectionResult', () => ({
+ DNSDetectionResult: ({ result, onUseSuggested, onSelectManually }: {
+ result?: { suggested_provider?: { id: number; name: string } }
+ isLoading: boolean
+ onUseSuggested: (provider: { id: number; name: string }) => void
+ onSelectManually: () => void
+ }) => (
+
+ {
+ if (result?.suggested_provider) {
+ onUseSuggested(result.suggested_provider)
+ }
+ }}
+ >
+ Use Suggested DNS
+
+ Select Manually DNS
+
+ ),
+}))
+
+vi.mock('react-hot-toast', () => ({
+ toast: {
+ success: vi.fn(),
+ error: vi.fn(),
+ },
+}))
+
vi.mock('../../api/dnsDetection', () => ({
detectDNSProvider: vi.fn().mockResolvedValue({
domain: 'example.com',
@@ -436,4 +467,139 @@ describe('ProxyHostForm - DNS Provider Integration', () => {
})
})
})
+
+ describe('DNS Detection Branches', () => {
+ it('skips detection call when wildcard has provider set and no suggestion', async () => {
+ vi.useFakeTimers()
+ const { useDetectDNSProvider } = await import('../../hooks/useDNSDetection')
+ const detectSpy = vi.fn().mockResolvedValue({
+ domain: 'example.com',
+ detected: false,
+ nameservers: [],
+ confidence: 'none',
+ })
+
+ vi.mocked(useDetectDNSProvider).mockReturnValue({
+ mutateAsync: detectSpy,
+ isPending: false,
+ data: undefined,
+ reset: vi.fn(),
+ } as unknown as ReturnType)
+
+ const existingHost: ProxyHost = {
+ uuid: 'test-uuid-skip-detect',
+ name: 'Existing Wildcard Provider',
+ domain_names: '*.example.com',
+ forward_scheme: 'http',
+ forward_host: '192.168.1.100',
+ forward_port: 8080,
+ ssl_forced: true,
+ http2_support: true,
+ hsts_enabled: true,
+ hsts_subdomains: false,
+ block_exploits: true,
+ websocket_support: false,
+ application: 'none',
+ locations: [],
+ enabled: true,
+ dns_provider_id: 1,
+ created_at: '2025-01-01T00:00:00Z',
+ updated_at: '2025-01-01T00:00:00Z',
+ }
+
+ renderWithClient(
+
+ )
+
+ await vi.advanceTimersByTimeAsync(600)
+
+ expect(detectSpy).not.toHaveBeenCalled()
+ vi.useRealTimers()
+ })
+
+ it('logs detection errors when detectProvider rejects', async () => {
+ const { useDetectDNSProvider } = await import('../../hooks/useDNSDetection')
+ const detectSpy = vi.fn().mockRejectedValue(new Error('detect failed'))
+ const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
+
+ vi.mocked(useDetectDNSProvider).mockReturnValue({
+ mutateAsync: detectSpy,
+ isPending: false,
+ data: undefined,
+ reset: vi.fn(),
+ } as unknown as ReturnType)
+
+ renderWithClient( )
+
+ const domainInput = screen.getByPlaceholderText('example.com, www.example.com')
+ await userEvent.type(domainInput, '*.example.com')
+
+ await new Promise((resolve) => setTimeout(resolve, 700))
+
+ await waitFor(() => {
+ expect(errorSpy).toHaveBeenCalledWith('DNS detection failed:', expect.any(Error))
+ })
+
+ errorSpy.mockRestore()
+ })
+
+ it('auto-selects high confidence suggestion and emits success toast', async () => {
+ const { useDetectDNSProvider } = await import('../../hooks/useDNSDetection')
+ vi.mocked(useDetectDNSProvider).mockReturnValue({
+ mutateAsync: vi.fn().mockResolvedValue({}),
+ isPending: false,
+ data: {
+ domain: 'example.com',
+ detected: true,
+ nameservers: ['ns1.cloudflare.com'],
+ confidence: 'high',
+ suggested_provider: { id: 1, name: 'Cloudflare' },
+ },
+ reset: vi.fn(),
+ } as unknown as ReturnType)
+
+ renderWithClient( )
+
+ await userEvent.type(screen.getByPlaceholderText('My Service'), 'Auto Select')
+ await userEvent.type(screen.getByPlaceholderText('example.com, www.example.com'), '*.example.com')
+ await userEvent.type(screen.getByLabelText(/^Host$/), '192.168.1.100')
+ await userEvent.clear(screen.getByLabelText(/^Port$/))
+ await userEvent.type(screen.getByLabelText(/^Port$/), '8080')
+ await userEvent.click(screen.getByText('Save'))
+
+ await waitFor(() => {
+ expect(toast.success).toHaveBeenCalledWith('Auto-selected: Cloudflare')
+ expect(mockOnSubmit).toHaveBeenCalledWith(expect.objectContaining({ dns_provider_id: 1 }))
+ })
+ })
+
+ it('handles suggested and manual selection callbacks from detection result card', async () => {
+ const { useDetectDNSProvider } = await import('../../hooks/useDNSDetection')
+ vi.mocked(useDetectDNSProvider).mockReturnValue({
+ mutateAsync: vi.fn().mockResolvedValue({}),
+ isPending: false,
+ data: {
+ domain: 'example.com',
+ detected: true,
+ nameservers: ['ns1.cloudflare.com'],
+ confidence: 'medium',
+ suggested_provider: { id: 1, name: 'Cloudflare' },
+ },
+ reset: vi.fn(),
+ } as unknown as ReturnType)
+
+ renderWithClient( )
+
+ await userEvent.type(screen.getByPlaceholderText('example.com, www.example.com'), '*.example.com')
+
+ await waitFor(() => {
+ expect(screen.getByRole('button', { name: 'Use Suggested DNS' })).toBeInTheDocument()
+ })
+
+ await userEvent.click(screen.getByRole('button', { name: 'Use Suggested DNS' }))
+ expect(toast.success).toHaveBeenCalledWith('Selected: Cloudflare')
+
+ await userEvent.click(screen.getByRole('button', { name: 'Select Manually DNS' }))
+ })
+ })
})
diff --git a/frontend/src/components/__tests__/ProxyHostForm-dropdown-changes.test.tsx b/frontend/src/components/__tests__/ProxyHostForm-dropdown-changes.test.tsx
index d76109de..fa97d136 100644
--- a/frontend/src/components/__tests__/ProxyHostForm-dropdown-changes.test.tsx
+++ b/frontend/src/components/__tests__/ProxyHostForm-dropdown-changes.test.tsx
@@ -6,6 +6,8 @@ import ProxyHostForm from '../ProxyHostForm'
import type { ProxyHost } from '../../api/proxyHosts'
import type { AccessList } from '../../api/accessLists'
import type { SecurityHeaderProfile } from '../../api/securityHeaders'
+import { useAccessLists } from '../../hooks/useAccessLists'
+import { useSecurityHeaderProfiles } from '../../hooks/useSecurityHeaders'
// Mock all required hooks
vi.mock('../../hooks/useRemoteServers', () => ({
@@ -179,6 +181,18 @@ describe('ProxyHostForm Dropdown Change Bug Fix', () => {
beforeEach(() => {
mockOnSubmit = vi.fn<(data: Partial) => Promise>()
mockOnCancel = vi.fn<() => void>()
+
+ vi.mocked(useAccessLists).mockReturnValue({
+ data: mockAccessLists,
+ isLoading: false,
+ error: null,
+ } as unknown as ReturnType)
+
+ vi.mocked(useSecurityHeaderProfiles).mockReturnValue({
+ data: mockSecurityProfiles,
+ isLoading: false,
+ error: null,
+ } as unknown as ReturnType)
})
it('allows changing ACL selection after initial selection', async () => {
@@ -410,4 +424,386 @@ describe('ProxyHostForm Dropdown Change Bug Fix', () => {
)
})
})
+
+ it('persists null to value transitions for ACL and security headers in edit flow', async () => {
+ const user = userEvent.setup()
+ const Wrapper = createWrapper()
+
+ const existingHostWithNulls: ProxyHost = {
+ uuid: 'host-uuid-null-fields',
+ name: 'Existing Null Fields',
+ domain_names: 'existing-null.com',
+ forward_scheme: 'http',
+ forward_host: 'localhost',
+ forward_port: 8080,
+ ssl_forced: true,
+ http2_support: true,
+ hsts_enabled: true,
+ hsts_subdomains: true,
+ block_exploits: true,
+ websocket_support: false,
+ enable_standard_headers: true,
+ application: 'none',
+ advanced_config: '',
+ enabled: true,
+ locations: [],
+ certificate_id: null,
+ access_list_id: null,
+ security_header_profile_id: null,
+ dns_provider_id: null,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ }
+
+ render(
+
+
+
+ )
+
+ const aclTrigger = screen.getByRole('combobox', { name: /Access Control List/i })
+ await user.click(aclTrigger)
+ await user.click(await screen.findByRole('option', { name: /Office Network/i }))
+
+ const headersTrigger = screen.getByRole('combobox', { name: /Security Headers/i })
+ await user.click(headersTrigger)
+ await user.click(await screen.findByRole('option', { name: /Strict Security/i }))
+
+ await user.click(screen.getByRole('button', { name: /Save/i }))
+
+ await waitFor(() => {
+ expect(mockOnSubmit).toHaveBeenCalledWith(
+ expect.objectContaining({
+ access_list_id: 1,
+ security_header_profile_id: 2,
+ })
+ )
+ })
+ })
+
+ it('resets ACL/security header form state when editing target host changes', async () => {
+ const user = userEvent.setup()
+ const Wrapper = createWrapper()
+
+ const firstHost: ProxyHost = {
+ uuid: 'host-uuid-first',
+ name: 'First Host',
+ domain_names: 'first.example.com',
+ forward_scheme: 'http',
+ forward_host: 'localhost',
+ forward_port: 8080,
+ ssl_forced: true,
+ http2_support: true,
+ hsts_enabled: true,
+ hsts_subdomains: true,
+ block_exploits: true,
+ websocket_support: false,
+ enable_standard_headers: true,
+ application: 'none',
+ advanced_config: '',
+ enabled: true,
+ locations: [],
+ certificate_id: null,
+ access_list_id: 1,
+ security_header_profile_id: 1,
+ dns_provider_id: null,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ }
+
+ const secondHost: ProxyHost = {
+ ...firstHost,
+ uuid: 'host-uuid-second',
+ name: 'Second Host',
+ domain_names: 'second.example.com',
+ access_list_id: null,
+ security_header_profile_id: null,
+ }
+
+ const { rerender } = render(
+
+
+
+ )
+
+ // Mutate first host state in the form before switching targets.
+ await user.click(screen.getByRole('combobox', { name: /Access Control List/i }))
+ await user.click(await screen.findByRole('option', { name: /VPN Users/i }))
+
+ await user.click(screen.getByRole('combobox', { name: /Security Headers/i }))
+ await user.click(await screen.findByRole('option', { name: /Strict Security/i }))
+
+ rerender(
+
+
+
+ )
+
+ await user.click(screen.getByRole('button', { name: /Save/i }))
+
+ await waitFor(() => {
+ expect(mockOnSubmit).toHaveBeenCalledWith(
+ expect.objectContaining({
+ access_list_id: null,
+ security_header_profile_id: null,
+ })
+ )
+ })
+ })
+
+ it('persists ACL and security header selections with UUID-only option payloads', async () => {
+ const user = userEvent.setup()
+ const Wrapper = createWrapper()
+
+ const uuidOnlyAccessLists = [
+ {
+ ...mockAccessLists[0],
+ id: undefined,
+ uuid: '9f63b8c9-1d26-4b2f-a2c8-001122334455',
+ name: 'UUID Office Network',
+ },
+ ]
+
+ const uuidOnlySecurityProfiles = [
+ {
+ ...mockSecurityProfiles[0],
+ id: undefined,
+ uuid: 'profile-uuid-only',
+ name: 'UUID Basic Security',
+ },
+ ]
+
+ vi.mocked(useAccessLists).mockReturnValue({
+ data: uuidOnlyAccessLists as unknown as AccessList[],
+ isLoading: false,
+ error: null,
+ } as unknown as ReturnType)
+
+ vi.mocked(useSecurityHeaderProfiles).mockReturnValue({
+ data: uuidOnlySecurityProfiles as unknown as SecurityHeaderProfile[],
+ isLoading: false,
+ error: null,
+ } as unknown as ReturnType)
+
+ render(
+
+
+
+ )
+
+ await user.type(screen.getByLabelText(/^Name/), 'UUID Test Service')
+ await user.type(screen.getByLabelText(/Domain Names/), 'test.com')
+ await user.type(screen.getByLabelText(/^Host$/), 'localhost')
+ await user.clear(screen.getByLabelText(/^Port$/))
+ await user.type(screen.getByLabelText(/^Port$/), '8080')
+
+ const aclTrigger = screen.getByRole('combobox', { name: /Access Control List/i })
+ await user.click(aclTrigger)
+ await user.click(await screen.findByRole('option', { name: /UUID Office Network/i }))
+
+ const headersTrigger = screen.getByRole('combobox', { name: /Security Headers/i })
+ await user.click(headersTrigger)
+ await user.click(await screen.findByRole('option', { name: /UUID Basic Security/i }))
+
+ expect(screen.getByRole('combobox', { name: /Access Control List/i })).toHaveTextContent('UUID Office Network')
+ expect(screen.getByRole('combobox', { name: /Security Headers/i })).toHaveTextContent('UUID Basic Security')
+
+ await user.click(screen.getByRole('button', { name: /Save/i }))
+
+ await waitFor(() => {
+ expect(mockOnSubmit).toHaveBeenCalled()
+ })
+ })
+
+ it('submits numeric ACL value when ACL option id is a numeric string', async () => {
+ const user = userEvent.setup()
+ const Wrapper = createWrapper()
+
+ const stringIdAccessLists = [
+ {
+ ...mockAccessLists[0],
+ id: '2',
+ uuid: 'acl-string-id-2',
+ name: 'String ID ACL',
+ },
+ ]
+
+ vi.mocked(useAccessLists).mockReturnValue({
+ data: stringIdAccessLists as unknown as AccessList[],
+ isLoading: false,
+ error: null,
+ } as unknown as ReturnType)
+
+ render(
+
+
+
+ )
+
+ await user.type(screen.getByLabelText(/^Name/), 'String ID ACL Host')
+ await user.type(screen.getByLabelText(/Domain Names/), 'test.com')
+ await user.type(screen.getByLabelText(/^Host$/), 'localhost')
+ await user.clear(screen.getByLabelText(/^Port$/))
+ await user.type(screen.getByLabelText(/^Port$/), '8080')
+
+ await user.click(screen.getByRole('combobox', { name: /Access Control List/i }))
+ await user.click(await screen.findByRole('option', { name: /String ID ACL/i }))
+
+ await user.click(screen.getByRole('combobox', { name: /Security Headers/i }))
+ await user.click(await screen.findByRole('option', { name: /Basic Security/i }))
+
+ await user.click(screen.getByRole('button', { name: /Save/i }))
+
+ await waitFor(() => {
+ expect(mockOnSubmit).toHaveBeenCalledWith(
+ expect.objectContaining({
+ access_list_id: 2,
+ security_header_profile_id: 1,
+ })
+ )
+ })
+ })
+
+ it('initializes edit mode from nested ACL and security header UUID references', async () => {
+ const user = userEvent.setup()
+ const Wrapper = createWrapper()
+
+ const existingHost = {
+ uuid: 'host-uuid-nested-ref',
+ name: 'Nested Ref Host',
+ domain_names: 'test.com',
+ forward_scheme: 'http',
+ forward_host: 'localhost',
+ forward_port: 8080,
+ ssl_forced: true,
+ http2_support: true,
+ hsts_enabled: true,
+ hsts_subdomains: true,
+ block_exploits: true,
+ websocket_support: false,
+ enable_standard_headers: true,
+ application: 'none',
+ advanced_config: '',
+ enabled: true,
+ locations: [],
+ certificate_id: null,
+ access_list_id: null,
+ security_header_profile_id: null,
+ access_list: { uuid: 'acl-uuid-2' },
+ security_header_profile: { uuid: 'profile-uuid-2' },
+ dns_provider_id: null,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ } as unknown as ProxyHost
+
+ render(
+
+
+
+ )
+
+ expect(screen.getByRole('combobox', { name: /Access Control List/i })).toHaveTextContent('VPN Users')
+
+ await user.click(screen.getByRole('button', { name: /Save/i }))
+
+ await waitFor(() => {
+ expect(mockOnSubmit).toHaveBeenCalledWith(
+ expect.objectContaining({
+ access_list_id: 'acl-uuid-2',
+ security_header_profile_id: 'profile-uuid-2',
+ })
+ )
+ })
+ })
+
+ it('normalizes empty and numeric-string ACL/security references on submit', async () => {
+ const user = userEvent.setup()
+ const Wrapper = createWrapper()
+
+ const hostWithStringReferences = {
+ uuid: 'host-uuid-string-refs',
+ name: 'String Ref Host',
+ domain_names: 'test.com',
+ forward_scheme: 'http',
+ forward_host: 'localhost',
+ forward_port: 8080,
+ ssl_forced: true,
+ http2_support: true,
+ hsts_enabled: true,
+ hsts_subdomains: true,
+ block_exploits: true,
+ websocket_support: false,
+ enable_standard_headers: true,
+ application: 'none',
+ advanced_config: '',
+ enabled: true,
+ locations: [],
+ certificate_id: null,
+ access_list_id: '2',
+ security_header_profile_id: ' ',
+ dns_provider_id: null,
+ created_at: '2024-01-01',
+ updated_at: '2024-01-01',
+ } as unknown as ProxyHost
+
+ render(
+
+
+
+ )
+
+ expect(screen.getByRole('combobox', { name: /Access Control List/i })).toHaveTextContent('VPN Users')
+
+ await user.click(screen.getByRole('button', { name: /Save/i }))
+
+ await waitFor(() => {
+ expect(mockOnSubmit).toHaveBeenCalledWith(
+ expect.objectContaining({
+ access_list_id: 2,
+ security_header_profile_id: null,
+ })
+ )
+ })
+ })
+
+ it('filters out security profiles missing both id and uuid', async () => {
+ const user = userEvent.setup()
+ const Wrapper = createWrapper()
+
+ vi.mocked(useSecurityHeaderProfiles).mockReturnValue({
+ data: [
+ {
+ ...mockSecurityProfiles[0],
+ id: undefined,
+ uuid: undefined,
+ name: 'Broken Profile',
+ },
+ {
+ ...mockSecurityProfiles[1],
+ id: 2,
+ uuid: 'profile-uuid-2',
+ name: 'Strict Security',
+ },
+ ] as unknown as SecurityHeaderProfile[],
+ isLoading: false,
+ error: null,
+ } as unknown as ReturnType)
+
+ render(
+
+
+
+ )
+
+ await user.type(screen.getByLabelText(/^Name/), 'Filter Profile Host')
+ await user.type(screen.getByLabelText(/Domain Names/), 'test.com')
+ await user.type(screen.getByLabelText(/^Host$/), 'localhost')
+ await user.clear(screen.getByLabelText(/^Port$/))
+ await user.type(screen.getByLabelText(/^Port$/), '8080')
+
+ await user.click(screen.getByRole('combobox', { name: /Security Headers/i }))
+
+ expect(screen.queryByRole('option', { name: /Broken Profile/i })).not.toBeInTheDocument()
+ expect(screen.getByRole('option', { name: /Strict Security/i })).toBeInTheDocument()
+ })
})
diff --git a/frontend/src/components/__tests__/ProxyHostForm-token-coverage.test.tsx b/frontend/src/components/__tests__/ProxyHostForm-token-coverage.test.tsx
new file mode 100644
index 00000000..a659b8af
--- /dev/null
+++ b/frontend/src/components/__tests__/ProxyHostForm-token-coverage.test.tsx
@@ -0,0 +1,248 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import { render, screen, waitFor } from '@testing-library/react';
+import userEvent from '@testing-library/user-event';
+import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
+import ProxyHostForm from '../ProxyHostForm';
+import type { ProxyHost } from '../../api/proxyHosts';
+
+vi.mock('../../hooks/useRemoteServers', () => ({
+ useRemoteServers: vi.fn(() => ({
+ servers: [],
+ isLoading: false,
+ error: null,
+ })),
+}));
+
+vi.mock('../../hooks/useDocker', () => ({
+ useDocker: vi.fn(() => ({
+ containers: [],
+ isLoading: false,
+ error: null,
+ refetch: vi.fn(),
+ })),
+}));
+
+vi.mock('../../hooks/useDomains', () => ({
+ useDomains: vi.fn(() => ({
+ domains: [{ uuid: 'domain-1', name: 'test.com' }],
+ createDomain: vi.fn().mockResolvedValue({}),
+ isLoading: false,
+ error: null,
+ })),
+}));
+
+vi.mock('../../hooks/useCertificates', () => ({
+ useCertificates: vi.fn(() => ({
+ certificates: [],
+ isLoading: false,
+ error: null,
+ })),
+}));
+
+vi.mock('../../hooks/useDNSDetection', () => ({
+ useDetectDNSProvider: vi.fn(() => ({
+ mutateAsync: vi.fn(),
+ isPending: false,
+ data: undefined,
+ reset: vi.fn(),
+ })),
+}));
+
+vi.mock('../../hooks/useAccessLists', () => ({
+ useAccessLists: vi.fn(() => ({
+ data: [
+ {
+ id: 1,
+ uuid: 'acl-uuid-1',
+ name: 'Office Network',
+ description: 'Office IP range',
+ type: 'whitelist',
+ enabled: true,
+ },
+ ],
+ isLoading: false,
+ error: null,
+ })),
+}));
+
+vi.mock('../../hooks/useSecurityHeaders', () => ({
+ useSecurityHeaderProfiles: vi.fn(() => ({
+ data: [
+ {
+ id: 1,
+ uuid: 'profile-uuid-1',
+ name: 'Basic Security',
+ description: 'Basic security headers',
+ is_preset: true,
+ preset_type: 'basic',
+ security_score: 60,
+ },
+ {
+ id: undefined,
+ uuid: undefined,
+ name: 'Malformed Custom',
+ description: 'Should be skipped in options map',
+ is_preset: false,
+ preset_type: 'custom',
+ security_score: 10,
+ },
+ ],
+ isLoading: false,
+ error: null,
+ })),
+}));
+
+vi.mock('../ui/Select', () => {
+ const findText = (children: React.ReactNode): string => {
+ if (typeof children === 'string') {
+ return children;
+ }
+
+ if (Array.isArray(children)) {
+ return children.map((child) => findText(child)).join(' ');
+ }
+
+ if (children && typeof children === 'object' && 'props' in children) {
+ const node = children as { props?: { children?: React.ReactNode } };
+ return findText(node.props?.children);
+ }
+
+ return '';
+ };
+
+ const Select = ({ value, onValueChange, children }: { value?: string; onValueChange?: (value: string) => void; children?: React.ReactNode }) => {
+ const text = findText(children);
+ const isSecurityHeaders = text.includes('None (No Security Headers)');
+
+ return (
+
+ {isSecurityHeaders && (
+ <>
+
{value}
+
onValueChange?.('42')}>emit-security-plain-numeric
+
onValueChange?.('custom-header-token')}>emit-security-custom
+ >
+ )}
+ {children}
+
+ );
+ };
+
+ const SelectTrigger = ({ children, ...rest }: React.ComponentProps<'button'>) => {children} ;
+ const SelectContent = ({ children }: { children?: React.ReactNode }) => {children}
;
+ const SelectItem = ({ children }: { value: string; children?: React.ReactNode }) => {children}
;
+ const SelectValue = () => ;
+
+ return {
+ Select,
+ SelectTrigger,
+ SelectContent,
+ SelectItem,
+ SelectValue,
+ };
+});
+
+vi.stubGlobal('fetch', vi.fn(() => Promise.resolve({ json: () => Promise.resolve({ internal_ip: '127.0.0.1' }) })));
+
+const createWrapper = () => {
+ const queryClient = new QueryClient({
+ defaultOptions: {
+ queries: { retry: false },
+ mutations: { retry: false },
+ },
+ });
+
+ return ({ children }: { children: React.ReactNode }) => (
+ {children}
+ );
+};
+
+const fillRequiredFields = async () => {
+ await userEvent.type(screen.getByLabelText(/^Name/), 'Coverage Host');
+ await userEvent.type(screen.getByLabelText(/Domain Names/), 'test.com');
+ await userEvent.type(screen.getByLabelText(/^Host$/), 'localhost');
+ await userEvent.clear(screen.getByLabelText(/^Port$/));
+ await userEvent.type(screen.getByLabelText(/^Port$/), '8080');
+};
+
+describe('ProxyHostForm token coverage branches', () => {
+ const onCancel = vi.fn();
+
+ beforeEach(() => {
+ vi.clearAllMocks();
+ });
+
+ it('normalizes prefixed and numeric-string security header IDs', async () => {
+ const onSubmit = vi.fn<(data: Partial) => Promise>().mockResolvedValue();
+ const Wrapper = createWrapper();
+
+ const { rerender } = render(
+
+
+
+ );
+
+ expect(screen.getByTestId('security-select-value')).toHaveTextContent('id:7');
+
+ rerender(
+
+
+
+ );
+
+ expect(screen.getByTestId('security-select-value')).toHaveTextContent('id:12');
+ });
+
+ it('converts plain numeric and custom security tokens on submit', async () => {
+ const onSubmit = vi.fn<(data: Partial) => Promise>().mockResolvedValue();
+ const Wrapper = createWrapper();
+
+ render(
+
+
+
+ );
+
+ await fillRequiredFields();
+
+ await userEvent.click(screen.getByRole('button', { name: 'emit-security-plain-numeric' }));
+ await userEvent.click(screen.getByRole('button', { name: /Save/i }));
+
+ await waitFor(() => {
+ expect(onSubmit).toHaveBeenCalledWith(
+ expect.objectContaining({ security_header_profile_id: 42 })
+ );
+ });
+
+ onSubmit.mockClear();
+
+ await userEvent.click(screen.getByRole('button', { name: 'emit-security-custom' }));
+ await userEvent.click(screen.getByRole('button', { name: /Save/i }));
+
+ await waitFor(() => {
+ expect(onSubmit).toHaveBeenCalledWith(
+ expect.objectContaining({ security_header_profile_id: 'custom-header-token' })
+ );
+ });
+ });
+});
diff --git a/frontend/src/components/__tests__/ProxyHostForm-uptime.test.tsx b/frontend/src/components/__tests__/ProxyHostForm-uptime.test.tsx
index 0dd6eacb..5d77e3c5 100644
--- a/frontend/src/components/__tests__/ProxyHostForm-uptime.test.tsx
+++ b/frontend/src/components/__tests__/ProxyHostForm-uptime.test.tsx
@@ -109,4 +109,39 @@ describe('ProxyHostForm Add Uptime flow', () => {
expect(submittedPayload).not.toHaveProperty('uptimeInterval')
expect(submittedPayload).not.toHaveProperty('uptimeMaxRetries')
})
+
+ it('shows uptime sync fallback error toast when monitor request fails with empty string error', async () => {
+ const onSubmit = vi.fn(() => Promise.resolve())
+ const onCancel = vi.fn()
+
+ const uptime = await import('../../api/uptime')
+ const syncMock = uptime.syncMonitors as unknown as import('vitest').Mock
+ syncMock.mockRejectedValueOnce('')
+
+ const toastModule = await import('react-hot-toast')
+ const errorSpy = vi.spyOn(toastModule.toast, 'error')
+
+ const queryClient = new QueryClient({ defaultOptions: { queries: { retry: false } } })
+
+ render(
+
+
+
+ )
+
+ await userEvent.type(screen.getByPlaceholderText('My Service'), 'My Service')
+ await userEvent.type(screen.getByPlaceholderText('example.com, www.example.com'), 'example.com')
+ await userEvent.type(screen.getByLabelText(/^Host$/), '127.0.0.1')
+ await userEvent.clear(screen.getByLabelText(/^Port$/))
+ await userEvent.type(screen.getByLabelText(/^Port$/), '8080')
+
+ await userEvent.click(screen.getByLabelText(/Add Uptime monitoring for this host/i))
+ await userEvent.click(screen.getByRole('button', { name: 'Save' }))
+
+ await waitFor(() => {
+ expect(onSubmit).toHaveBeenCalled()
+ expect(syncMock).toHaveBeenCalled()
+ expect(errorSpy).toHaveBeenCalledWith('Failed to request uptime creation')
+ })
+ })
})
diff --git a/frontend/src/components/__tests__/ProxyHostForm.test.tsx b/frontend/src/components/__tests__/ProxyHostForm.test.tsx
index 60ad09f5..c579f072 100644
--- a/frontend/src/components/__tests__/ProxyHostForm.test.tsx
+++ b/frontend/src/components/__tests__/ProxyHostForm.test.tsx
@@ -123,6 +123,13 @@ vi.mock('../../api/proxyHosts', () => ({
testProxyHostConnection: vi.fn(),
}))
+vi.mock('react-hot-toast', () => ({
+ toast: {
+ success: vi.fn(),
+ error: vi.fn(),
+ },
+}))
+
// Mock global fetch for health API
const mockFetch = vi.fn()
vi.stubGlobal('fetch', mockFetch)
@@ -552,6 +559,51 @@ describe('ProxyHostForm', () => {
})
})
+ it('closes preset overwrite modal when cancel is clicked', async () => {
+ const existingHost = {
+ uuid: 'test-uuid',
+ name: 'CancelOverwrite',
+ domain_names: 'test.example.com',
+ forward_scheme: 'http',
+ forward_host: '192.168.1.2',
+ forward_port: 8080,
+ advanced_config: '{"handler":"headers","request":{"set":{"X-Test":"value"}}}',
+ advanced_config_backup: '',
+ ssl_forced: true,
+ http2_support: true,
+ hsts_enabled: true,
+ hsts_subdomains: false,
+ block_exploits: true,
+ websocket_support: true,
+ application: 'none' as const,
+ locations: [],
+ enabled: true,
+ created_at: '2025-01-01',
+ updated_at: '2025-01-01',
+ }
+
+ renderWithClient(
+
+ )
+
+ await selectComboboxOption(/Application Preset/i, 'Plex - Media server with remote access')
+
+ await waitFor(() => {
+ expect(screen.getByText('Confirm Preset Overwrite')).toBeInTheDocument()
+ })
+
+ const modal = screen.getByText('Confirm Preset Overwrite').closest('div')?.parentElement
+ if (!modal) {
+ throw new Error('Preset overwrite modal not found')
+ }
+
+ await userEvent.click(within(modal).getByRole('button', { name: 'Cancel' }))
+
+ await waitFor(() => {
+ expect(screen.queryByText('Confirm Preset Overwrite')).not.toBeInTheDocument()
+ })
+ })
+
it('restores previous advanced_config from backup when clicking restore', async () => {
const existingHost = {
uuid: 'test-uuid',
@@ -700,6 +752,83 @@ describe('ProxyHostForm', () => {
expect(screen.getByText('Copied!')).toBeInTheDocument()
})
})
+
+ it('copies plex trusted proxy IP helper snippet', async () => {
+ const mockWriteText = vi.fn().mockResolvedValue(undefined)
+ Object.assign(navigator, {
+ clipboard: { writeText: mockWriteText },
+ })
+
+ renderWithClient(
+
+ )
+
+ await userEvent.type(screen.getByPlaceholderText('example.com, www.example.com'), 'apps.mydomain.com')
+
+ await selectComboboxOption(/Application Preset/i, 'Plex - Media server with remote access')
+ await userEvent.click(screen.getAllByRole('button', { name: /Copy/i })[1])
+
+ await waitFor(() => {
+ expect(mockWriteText).toHaveBeenCalledWith('192.168.1.50')
+ })
+ })
+
+ it('copies jellyfin trusted proxy IP helper snippet', async () => {
+ const mockWriteText = vi.fn().mockResolvedValue(undefined)
+ Object.assign(navigator, {
+ clipboard: { writeText: mockWriteText },
+ })
+
+ renderWithClient(
+
+ )
+
+ await userEvent.type(screen.getByPlaceholderText('example.com, www.example.com'), 'apps.mydomain.com')
+ await selectComboboxOption(/Application Preset/i, 'Jellyfin - Open source media server')
+ await userEvent.click(screen.getByRole('button', { name: /Copy/i }))
+
+ await waitFor(() => {
+ expect(mockWriteText).toHaveBeenCalledWith('192.168.1.50')
+ })
+ })
+
+ it('copies home assistant helper yaml snippet', async () => {
+ const mockWriteText = vi.fn().mockResolvedValue(undefined)
+ Object.assign(navigator, {
+ clipboard: { writeText: mockWriteText },
+ })
+
+ renderWithClient(
+
+ )
+
+ await userEvent.type(screen.getByPlaceholderText('example.com, www.example.com'), 'apps.mydomain.com')
+ await selectComboboxOption(/Application Preset/i, 'Home Assistant - Home automation')
+ await userEvent.click(screen.getByRole('button', { name: /Copy/i }))
+
+ await waitFor(() => {
+ expect(mockWriteText).toHaveBeenCalledWith('http:\n use_x_forwarded_for: true\n trusted_proxies:\n - 192.168.1.50')
+ })
+ })
+
+ it('copies nextcloud helper php snippet', async () => {
+ const mockWriteText = vi.fn().mockResolvedValue(undefined)
+ Object.assign(navigator, {
+ clipboard: { writeText: mockWriteText },
+ })
+
+ renderWithClient(
+
+ )
+
+ await userEvent.type(screen.getByPlaceholderText('example.com, www.example.com'), 'apps.mydomain.com')
+ await selectComboboxOption(/Application Preset/i, 'Nextcloud - File sync and share')
+ await userEvent.click(screen.getByRole('button', { name: /Copy/i }))
+
+ await waitFor(() => {
+ expect(mockWriteText).toHaveBeenCalledWith("'trusted_proxies' => ['192.168.1.50'],\n'overwriteprotocol' => 'https',")
+ })
+ })
})
describe('Security Options', () => {
@@ -943,6 +1072,85 @@ describe('ProxyHostForm', () => {
await selectComboboxOption(/Security Headers/i, 'Custom Profile (Score: 70/100)')
expect(screen.getByRole('combobox', { name: /Security Headers/i })).toHaveTextContent('Custom Profile')
})
+
+ it('resolves prefixed security header id tokens from existing host values', async () => {
+ const existingHost = {
+ uuid: 'security-token-host',
+ name: 'Token Host',
+ domain_names: 'token.example.com',
+ forward_scheme: 'http',
+ forward_host: '127.0.0.1',
+ forward_port: 80,
+ ssl_forced: true,
+ http2_support: true,
+ hsts_enabled: true,
+ hsts_subdomains: true,
+ block_exploits: true,
+ websocket_support: true,
+ application: 'none' as const,
+ locations: [],
+ enabled: true,
+ security_header_profile_id: 'id:100',
+ created_at: '2025-01-01',
+ updated_at: '2025-01-01',
+ }
+
+ renderWithClient(
+
+ )
+
+ expect(screen.getByRole('combobox', { name: /Security Headers/i })).toHaveTextContent('Strict Profile')
+ })
+
+ it('resolves numeric-string security header ids from existing host values', async () => {
+ const existingHost = {
+ uuid: 'security-numeric-host',
+ name: 'Numeric Host',
+ domain_names: 'numeric.example.com',
+ forward_scheme: 'http',
+ forward_host: '127.0.0.1',
+ forward_port: 80,
+ ssl_forced: true,
+ http2_support: true,
+ hsts_enabled: true,
+ hsts_subdomains: true,
+ block_exploits: true,
+ websocket_support: true,
+ application: 'none' as const,
+ locations: [],
+ enabled: true,
+ security_header_profile_id: '100',
+ created_at: '2025-01-01',
+ updated_at: '2025-01-01',
+ }
+
+ renderWithClient(
+
+ )
+
+ expect(screen.getByRole('combobox', { name: /Security Headers/i })).toHaveTextContent('Strict Profile')
+ })
+
+ it('skips non-preset profiles that have neither id nor uuid', async () => {
+ const { useSecurityHeaderProfiles } = await import('../../hooks/useSecurityHeaders')
+ vi.mocked(useSecurityHeaderProfiles).mockReturnValue({
+ data: [
+ { id: 100, name: 'Strict Profile', description: 'Very strict', security_score: 90, is_preset: true, preset_type: 'strict' },
+ { name: 'Invalid Custom', description: 'No identity token', security_score: 10, is_preset: false },
+ ],
+ isLoading: false,
+ error: null,
+ } as unknown as ReturnType)
+
+ renderWithClient(
+
+ )
+
+ await userEvent.click(screen.getByRole('combobox', { name: /Security Headers/i }))
+
+ expect(screen.queryByRole('option', { name: /Invalid Custom/i })).not.toBeInTheDocument()
+ })
+
})
describe('Edit Mode vs Create Mode', () => {
@@ -1232,12 +1440,17 @@ describe('ProxyHostForm', () => {
)
- await userEvent.type(screen.getByLabelText(/^Name/), 'Remote Mapping')
- await userEvent.type(screen.getByPlaceholderText('example.com, www.example.com'), 'remote.existing.com')
+ fireEvent.change(screen.getByLabelText(/^Name/), { target: { value: 'Remote Mapping' } })
+ fireEvent.change(screen.getByPlaceholderText('example.com, www.example.com'), { target: { value: 'remote.existing.com' } })
await selectComboboxOption('Source', 'Local Docker Registry (localhost)')
await selectComboboxOption('Containers', 'remote-app (nginx:latest)')
+ await waitFor(() => {
+ expect(screen.getByLabelText(/^Host$/)).toHaveValue('localhost')
+ expect(screen.getByLabelText(/^Port$/)).toHaveValue(18080)
+ })
+
await userEvent.click(screen.getByText('Save'))
await waitFor(() => {
@@ -1246,6 +1459,55 @@ describe('ProxyHostForm', () => {
forward_port: 18080,
}))
})
+ }, 15000)
+
+ it('updates domain using selected container when base domain changes', async () => {
+ const { useDocker } = await import('../../hooks/useDocker')
+ vi.mocked(useDocker).mockReturnValue({
+ containers: [
+ {
+ id: 'container-123',
+ names: ['my-app'],
+ image: 'nginx:latest',
+ state: 'running',
+ status: 'Up 2 hours',
+ network: 'bridge',
+ ip: '172.17.0.2',
+ ports: [{ private_port: 80, public_port: 8080, type: 'tcp' }],
+ },
+ ],
+ isLoading: false,
+ error: null,
+ refetch: vi.fn(),
+ })
+
+ await renderWithClientAct(
+
+ )
+
+ await selectComboboxOption('Source', 'Local (Docker Socket)')
+ await selectComboboxOption('Containers', 'my-app (nginx:latest)')
+ await selectComboboxOption(/Base Domain/i, 'existing.com')
+
+ expect(screen.getByLabelText(/Domain Names/i)).toHaveValue('my-app.existing.com')
+ })
+
+ it('prompts to save a new base domain when user enters a base domain directly', async () => {
+ localStorage.removeItem('charon_dont_ask_domain')
+ localStorage.removeItem('cpmp_dont_ask_domain')
+
+ await renderWithClientAct(
+
+ )
+
+ const domainInput = screen.getByPlaceholderText('example.com, www.example.com')
+ await userEvent.type(domainInput, 'brandnewdomain.com')
+ await userEvent.tab()
+
+ await waitFor(() => {
+ expect(screen.getByText('New Base Domain Detected')).toBeInTheDocument()
+ expect(screen.getByText('brandnewdomain.com')).toBeInTheDocument()
+ })
})
})
@@ -1343,4 +1605,32 @@ describe('ProxyHostForm', () => {
})
})
})
+
+ describe('Docker Connection Failed troubleshooting', () => {
+ it('renders supplemental group guidance when docker error is present', async () => {
+ const { useDocker } = await import('../../hooks/useDocker')
+ vi.mocked(useDocker).mockReturnValue({
+ containers: [],
+ isLoading: false,
+ error: new Error('Docker socket permission denied'),
+ refetch: vi.fn(),
+ })
+
+ await renderWithClientAct(
+
+ )
+
+ // Select Local Docker Socket source to trigger error panel
+ await selectComboboxOption('Source', 'Local (Docker Socket)')
+
+ await waitFor(() => {
+ expect(screen.getByText('Docker Connection Failed')).toBeInTheDocument()
+ })
+
+ expect(screen.getByText(/Troubleshooting:/)).toBeInTheDocument()
+ expect(screen.getByText(/Docker socket group/)).toBeInTheDocument()
+ expect(screen.getByText('group_add')).toBeInTheDocument()
+ expect(screen.getByText('--group-add')).toBeInTheDocument()
+ })
+ })
})
diff --git a/frontend/src/components/__tests__/SecurityNotificationSettingsModal.test.tsx b/frontend/src/components/__tests__/SecurityNotificationSettingsModal.test.tsx
index 61d09a15..52cb1c68 100644
--- a/frontend/src/components/__tests__/SecurityNotificationSettingsModal.test.tsx
+++ b/frontend/src/components/__tests__/SecurityNotificationSettingsModal.test.tsx
@@ -78,14 +78,15 @@ describe('Security Notification Settings on Notifications page', () => {
expect(document.querySelector('.fixed.inset-0')).toBeNull();
});
- it('keeps provider setup focused on the Discord webhook flow', async () => {
+ it('defaults to Discord webhook flow while exposing supported provider modes', async () => {
const user = userEvent.setup();
renderPage();
await user.click(await screen.findByTestId('add-provider-btn'));
const typeSelect = screen.getByTestId('provider-type') as HTMLSelectElement;
- expect(Array.from(typeSelect.options).map((option) => option.value)).toEqual(['discord']);
+ expect(Array.from(typeSelect.options).map((option) => option.value)).toEqual(['discord', 'gotify', 'webhook']);
+ expect(typeSelect.value).toBe('discord');
const webhookInput = screen.getByTestId('provider-url') as HTMLInputElement;
expect(webhookInput.placeholder).toContain('discord.com/api/webhooks');
diff --git a/frontend/src/components/ui/Select.tsx b/frontend/src/components/ui/Select.tsx
index 6f453f63..1b0893e0 100644
--- a/frontend/src/components/ui/Select.tsx
+++ b/frontend/src/components/ui/Select.tsx
@@ -83,7 +83,7 @@ const SelectContent = React.forwardRef<
= ({ children }) => {
}
}, [fetchSessionUser]);
- const logout = async () => {
+ const logout = useCallback(async () => {
invalidateAuthRequests();
localStorage.removeItem('charon_auth_token');
setAuthToken(null);
@@ -121,7 +121,7 @@ export const AuthProvider: FC<{ children: ReactNode }> = ({ children }) => {
} catch (error) {
console.error("Logout failed", error);
}
- };
+ }, [invalidateAuthRequests]);
const changePassword = async (oldPassword: string, newPassword: string) => {
try {
@@ -174,7 +174,7 @@ export const AuthProvider: FC<{ children: ReactNode }> = ({ children }) => {
window.removeEventListener(event, handleActivity);
});
};
- }, [user]);
+ }, [user, logout]);
return (
diff --git a/frontend/src/hooks/__tests__/useDocker.test.tsx b/frontend/src/hooks/__tests__/useDocker.test.tsx
index fe48c6fe..5ae6321d 100644
--- a/frontend/src/hooks/__tests__/useDocker.test.tsx
+++ b/frontend/src/hooks/__tests__/useDocker.test.tsx
@@ -152,6 +152,35 @@ describe('useDocker', () => {
expect(errorMessage).toContain('Docker is running');
});
+ it('extracts supplemental-group details from 503 error', async () => {
+ const mockError = {
+ response: {
+ status: 503,
+ data: {
+ error: 'Docker daemon unavailable',
+ details: 'Process groups do not include socket gid 988; run container with matching supplemental group (e.g., --group-add 988).'
+ }
+ }
+ };
+ vi.mocked(dockerApi.listContainers).mockRejectedValue(mockError);
+
+ const { result } = renderHook(() => useDocker('local'), {
+ wrapper: createWrapper(),
+ });
+
+ await waitFor(
+ () => {
+ expect(result.current.isLoading).toBe(false);
+ },
+ { timeout: 3000 }
+ );
+
+ expect(result.current.error).toBeTruthy();
+ const errorMessage = (result.current.error as Error)?.message;
+ expect(errorMessage).toContain('--group-add');
+ expect(errorMessage).toContain('supplemental group');
+ });
+
it('provides refetch function', async () => {
vi.mocked(dockerApi.listContainers).mockResolvedValue(mockContainers);
diff --git a/frontend/src/locales/de/translation.json b/frontend/src/locales/de/translation.json
index 33af5ccb..e40b3da1 100644
--- a/frontend/src/locales/de/translation.json
+++ b/frontend/src/locales/de/translation.json
@@ -423,7 +423,9 @@
"triggerCheck": "Sofortige Gesundheitsprüfung auslösen",
"healthCheckTriggered": "Gesundheitsprüfung ausgelöst",
"monitorDeleted": "Monitor gelöscht",
- "deleteConfirm": "Diesen Monitor löschen? Dies kann nicht rückgängig gemacht werden."
+ "deleteConfirm": "Diesen Monitor löschen? Dies kann nicht rückgängig gemacht werden.",
+ "pending": "PRÜFUNG...",
+ "pendingFirstCheck": "Warten auf erste Prüfung..."
},
"domains": {
"title": "Domänen",
@@ -768,6 +770,13 @@
"newTab": "Neuer Tab (Standard)",
"newWindow": "Neues Fenster",
"domainLinkBehaviorHelper": "Steuern Sie, wie Domain-Links in der Proxy-Hosts-Liste geöffnet werden.",
+ "keepaliveIdle": "Keepalive Idle (Optional)",
+ "keepaliveIdleHelper": "Optionale Caddy-Dauer (z. B. 2m, 30s). Leer lassen, um Backend-Standardwerte zu verwenden.",
+ "keepaliveIdleError": "Geben Sie eine gültige Dauer ein (z. B. 30s, 2m, 1h).",
+ "keepaliveCount": "Keepalive Count (Optional)",
+ "keepaliveCountHelper": "Optionale maximale Keepalive-Tests (1-1000). Leer lassen, um Backend-Standardwerte zu verwenden.",
+ "keepaliveCountError": "Geben Sie eine ganze Zahl zwischen 1 und 1000 ein.",
+ "keepaliveValidationFailed": "Keepalive-Einstellungen enthalten ungültige Werte.",
"languageHelper": "Wählen Sie Ihre bevorzugte Sprache. Änderungen werden sofort wirksam."
},
"applicationUrl": {
diff --git a/frontend/src/locales/en/translation.json b/frontend/src/locales/en/translation.json
index fb769b1d..04eca004 100644
--- a/frontend/src/locales/en/translation.json
+++ b/frontend/src/locales/en/translation.json
@@ -498,7 +498,9 @@
"monitorUrl": "URL",
"monitorTypeHttp": "HTTP",
"monitorTypeTcp": "TCP",
- "urlPlaceholder": "https://example.com or tcp://host:port"
+ "urlPlaceholder": "https://example.com or tcp://host:port",
+ "pending": "CHECKING...",
+ "pendingFirstCheck": "Waiting for first check..."
},
"domains": {
"title": "Domains",
@@ -542,6 +544,11 @@
"providerName": "Name",
"urlWebhook": "URL / Webhook",
"urlRequired": "URL is required",
+ "gotifyToken": "Gotify Token",
+ "gotifyTokenPlaceholder": "Enter new token",
+ "gotifyTokenWriteOnlyHint": "Token is write-only and only sent on save.",
+ "gotifyTokenStored": "Token saved. Leave blank to keep current token.",
+ "gotifyTokenKeepPlaceholder": "Leave blank to keep current token",
"invalidUrl": "Please enter a valid URL starting with http:// or https://",
"genericWebhook": "Generic Webhook",
"customWebhook": "Custom Webhook (JSON)",
@@ -876,6 +883,13 @@
"newTab": "New Tab (Default)",
"newWindow": "New Window",
"domainLinkBehaviorHelper": "Control how domain links open in the Proxy Hosts list.",
+ "keepaliveIdle": "Keepalive Idle (Optional)",
+ "keepaliveIdleHelper": "Optional Caddy duration (e.g., 2m, 30s). Leave blank to keep backend defaults.",
+ "keepaliveIdleError": "Enter a valid duration (for example: 30s, 2m, 1h).",
+ "keepaliveCount": "Keepalive Count (Optional)",
+ "keepaliveCountHelper": "Optional max keepalive probes (1-1000). Leave blank to keep backend defaults.",
+ "keepaliveCountError": "Enter a whole number between 1 and 1000.",
+ "keepaliveValidationFailed": "Keepalive settings contain invalid values.",
"languageHelper": "Select your preferred language. Changes take effect immediately."
},
"applicationUrl": {
diff --git a/frontend/src/locales/es/translation.json b/frontend/src/locales/es/translation.json
index d30ca0f2..a9067bbe 100644
--- a/frontend/src/locales/es/translation.json
+++ b/frontend/src/locales/es/translation.json
@@ -423,7 +423,9 @@
"triggerCheck": "Activar verificación de salud inmediata",
"healthCheckTriggered": "Verificación de salud activada",
"monitorDeleted": "Monitor eliminado",
- "deleteConfirm": "¿Eliminar este monitor? Esto no se puede deshacer."
+ "deleteConfirm": "¿Eliminar este monitor? Esto no se puede deshacer.",
+ "pending": "VERIFICANDO...",
+ "pendingFirstCheck": "Esperando primera verificación..."
},
"domains": {
"title": "Dominios",
@@ -768,6 +770,13 @@
"newTab": "Nueva Pestaña (Por defecto)",
"newWindow": "Nueva Ventana",
"domainLinkBehaviorHelper": "Controla cómo se abren los enlaces de dominio en la lista de Hosts Proxy.",
+ "keepaliveIdle": "Keepalive Idle (Opcional)",
+ "keepaliveIdleHelper": "Duración opcional de Caddy (por ejemplo, 2m, 30s). Déjelo vacío para mantener los valores predeterminados del backend.",
+ "keepaliveIdleError": "Ingrese una duración válida (por ejemplo: 30s, 2m, 1h).",
+ "keepaliveCount": "Keepalive Count (Opcional)",
+ "keepaliveCountHelper": "Número máximo opcional de sondeos keepalive (1-1000). Déjelo vacío para mantener los valores predeterminados del backend.",
+ "keepaliveCountError": "Ingrese un número entero entre 1 y 1000.",
+ "keepaliveValidationFailed": "La configuración de keepalive contiene valores no válidos.",
"languageHelper": "Selecciona tu idioma preferido. Los cambios surten efecto inmediatamente."
}, "applicationUrl": {
"title": "URL de aplicación",
diff --git a/frontend/src/locales/fr/translation.json b/frontend/src/locales/fr/translation.json
index ab379313..525cec3f 100644
--- a/frontend/src/locales/fr/translation.json
+++ b/frontend/src/locales/fr/translation.json
@@ -423,7 +423,9 @@
"triggerCheck": "Déclencher une vérification de santé immédiate",
"healthCheckTriggered": "Vérification de santé déclenchée",
"monitorDeleted": "Moniteur supprimé",
- "deleteConfirm": "Supprimer ce moniteur? Cette action est irréversible."
+ "deleteConfirm": "Supprimer ce moniteur? Cette action est irréversible.",
+ "pending": "VÉRIFICATION...",
+ "pendingFirstCheck": "En attente de la première vérification..."
},
"domains": {
"title": "Domaines",
@@ -768,6 +770,13 @@
"newTab": "Nouvel Onglet (Par défaut)",
"newWindow": "Nouvelle Fenêtre",
"domainLinkBehaviorHelper": "Contrôle comment les liens de domaine s'ouvrent dans la liste des Hôtes Proxy.",
+ "keepaliveIdle": "Keepalive Idle (Optionnel)",
+ "keepaliveIdleHelper": "Durée Caddy optionnelle (ex. 2m, 30s). Laissez vide pour conserver les valeurs par défaut du backend.",
+ "keepaliveIdleError": "Entrez une durée valide (par exemple : 30s, 2m, 1h).",
+ "keepaliveCount": "Keepalive Count (Optionnel)",
+ "keepaliveCountHelper": "Nombre maximal optionnel de sondes keepalive (1-1000). Laissez vide pour conserver les valeurs par défaut du backend.",
+ "keepaliveCountError": "Entrez un nombre entier entre 1 et 1000.",
+ "keepaliveValidationFailed": "Les paramètres keepalive contiennent des valeurs invalides.",
"languageHelper": "Sélectionnez votre langue préférée. Les modifications prennent effet immédiatement."
}, "applicationUrl": {
"title": "URL de l'application",
diff --git a/frontend/src/locales/zh/translation.json b/frontend/src/locales/zh/translation.json
index b74471c4..885d64b9 100644
--- a/frontend/src/locales/zh/translation.json
+++ b/frontend/src/locales/zh/translation.json
@@ -423,7 +423,9 @@
"triggerCheck": "触发即时健康检查",
"healthCheckTriggered": "健康检查已触发",
"monitorDeleted": "监控器已删除",
- "deleteConfirm": "删除此监控器?此操作无法撤销。"
+ "deleteConfirm": "删除此监控器?此操作无法撤销。",
+ "pending": "检查中...",
+ "pendingFirstCheck": "等待首次检查..."
},
"domains": {
"title": "域名",
@@ -768,6 +770,13 @@
"newTab": "新标签页(默认)",
"newWindow": "新窗口",
"domainLinkBehaviorHelper": "控制代理主机列表中的域名链接如何打开。",
+ "keepaliveIdle": "Keepalive Idle(可选)",
+ "keepaliveIdleHelper": "可选的 Caddy 时长(例如 2m、30s)。留空可使用后端默认值。",
+ "keepaliveIdleError": "请输入有效时长(例如:30s、2m、1h)。",
+ "keepaliveCount": "Keepalive Count(可选)",
+ "keepaliveCountHelper": "可选的 keepalive 最大探测次数(1-1000)。留空可使用后端默认值。",
+ "keepaliveCountError": "请输入 1 到 1000 之间的整数。",
+ "keepaliveValidationFailed": "keepalive 设置包含无效值。",
"languageHelper": "选择您的首选语言。更改立即生效。"
},
"applicationUrl": {
diff --git a/frontend/src/pages/Account.tsx b/frontend/src/pages/Account.tsx
index fa621ee3..571dde00 100644
--- a/frontend/src/pages/Account.tsx
+++ b/frontend/src/pages/Account.tsx
@@ -11,7 +11,7 @@ import { Skeleton } from '../components/ui/Skeleton'
import { toast } from '../utils/toast'
import { getProfile, regenerateApiKey, updateProfile } from '../api/user'
import { getSettings, updateSetting } from '../api/settings'
-import { Copy, RefreshCw, Shield, Mail, User, AlertTriangle, Key } from 'lucide-react'
+import { RefreshCw, Shield, Mail, User, AlertTriangle, Key } from 'lucide-react'
import { PasswordStrengthMeter } from '../components/PasswordStrengthMeter'
import { isValidEmail } from '../utils/validation'
import { useAuth } from '../hooks/useAuth'
@@ -242,13 +242,6 @@ export default function Account() {
}
}
- const copyApiKey = () => {
- if (profile?.api_key) {
- navigator.clipboard.writeText(profile.api_key)
- toast.success(t('account.apiKeyCopied'))
- }
- }
-
if (isLoadingProfile) {
return (
@@ -444,13 +437,10 @@ export default function Account() {
-
-
-
{
- if (!providerType) return false;
- return providerType.toLowerCase() === DISCORD_PROVIDER_TYPE;
-};
-
-const isNonDiscordProvider = (providerType: string | undefined): boolean => {
+const isSupportedProviderType = (providerType: string | undefined): providerType is SupportedNotificationProviderType => {
if (!providerType) {
return false;
}
- return providerType.toLowerCase() !== DISCORD_PROVIDER_TYPE;
+ return SUPPORTED_NOTIFICATION_PROVIDER_TYPES.includes(providerType.toLowerCase() as SupportedNotificationProviderType);
};
-const normalizeProviderType = (providerType: string | undefined): typeof DISCORD_PROVIDER_TYPE => {
- if (!providerType || providerType.toLowerCase() !== DISCORD_PROVIDER_TYPE) {
+// supportsJSONTemplates returns true if the provider type can use JSON templates
+const supportsJSONTemplates = (providerType: string | undefined): boolean => {
+ if (!providerType) return false;
+ const t = providerType.toLowerCase();
+ return t === 'discord' || t === 'gotify' || t === 'webhook';
+};
+
+const isUnsupportedProviderType = (providerType: string | undefined): boolean => !isSupportedProviderType(providerType);
+
+const normalizeProviderType = (providerType: string | undefined): SupportedNotificationProviderType => {
+ if (!isSupportedProviderType(providerType)) {
return DISCORD_PROVIDER_TYPE;
}
- return DISCORD_PROVIDER_TYPE;
+ return providerType.toLowerCase() as SupportedNotificationProviderType;
+};
+
+const normalizeProviderPayloadForSubmit = (data: Partial): Partial => {
+ const type = normalizeProviderType(data.type);
+ const payload: Partial = {
+ ...data,
+ type,
+ };
+
+ if (type === 'gotify') {
+ const normalizedToken = typeof payload.gotify_token === 'string' ? payload.gotify_token.trim() : '';
+
+ if (normalizedToken.length > 0) {
+ payload.token = normalizedToken;
+ } else {
+ delete payload.token;
+ }
+ } else {
+ delete payload.token;
+ }
+
+ delete payload.gotify_token;
+ return payload;
};
const defaultProviderValues: Partial = {
type: DISCORD_PROVIDER_TYPE,
enabled: true,
config: '',
+ gotify_token: '',
template: 'minimal',
notify_proxy_hosts: true,
notify_remote_servers: true,
@@ -64,7 +91,7 @@ const ProviderForm: FC<{
useEffect(() => {
// Reset form state per open/edit to avoid event checkbox leakage between runs.
const normalizedInitialData = initialData
- ? { ...defaultProviderValues, ...initialData, type: normalizeProviderType(initialData.type) }
+ ? { ...defaultProviderValues, ...initialData, type: normalizeProviderType(initialData.type), gotify_token: '' }
: defaultProviderValues;
reset(normalizedInitialData);
@@ -79,15 +106,16 @@ const ProviderForm: FC<{
setTestStatus('success');
setTimeout(() => setTestStatus('idle'), 3000);
},
- onError: () => {
+ onError: (err: Error) => {
setTestStatus('error');
+ toast.error(err.message || t('notificationProviders.testFailed'));
setTimeout(() => setTestStatus('idle'), 3000);
}
});
const handleTest = () => {
const formData = watch();
- testMutation.mutate({ ...formData, type: DISCORD_PROVIDER_TYPE } as Partial);
+ testMutation.mutate({ ...formData, type: normalizeProviderType(formData.type) } as Partial);
};
const handlePreview = async () => {
@@ -100,7 +128,7 @@ const ProviderForm: FC<{
const res = await previewExternalTemplate(formData.template, undefined, undefined);
if (res.parsed) setPreviewContent(JSON.stringify(res.parsed, null, 2)); else setPreviewContent(res.rendered);
} else {
- const res = await previewProvider({ ...formData, type: DISCORD_PROVIDER_TYPE } as Partial);
+ const res = await previewProvider({ ...formData, type: normalizeProviderType(formData.type) } as Partial);
if (res.parsed) setPreviewContent(JSON.stringify(res.parsed, null, 2)); else setPreviewContent(res.rendered);
}
} catch (err: unknown) {
@@ -109,10 +137,11 @@ const ProviderForm: FC<{
}
};
- const type = watch('type');
+ const type = normalizeProviderType(watch('type'));
+ const isGotify = type === 'gotify';
useEffect(() => {
- if (type !== DISCORD_PROVIDER_TYPE) {
- setValue('type', DISCORD_PROVIDER_TYPE, { shouldDirty: false, shouldTouch: false });
+ if (type !== 'gotify') {
+ setValue('gotify_token', '', { shouldDirty: false, shouldTouch: false });
}
}, [type, setValue]);
@@ -141,9 +170,9 @@ const ProviderForm: FC<{
};
return (
-