diff --git a/.docker/compose/docker-compose.local.yml b/.docker/compose/docker-compose.local.yml index a7c0f73d..162cca22 100644 --- a/.docker/compose/docker-compose.local.yml +++ b/.docker/compose/docker-compose.local.yml @@ -47,7 +47,7 @@ services: # - :/import/Caddyfile:ro # - :/import/sites:ro # If your Caddyfile imports other files healthcheck: - test: ["CMD-SHELL", "curl -fsS http://localhost:8080/api/v1/health || exit 1"] + test: ["CMD-SHELL", "wget -qO /dev/null http://localhost:8080/api/v1/health || exit 1"] interval: 30s timeout: 10s retries: 3 diff --git a/.docker/compose/docker-compose.playwright-ci.yml b/.docker/compose/docker-compose.playwright-ci.yml index 94e7d5a3..bc3f80b7 100644 --- a/.docker/compose/docker-compose.playwright-ci.yml +++ b/.docker/compose/docker-compose.playwright-ci.yml @@ -87,7 +87,7 @@ services: - playwright_caddy_config:/config - /var/run/docker.sock:/var/run/docker.sock:ro # For container discovery in tests healthcheck: - test: ["CMD", "curl", "-sf", "http://localhost:8080/api/v1/health"] + test: ["CMD-SHELL", "wget -qO /dev/null http://localhost:8080/api/v1/health || exit 1"] interval: 5s timeout: 3s retries: 12 diff --git a/.docker/compose/docker-compose.playwright-local.yml b/.docker/compose/docker-compose.playwright-local.yml index 735fe6b6..f25f7488 100644 --- a/.docker/compose/docker-compose.playwright-local.yml +++ b/.docker/compose/docker-compose.playwright-local.yml @@ -48,11 +48,12 @@ services: tmpfs: # True tmpfs for E2E test data - fresh on every run, in-memory only # mode=1777 allows any user to write (container runs as non-root) - - /app/data:size=100M,mode=1777 + # 256M gives headroom for the backup service's 100MB disk-space check + - /app/data:size=256M,mode=1777 volumes: - /var/run/docker.sock:/var/run/docker.sock:ro # For container discovery in tests healthcheck: - test: ["CMD-SHELL", "curl -fsS http://localhost:8080/api/v1/health || exit 1"] + test: ["CMD-SHELL", "wget -qO /dev/null 
http://localhost:8080/api/v1/health || exit 1"] interval: 5s timeout: 5s retries: 10 diff --git a/.docker/compose/docker-compose.yml b/.docker/compose/docker-compose.yml index 852e83a5..e7d9d3fa 100644 --- a/.docker/compose/docker-compose.yml +++ b/.docker/compose/docker-compose.yml @@ -52,7 +52,7 @@ services: # - ./my-existing-Caddyfile:/import/Caddyfile:ro # - ./sites:/import/sites:ro # If your Caddyfile imports other files healthcheck: - test: ["CMD-SHELL", "curl -fsS http://localhost:8080/api/v1/health || exit 1"] + test: ["CMD-SHELL", "wget -qO /dev/null http://localhost:8080/api/v1/health || exit 1"] interval: 30s timeout: 10s retries: 3 diff --git a/.docker/docker-entrypoint.sh b/.docker/docker-entrypoint.sh index a5e74e7e..cf794707 100755 --- a/.docker/docker-entrypoint.sh +++ b/.docker/docker-entrypoint.sh @@ -365,7 +365,7 @@ echo "Caddy started (PID: $CADDY_PID)" echo "Waiting for Caddy admin API..." i=1 while [ "$i" -le 30 ]; do - if curl -sf http://127.0.0.1:2019/config/ > /dev/null 2>&1; then + if wget -qO /dev/null http://127.0.0.1:2019/config/ 2>/dev/null; then echo "Caddy is ready!" break fi diff --git a/.github/agents/Backend_Dev.agent.md b/.github/agents/Backend_Dev.agent.md index cebe76c0..7589fb70 100644 --- a/.github/agents/Backend_Dev.agent.md +++ b/.github/agents/Backend_Dev.agent.md @@ -2,7 +2,8 @@ name: 'Backend Dev' description: 'Senior Go Engineer focused on high-performance, secure backend implementation.' 
argument-hint: 'The specific backend task from the Plan (e.g., "Implement ProxyHost CRUD endpoints")' -tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, vscode/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, 
github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, 
github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment +tools: vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, vscode/extensions, vscode/askQuestions, execute, read, edit, search, web, browser, github/add_comment_to_pending_review, 
github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, playwright/*, github/*, io.github.goreleaser/mcp/*, mcp-refactor-typescript/*, microsoftdocs/mcp/*, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, todo + target: vscode diff --git a/.github/agents/DevOps.agent.md b/.github/agents/DevOps.agent.md index 68dd8b40..216a0b64 100644 --- a/.github/agents/DevOps.agent.md +++ b/.github/agents/DevOps.agent.md @@ -2,7 
+2,8 @@ name: 'DevOps' description: 'DevOps specialist for CI/CD pipelines, deployment debugging, and GitOps workflows focused on making deployments boring and reliable' argument-hint: 'The CI/CD or infrastructure task (e.g., "Debug failing GitHub Action workflow")' -tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, vscode/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, 
github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, 
github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment +tools: vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, 
vscode/vscodeAPI, vscode/extensions, vscode/askQuestions, execute, read, edit, search, web, browser, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, playwright/*, github/*, io.github.goreleaser/mcp/*, mcp-refactor-typescript/*, microsoftdocs/mcp/*, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, todo + target: vscode user-invocable: true diff --git 
a/.github/agents/Doc_Writer.agent.md b/.github/agents/Doc_Writer.agent.md index 38c5e1f2..ff7f7949 100644 --- a/.github/agents/Doc_Writer.agent.md +++ b/.github/agents/Doc_Writer.agent.md @@ -2,7 +2,8 @@ name: 'Docs Writer' description: 'User Advocate and Writer focused on creating simple, layman-friendly documentation.' argument-hint: 'The feature to document (e.g., "Write the guide for the new Real-Time Logs")' -tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, vscode/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, 
github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, 
playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, 
ms-python.python/configurePythonEnvironment +tools: vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, vscode/extensions, vscode/askQuestions, execute, read, edit, search, web, browser, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, playwright/*, github/*, io.github.goreleaser/mcp/*, mcp-refactor-typescript/*, microsoftdocs/mcp/*, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, 
ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, todo + target: vscode user-invocable: true diff --git a/.github/agents/Frontend_Dev.agent.md b/.github/agents/Frontend_Dev.agent.md index 6ba7b4ae..be80075b 100644 --- a/.github/agents/Frontend_Dev.agent.md +++ b/.github/agents/Frontend_Dev.agent.md @@ -2,7 +2,8 @@ name: 'Frontend Dev' description: 'Senior React/TypeScript Engineer for frontend implementation.' argument-hint: 'The frontend feature or component to implement (e.g., "Implement the Real-Time Logs dashboard component")' -tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, vscode/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, 
github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, 
playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, 
ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment +tools: vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, vscode/extensions, vscode/askQuestions, execute, read, edit, search, web, browser, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, playwright/*, github/*, io.github.goreleaser/mcp/*, mcp-refactor-typescript/*, microsoftdocs/mcp/*, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, 
github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, todo + target: vscode diff --git a/.github/agents/Management.agent.md b/.github/agents/Management.agent.md index 7830ef3a..9d1be657 100644 --- a/.github/agents/Management.agent.md +++ b/.github/agents/Management.agent.md @@ -73,6 +73,7 @@ You are "lazy" in the smartest way possible. You never do what a subordinate can - **Supervisor**: Call `Supervisor` to review the implementation against the plan. Provide feedback and ensure alignment with best practices. 6. **Phase 6: Audit**: + - Review Security: Read `security.md.instructions.md` and `SECURITY.md` to understand the security requirements and best practices for Charon. Ensure that any open concerns or issues are addressed in the QA Audit and `SECURITY.md` is updated accordingly. - **QA**: Call `QA_Security` to meticulously test current implementation as well as regression test. Run all linting, security tasks, and manual lefthook checks. Write a report to `docs/reports/qa_report.md`. Start back at Phase 1 if issues are found. 7. **Phase 7: Closure**: diff --git a/.github/agents/Planning.agent.md b/.github/agents/Planning.agent.md index 773f4d32..561e9fdf 100644 --- a/.github/agents/Planning.agent.md +++ b/.github/agents/Planning.agent.md @@ -2,7 +2,8 @@ name: 'Planning' description: 'Principal Architect for technical planning and design decisions.'
argument-hint: 'The feature or system to plan (e.g., "Design the architecture for Real-Time Logs")' -tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, vscode/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, 
github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, 
github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment +tools: vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, vscode/extensions, vscode/askQuestions, execute, read, edit, search, web, browser, github/add_comment_to_pending_review, 
github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, playwright/*, github/*, io.github.goreleaser/mcp/*, mcp-refactor-typescript/*, microsoftdocs/mcp/*, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, todo + target: vscode diff --git a/.github/agents/Playwright_Dev.agent.md b/.github/agents/Playwright_Dev.agent.md index 657e8c40..dc402feb 100644 --- a/.github/agents/Playwright_Dev.agent.md +++ 
b/.github/agents/Playwright_Dev.agent.md @@ -3,7 +3,8 @@ name: 'Playwright Dev' description: 'E2E Testing Specialist for Playwright test automation.' argument-hint: 'The feature or flow to test (e.g., "Write E2E tests for the login flow")' -tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, vscode/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, 
github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, 
github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment +tools: vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, vscode/extensions, 
vscode/askQuestions, execute, read, edit, search, web, browser, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, playwright/*, github/*, io.github.goreleaser/mcp/*, mcp-refactor-typescript/*, microsoftdocs/mcp/*, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, todo + target: vscode diff --git a/.github/agents/QA_Security.agent.md b/.github/agents/QA_Security.agent.md 
index 8dc46d54..213bbf0f 100644 --- a/.github/agents/QA_Security.agent.md +++ b/.github/agents/QA_Security.agent.md @@ -2,7 +2,8 @@ name: 'QA Security' description: 'Quality Assurance and Security Engineer for testing and vulnerability assessment.' argument-hint: 'The component or feature to test (e.g., "Run security scan on authentication endpoints")' -tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, vscode/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, 
github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, 
github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment +tools: 
vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, vscode/extensions, vscode/askQuestions, execute, read, edit, search, web, browser, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, playwright/*, github/*, io.github.goreleaser/mcp/*, mcp-refactor-typescript/*, microsoftdocs/mcp/*, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, 
ms-python.python/configurePythonEnvironment, todo + target: vscode @@ -44,6 +45,7 @@ You are a QA AND SECURITY ENGINEER responsible for testing and vulnerability ass - Review test failure outputs with `test_failure` tool 4. **Security Scanning**: + - Review Security: Read `security.md.instructions.md` and `SECURITY.md` to understand the security requirements and best practices for Charon. Ensure that any open concerns or issues are addressed in the QA Audit and `SECURITY.md` is updated accordingly. - **Conditional GORM Scan**: When backend model/database-related changes are in scope (`backend/internal/models/**`, GORM services, migrations), run GORM scanner in check mode and report pass/fail as DoD gate: diff --git a/.github/agents/Supervisor.agent.md b/.github/agents/Supervisor.agent.md index a0a51203..e510b818 100644 --- a/.github/agents/Supervisor.agent.md +++ b/.github/agents/Supervisor.agent.md @@ -2,8 +2,8 @@ name: 'Supervisor' description: 'Code Review Lead for quality assurance and PR review.'
argument-hint: 'The PR or code change to review (e.g., "Review PR #123 for security issues")' +tools: vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, vscode/extensions, vscode/askQuestions, execute, read, edit, search, web, browser, github/add_comment_to_pending_review, github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, playwright/*, github/*, io.github.goreleaser/mcp/*, mcp-refactor-typescript/*, microsoftdocs/mcp/*, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, 
ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment, todo -tools: vscode/extensions, vscode/getProjectSetupInfo, vscode/installExtension, vscode/memory, vscode/runCommand, vscode/vscodeAPI, execute/getTerminalOutput, execute/awaitTerminal, execute/killTerminal, execute/runTask, execute/createAndRunTask, execute/runTests, execute/runNotebookCell, execute/testFailure, execute/runInTerminal, read/terminalSelection, read/terminalLastCommand, read/getTaskOutput, read/getNotebookSummary, read/problems, read/readFile, read/readNotebookCellOutput, vscode/askQuestions, agent/runSubagent, browser/openBrowserPage, edit/createDirectory, edit/createFile, edit/createJupyterNotebook, edit/editFiles, edit/editNotebook, edit/rename, search/changes, search/codebase, search/fileSearch, search/listDirectory, search/searchResults, search/textSearch, search/searchSubagent, search/usages, web/fetch, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_comment_to_pending_review, 
github/add_issue_comment, github/add_reply_to_pull_request_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_pull_request_with_copilot, github/create_repository, github/delete_file, github/fork_repository, github/get_commit, github/get_copilot_job_status, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, io.github.goreleaser/mcp/check, playwright/browser_click, playwright/browser_close, playwright/browser_console_messages, playwright/browser_drag, playwright/browser_evaluate, playwright/browser_file_upload, playwright/browser_fill_form, playwright/browser_handle_dialog, playwright/browser_hover, playwright/browser_install, playwright/browser_navigate, playwright/browser_navigate_back, playwright/browser_network_requests, playwright/browser_press_key, playwright/browser_resize, playwright/browser_run_code, playwright/browser_select_option, playwright/browser_snapshot, playwright/browser_tabs, playwright/browser_take_screenshot, playwright/browser_type, playwright/browser_wait_for, github/add_comment_to_pending_review, github/add_issue_comment, github/assign_copilot_to_issue, github/create_branch, github/create_or_update_file, github/create_pull_request, github/create_repository, github/delete_file, github/fork_repository, 
github/get_commit, github/get_file_contents, github/get_label, github/get_latest_release, github/get_me, github/get_release_by_tag, github/get_tag, github/get_team_members, github/get_teams, github/issue_read, github/issue_write, github/list_branches, github/list_commits, github/list_issue_types, github/list_issues, github/list_pull_requests, github/list_releases, github/list_tags, github/merge_pull_request, github/pull_request_read, github/pull_request_review_write, github/push_files, github/request_copilot_review, github/search_code, github/search_issues, github/search_pull_requests, github/search_repositories, github/search_users, github/sub_issue_write, github/update_pull_request, github/update_pull_request_branch, github/add_reply_to_pull_request_comment, github/create_pull_request_with_copilot, github/get_copilot_job_status, microsoftdocs/mcp/microsoft_code_sample_search, microsoftdocs/mcp/microsoft_docs_fetch, microsoftdocs/mcp/microsoft_docs_search, mcp-refactor-typescript/code_quality, mcp-refactor-typescript/file_operations, mcp-refactor-typescript/refactoring, mcp-refactor-typescript/workspace, todo, vscode.mermaid-chat-features/renderMermaidDiagram, github.vscode-pull-request-github/issue_fetch, github.vscode-pull-request-github/labels_fetch, github.vscode-pull-request-github/notification_fetch, github.vscode-pull-request-github/doSearch, github.vscode-pull-request-github/activePullRequest, github.vscode-pull-request-github/pullRequestStatusChecks, github.vscode-pull-request-github/openPullRequest, ms-azuretools.vscode-containers/containerToolsConfig, ms-python.python/getPythonEnvironmentInfo, ms-python.python/getPythonExecutableCommand, ms-python.python/installPythonPackage, ms-python.python/configurePythonEnvironment target: vscode user-invocable: true diff --git a/.github/instructions/security.md.instructions.md b/.github/instructions/security.md.instructions.md new file mode 100644 index 00000000..34e1b1df --- /dev/null +++ 
b/.github/instructions/security.md.instructions.md @@ -0,0 +1,204 @@ +--- +applyTo: SECURITY.md +--- + +# Instructions: Maintaining `SECURITY.md` + +`SECURITY.md` is the project's living security record. It serves two audiences simultaneously: users who need to know what risks exist right now, and the broader community who need confidence that vulnerabilities are being tracked and remediated with discipline. Treat it like a changelog, but for security events — every known issue gets an entry, every resolved issue keeps its entry. + +--- + +## File Structure + +`SECURITY.md` must always contain the following top-level sections, in this order: + +1. A brief project security policy preamble (responsible disclosure contact, response SLA) +2. **`## Known Vulnerabilities`** — active, unpatched issues +3. **`## Patched Vulnerabilities`** — resolved issues, retained permanently for audit trail + +No other top-level sections are required. Do not collapse or remove sections even when they are empty — use the explicit empty-state placeholder defined below. + +--- + +## Section 1: Known Vulnerabilities + +This section lists every vulnerability that is currently unpatched or only partially mitigated. Entries must be sorted with the highest severity first, then by discovery date descending within the same severity tier. + +### Entry Format + +Each entry is an H3 heading followed by a structured block: + +```markdown +### [SEVERITY] CVE-XXXX-XXXXX · Short Title + +| Field | Value | +|--------------|-------| +| **ID** | CVE-XXXX-XXXXX (or `CHARON-YYYY-NNN` if no CVE assigned yet) | +| **Severity** | Critical / High / Medium / Low · CVSS v3.1 score if known (e.g. `8.1 · High`) | +| **Status** | Investigating / Fix In Progress / Awaiting Upstream / Mitigated (partial) | + +**What** +One to three sentences describing the vulnerability class and its impact. +Be specific: name the weakness type (e.g. SQL injection, path traversal, SSRF). 
+ +**Who** +- Discovered by: [Reporter name or handle, or "Internal audit", or "Automated scan (tool name)"] +- Reported: YYYY-MM-DD +- Affects: [User roles, API consumers, unauthenticated users, etc.] + +**Where** +- Component: [Module or service name] +- File(s): `path/to/affected/file.go`, `path/to/other/file.ts` +- Versions affected: `>= X.Y.Z` (or "all versions" / "prior to X.Y.Z") + +**When** +- Discovered: YYYY-MM-DD +- Disclosed (if public): YYYY-MM-DD (or "Not yet publicly disclosed") +- Target fix: YYYY-MM-DD (or sprint/milestone reference) + +**How** +A concise technical description of the attack vector, prerequisites, and exploitation +method. Omit proof-of-concept code. Reference CVE advisories or upstream issue +trackers where appropriate. + +**Planned Remediation** +Describe the fix strategy: library upgrade, logic refactor, config change, etc. +If a workaround is available in the meantime, document it here. +Link to the tracking issue: [#NNN](https://github.com/owner/repo/issues/NNN) +``` + +### Empty State + +When there are no known vulnerabilities: + +```markdown +## Known Vulnerabilities + +No known unpatched vulnerabilities at this time. +Last reviewed: YYYY-MM-DD +``` + +--- + +## Section 2: Patched Vulnerabilities + +This section is a permanent, append-only ledger. Entries are never deleted. Sort newest-patched first. This section builds community trust by demonstrating that issues are resolved promptly and transparently. + +### Entry Format + +```markdown +### ✅ [SEVERITY] CVE-XXXX-XXXXX · Short Title + +| Field | Value | +|--------------|-------| +| **ID** | CVE-XXXX-XXXXX (or internal ID) | +| **Severity** | Critical / High / Medium / Low · CVSS v3.1 score | +| **Patched** | YYYY-MM-DD in `vX.Y.Z` | + +**What** +Same description carried over from the Known Vulnerabilities entry. 
+ +**Who** +- Discovered by: [Reporter or method] +- Reported: YYYY-MM-DD + +**Where** +- Component: [Module or service name] +- File(s): `path/to/affected/file.go` +- Versions affected: `< X.Y.Z` + +**When** +- Discovered: YYYY-MM-DD +- Patched: YYYY-MM-DD +- Time to patch: N days + +**How** +Same technical description as the original entry. + +**Resolution** +Describe exactly what was changed to fix the issue. +- Commit: [`abc1234`](https://github.com/owner/repo/commit/abc1234) +- PR: [#NNN](https://github.com/owner/repo/pull/NNN) +- Release: [`vX.Y.Z`](https://github.com/owner/repo/releases/tag/vX.Y.Z) + +**Credit** +[Optional] Thank the reporter if they consented to attribution. +``` + +### Empty State + +```markdown +## Patched Vulnerabilities + +No patched vulnerabilities on record yet. +``` + +--- + +## Lifecycle: Moving an Entry from Known → Patched + +When a fix ships: + +1. Remove the entry from `## Known Vulnerabilities` entirely. +2. Add a new entry to the **top** of `## Patched Vulnerabilities` using the patched format above. +3. Carry forward all original fields verbatim — do not rewrite the history of the issue. +4. Add the `**Resolution**` and `**Credit**` blocks with patch details. +5. Update the `Last reviewed` date on the Known Vulnerabilities section if it is now empty. + +Do not edit or backfill existing Patched entries once they are committed. 
+ +--- + +## Severity Classification + +Use the following definitions consistently: + +| Severity | CVSS Range | Meaning | +|----------|------------|---------| +| **Critical** | 9.0–10.0 | Remote code execution, auth bypass, full data exposure | +| **High** | 7.0–8.9 | Significant data exposure, privilege escalation, DoS | +| **Medium** | 4.0–6.9 | Limited data exposure, requires user interaction or auth | +| **Low** | 0.1–3.9 | Minimal impact, difficult to exploit, defense-in-depth | + +When a CVE CVSS score is not yet available, assign a preliminary severity based on these definitions and note it as `(preliminary)` until confirmed. + +--- + +## Internal IDs + +If a vulnerability has no CVE assigned, use the format `CHARON-YYYY-NNN` where `YYYY` is the year and `NNN` is a zero-padded sequence number starting at `001` for each year. Example: `CHARON-2025-003`. Assign a CVE ID in the entry retroactively if one is issued later, and add the internal ID as an alias in parentheses. + +--- + +## Responsible Disclosure Preamble + +The preamble at the top of `SECURITY.md` (before the vulnerability sections) must include: + +- The preferred contact method for reporting vulnerabilities (e.g. a GitHub private advisory link, a security email address, or both) +- An acknowledgment-first response commitment: confirm receipt within 48 hours, even if the full investigation takes longer +- A statement that reporters will not be penalized or publicly named without consent +- A link to the full disclosure policy if one exists + +Example: + +```markdown +## Reporting a Vulnerability + +To report a security issue, please use +[GitHub Private Security Advisories](https://github.com/owner/repo/security/advisories/new) +or email `security@example.com`. + +We will acknowledge your report within **48 hours** and provide a remediation +timeline within **7 days**. Reporters are credited with their consent. +We do not pursue legal action against good-faith security researchers. 
+``` + +--- + +## Maintenance Rules + +- **Review cadence**: Update the `Last reviewed` date in the Known Vulnerabilities section at least once per release cycle, even if no entries changed. +- **No silent patches**: Every security fix — no matter how minor — must produce an entry in `## Patched Vulnerabilities` before or alongside the release. +- **No redaction**: Do not redact or soften historical entries. Accuracy builds trust; minimizing past issues destroys it. +- **Dependency vulnerabilities**: Transitive dependency CVEs that affect Charon's exposed attack surface must be tracked here the same as first-party vulnerabilities. Pure dev-dependency CVEs with no runtime impact may be omitted at maintainer discretion, but must still be noted in the relevant dependency update PR. +- **Partial mitigations**: If a workaround is deployed but the root cause is not fixed, the entry stays in `## Known Vulnerabilities` with `Status: Mitigated (partial)` and the workaround documented in `**Planned Remediation**`. 
diff --git a/.github/renovate.json b/.github/renovate.json index 7def45de..45a9a1e8 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -130,6 +130,32 @@ "datasourceTemplate": "go", "versioningTemplate": "semver" }, + { + "customType": "regex", + "description": "Track gotestsum version in codecov workflow", + "managerFilePatterns": [ + "/^\\.github/workflows/codecov-upload\\.yml$/" + ], + "matchStrings": [ + "gotestsum@v(?<currentValue>[^\\s]+)" + ], + "depNameTemplate": "gotest.tools/gotestsum", + "datasourceTemplate": "go", + "versioningTemplate": "semver" + }, + { + "customType": "regex", + "description": "Track gotestsum version in quality checks workflow", + "managerFilePatterns": [ + "/^\\.github/workflows/quality-checks\\.yml$/" + ], + "matchStrings": [ + "gotestsum@v(?<currentValue>[^\\s]+)" + ], + "depNameTemplate": "gotest.tools/gotestsum", + "datasourceTemplate": "go", + "versioningTemplate": "semver" + }, { + "customType": "regex", + "description": "Track govulncheck version in scripts", @@ -255,6 +281,12 @@ "matchUpdateTypes": ["major"], "automerge": false, "labels": ["manual-review"] + }, + { + "description": "Fix Renovate lookup for geoip2-golang v2 module path", + "matchDatasources": ["go"], + "matchPackageNames": ["github.com/oschwald/geoip2-golang/v2"], + "sourceUrl": "https://github.com/oschwald/geoip2-golang" } ] } diff --git a/.github/skills/security-scan-docker-image-scripts/run.sh b/.github/skills/security-scan-docker-image-scripts/run.sh index b6575084..c77204dd 100755 --- a/.github/skills/security-scan-docker-image-scripts/run.sh +++ b/.github/skills/security-scan-docker-image-scripts/run.sh @@ -35,7 +35,7 @@ fi # Check Grype if !
command -v grype >/dev/null 2>&1; then log_error "Grype not found - install from: https://github.com/anchore/grype" - log_error "Installation: curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.109.1" + log_error "Installation: curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.110.0" error_exit "Grype is required for vulnerability scanning" 2 fi @@ -50,8 +50,8 @@ SYFT_INSTALLED_VERSION=$(syft version | grep -oP 'Version:\s*\Kv?[0-9]+\.[0-9]+\ GRYPE_INSTALLED_VERSION=$(grype version | grep -oP 'Version:\s*\Kv?[0-9]+\.[0-9]+\.[0-9]+' | head -1 || echo "unknown") # Set defaults matching CI workflow -set_default_env "SYFT_VERSION" "v1.42.2" -set_default_env "GRYPE_VERSION" "v0.109.1" +set_default_env "SYFT_VERSION" "v1.42.3" +set_default_env "GRYPE_VERSION" "v0.110.0" set_default_env "IMAGE_TAG" "charon:local" set_default_env "FAIL_ON_SEVERITY" "Critical,High" diff --git a/.github/workflows/auto-changelog.yml b/.github/workflows/auto-changelog.yml index 38d215e9..2c70e8b3 100644 --- a/.github/workflows/auto-changelog.yml +++ b/.github/workflows/auto-changelog.yml @@ -21,6 +21,6 @@ jobs: with: ref: ${{ github.event.workflow_run.head_sha || github.sha }} - name: Draft Release - uses: release-drafter/release-drafter@6a93d829887aa2e0748befe2e808c66c0ec6e4c7 # v6 + uses: release-drafter/release-drafter@139054aeaa9adc52ab36ddf67437541f039b88e2 # v7 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/auto-versioning.yml b/.github/workflows/auto-versioning.yml index ba0753a0..42786152 100644 --- a/.github/workflows/auto-versioning.yml +++ b/.github/workflows/auto-versioning.yml @@ -33,7 +33,7 @@ jobs: - name: Calculate Semantic Version id: semver - uses: paulhatch/semantic-version@f29500c9d60a99ed5168e39ee367e0976884c46e # v6.0.1 + uses: paulhatch/semantic-version@9f72830310d5ed81233b641ee59253644cd8a8fc # v6.0.2 with: # The prefix to use 
to create tags tag_prefix: "v" @@ -89,7 +89,7 @@ jobs: - name: Create GitHub Release (creates tag via API) if: ${{ steps.semver.outputs.changed == 'true' && steps.check_release.outputs.exists == 'false' }} - uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2 + uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2 with: tag_name: ${{ steps.determine_tag.outputs.tag }} name: Release ${{ steps.determine_tag.outputs.tag }} diff --git a/.github/workflows/cerberus-integration.yml b/.github/workflows/cerberus-integration.yml index 071d5927..e43474a2 100644 --- a/.github/workflows/cerberus-integration.yml +++ b/.github/workflows/cerberus-integration.yml @@ -31,7 +31,7 @@ jobs: - name: Build Docker image (Local) run: | echo "Building image locally for integration tests..." - docker build -t charon:local . + docker build -t charon:local --build-arg CI="${CI:-false}" . echo "✅ Successfully built charon:local" - name: Run Cerberus integration tests diff --git a/.github/workflows/codecov-upload.yml b/.github/workflows/codecov-upload.yml index e4209e12..0e2aaec7 100644 --- a/.github/workflows/codecov-upload.yml +++ b/.github/workflows/codecov-upload.yml @@ -126,6 +126,9 @@ jobs: echo "__CHARON_EOF__" } >> "$GITHUB_ENV" + - name: Install gotestsum + run: go install gotest.tools/gotestsum@v1.13.0 + - name: Run Go tests with coverage working-directory: ${{ github.workspace }} env: @@ -134,8 +137,16 @@ jobs: bash scripts/go-test-coverage.sh 2>&1 | tee backend/test-output.txt exit "${PIPESTATUS[0]}" + - name: Upload test output artifact + if: always() + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + with: + name: backend-test-output + path: backend/test-output.txt + retention-days: 7 + - name: Upload backend coverage to Codecov - uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5 + uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5 with: token: ${{ 
secrets.CODECOV_TOKEN }} files: ./backend/coverage.txt @@ -172,7 +183,7 @@ jobs: exit "${PIPESTATUS[0]}" - name: Upload frontend coverage to Codecov - uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5 + uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5 with: token: ${{ secrets.CODECOV_TOKEN }} directory: ./frontend/coverage diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index fab63981..d67d6c6b 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -52,7 +52,7 @@ jobs: run: bash scripts/ci/check-codeql-parity.sh - name: Initialize CodeQL - uses: github/codeql-action/init@0d579ffd059c29b07949a3cce3983f0780820c98 # v4 + uses: github/codeql-action/init@38697555549f1db7851b81482ff19f1fa5c4fedc # v4 with: languages: ${{ matrix.language }} queries: security-and-quality @@ -92,10 +92,10 @@ jobs: run: mkdir -p sarif-results - name: Autobuild - uses: github/codeql-action/autobuild@0d579ffd059c29b07949a3cce3983f0780820c98 # v4 + uses: github/codeql-action/autobuild@38697555549f1db7851b81482ff19f1fa5c4fedc # v4 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@0d579ffd059c29b07949a3cce3983f0780820c98 # v4 + uses: github/codeql-action/analyze@38697555549f1db7851b81482ff19f1fa5c4fedc # v4 with: category: "/language:${{ matrix.language }}" output: sarif-results/${{ matrix.language }} diff --git a/.github/workflows/crowdsec-integration.yml b/.github/workflows/crowdsec-integration.yml index 5a2fc20c..868d8e94 100644 --- a/.github/workflows/crowdsec-integration.yml +++ b/.github/workflows/crowdsec-integration.yml @@ -31,7 +31,7 @@ jobs: - name: Build Docker image (Local) run: | echo "Building image locally for integration tests..." - docker build -t charon:local . + docker build -t charon:local --build-arg CI="${CI:-false}" . 
echo "✅ Successfully built charon:local" - name: Run CrowdSec integration tests diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index 926f621a..9029ac24 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -23,7 +23,7 @@ name: Docker Build, Publish & Test on: pull_request: push: - branches: [main] + branches: [main, development] workflow_dispatch: workflow_run: workflows: ["Docker Lint"] @@ -42,7 +42,7 @@ env: TRIGGER_HEAD_SHA: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }} TRIGGER_REF: ${{ github.event_name == 'workflow_run' && format('refs/heads/{0}', github.event.workflow_run.head_branch) || github.ref }} TRIGGER_HEAD_REF: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.head_ref }} - TRIGGER_PR_NUMBER: ${{ github.event_name == 'workflow_run' && join(github.event.workflow_run.pull_requests.*.number, '') || github.event.pull_request.number }} + TRIGGER_PR_NUMBER: ${{ github.event_name == 'workflow_run' && join(github.event.workflow_run.pull_requests.*.number, '') || format('{0}', github.event.pull_request.number) }} TRIGGER_ACTOR: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.actor.login || github.actor }} jobs: @@ -234,7 +234,7 @@ jobs: - name: Build and push Docker image (with retry) if: steps.skip.outputs.skip_build != 'true' id: build-and-push - uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # v3.0.2 + uses: nick-fields/retry@ad984534de44a9489a53aefd81eb77f87c70dc60 # v4.0.0 with: timeout_minutes: 25 max_attempts: 3 @@ -565,7 +565,7 @@ jobs: - name: Upload Trivy results if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.trivy-check.outputs.exists == 'true' - uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6 + uses: 
github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1 with: sarif_file: 'trivy-results.sarif' category: '.github/workflows/docker-build.yml:build-and-push' @@ -574,7 +574,7 @@ jobs: # Generate SBOM (Software Bill of Materials) for supply chain security # Only for production builds (main/development) - feature branches use downstream supply-chain-pr.yml - name: Generate SBOM - uses: anchore/sbom-action@57aae528053a48a3f6235f2d9461b05fbcb7366d # v0.23.1 + uses: anchore/sbom-action@e22c389904149dbc22b58101806040fa8d37a610 # v0.24.0 if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true' with: image: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }} @@ -583,7 +583,7 @@ jobs: # Create verifiable attestation for the SBOM - name: Attest SBOM - uses: actions/attest-sbom@07e74fc4e78d1aad915e867f9a094073a9f71527 # v4.0.0 + uses: actions/attest-sbom@c604332985a26aa8cf1bdc465b92731239ec6b9e # v4.1.0 if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true' with: subject-name: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }} @@ -724,14 +724,14 @@ jobs: - name: Upload Trivy scan results if: always() && steps.trivy-pr-check.outputs.exists == 'true' - uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6 + uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1 with: sarif_file: 'trivy-pr-results.sarif' category: 'docker-pr-image' - name: Upload Trivy compatibility results (docker-build category) if: always() && steps.trivy-pr-check.outputs.exists == 'true' - uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6 + uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1 with: sarif_file: 'trivy-pr-results.sarif' category: 
'.github/workflows/docker-build.yml:build-and-push' @@ -739,7 +739,7 @@ jobs: - name: Upload Trivy compatibility results (docker-publish alias) if: always() && steps.trivy-pr-check.outputs.exists == 'true' - uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6 + uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1 with: sarif_file: 'trivy-pr-results.sarif' category: '.github/workflows/docker-publish.yml:build-and-push' @@ -747,7 +747,7 @@ jobs: - name: Upload Trivy compatibility results (nightly alias) if: always() && steps.trivy-pr-check.outputs.exists == 'true' - uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6 + uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1 with: sarif_file: 'trivy-pr-results.sarif' category: 'trivy-nightly' diff --git a/.github/workflows/e2e-tests-split.yml b/.github/workflows/e2e-tests-split.yml index 861c0ac0..ed20bfeb 100644 --- a/.github/workflows/e2e-tests-split.yml +++ b/.github/workflows/e2e-tests-split.yml @@ -158,7 +158,7 @@ jobs: - name: Cache npm dependencies if: steps.resolve-image.outputs.image_source == 'build' - uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5 + uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 with: path: ~/.npm key: npm-${{ hashFiles('package-lock.json') }} diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 3aff9b2f..44f8e896 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -263,7 +263,7 @@ jobs: - name: Generate SBOM id: sbom_primary continue-on-error: true - uses: anchore/sbom-action@57aae528053a48a3f6235f2d9461b05fbcb7366d # v0.23.1 + uses: anchore/sbom-action@e22c389904149dbc22b58101806040fa8d37a610 # v0.24.0 with: image: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.resolve_digest.outputs.digest }} format: 
cyclonedx-json @@ -282,7 +282,7 @@ jobs: echo "Primary SBOM generation failed or produced missing/invalid output; using deterministic Syft fallback" - SYFT_VERSION="v1.42.2" + SYFT_VERSION="v1.42.3" OS="$(uname -s | tr '[:upper:]' '[:lower:]')" ARCH="$(uname -m)" case "$ARCH" in @@ -435,7 +435,7 @@ jobs: name: sbom-nightly - name: Scan with Grype - uses: anchore/scan-action@7037fa011853d5a11690026fb85feee79f4c946c # v7.3.2 + uses: anchore/scan-action@e1165082ffb1fe366ebaf02d8526e7c4989ea9d2 # v7.4.0 with: sbom: sbom-nightly.json fail-build: false @@ -451,7 +451,7 @@ jobs: trivyignores: '.trivyignore' - name: Upload Trivy results - uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6 + uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1 with: sarif_file: 'trivy-nightly.sarif' category: 'trivy-nightly' diff --git a/.github/workflows/quality-checks.yml b/.github/workflows/quality-checks.yml index 37504472..d14dec74 100644 --- a/.github/workflows/quality-checks.yml +++ b/.github/workflows/quality-checks.yml @@ -148,14 +148,24 @@ jobs: run: | bash "scripts/repo_health_check.sh" + - name: Install gotestsum + run: go install gotest.tools/gotestsum@v1.13.0 + - name: Run Go tests id: go-tests working-directory: ${{ github.workspace }} env: CGO_ENABLED: 1 run: | - bash "scripts/go-test-coverage.sh" 2>&1 | tee backend/test-output.txt - exit "${PIPESTATUS[0]}" + bash "scripts/go-test-coverage.sh" 2>&1 | tee backend/test-output.txt; exit "${PIPESTATUS[0]}" + + - name: Upload test output artifact + if: always() + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + with: + name: backend-test-output + path: backend/test-output.txt + retention-days: 7 - name: Go Test Summary if: always() @@ -232,11 +242,12 @@ jobs: PERF_MAX_MS_GETSTATUS_P95_PARALLEL: 1500ms PERF_MAX_MS_LISTDECISIONS_P95: 2000ms run: | + go test -run TestPerf -v ./internal/api/handlers -count=1 2>&1 | tee 
perf-output.txt; PERF_STATUS="${PIPESTATUS[0]}" { echo "## 🔍 Running performance assertions (TestPerf)" - go test -run TestPerf -v ./internal/api/handlers -count=1 | tee perf-output.txt + cat perf-output.txt } >> "$GITHUB_STEP_SUMMARY" - exit "${PIPESTATUS[0]}" + exit "$PERF_STATUS" frontend-quality: name: Frontend (React) @@ -298,8 +309,7 @@ jobs: id: frontend-tests working-directory: ${{ github.workspace }} run: | - bash scripts/frontend-test-coverage.sh 2>&1 | tee frontend/test-output.txt - exit "${PIPESTATUS[0]}" + bash scripts/frontend-test-coverage.sh 2>&1 | tee frontend/test-output.txt; exit "${PIPESTATUS[0]}" - name: Frontend Test Summary if: always() diff --git a/.github/workflows/rate-limit-integration.yml b/.github/workflows/rate-limit-integration.yml index 8c74f3a7..fe49d0f6 100644 --- a/.github/workflows/rate-limit-integration.yml +++ b/.github/workflows/rate-limit-integration.yml @@ -31,7 +31,7 @@ jobs: - name: Build Docker image (Local) run: | echo "Building image locally for integration tests..." - docker build -t charon:local . + docker build -t charon:local --build-arg CI="${CI:-false}" . 
echo "✅ Successfully built charon:local" - name: Run rate limit integration tests @@ -68,7 +68,7 @@ jobs: echo "### Caddy Admin Config (rate_limit handlers)" echo '```json' - curl -s http://localhost:2119/config 2>/dev/null | grep -A 20 '"handler":"rate_limit"' | head -30 || echo "Could not retrieve Caddy config" + curl -s http://localhost:2119/config/ 2>/dev/null | grep -A 20 '"handler":"rate_limit"' | head -30 || echo "Could not retrieve Caddy config" echo '```' echo "" diff --git a/.github/workflows/renovate.yml b/.github/workflows/renovate.yml index 2ffbf873..82e1bec5 100644 --- a/.github/workflows/renovate.yml +++ b/.github/workflows/renovate.yml @@ -25,7 +25,7 @@ jobs: fetch-depth: 1 - name: Run Renovate - uses: renovatebot/github-action@0b17c4eb901eca44d018fb25744a50a74b2042df # v46.1.4 + uses: renovatebot/github-action@68a3ea99af6ad249940b5a9fdf44fc6d7f14378b # v46.1.6 with: configurationFile: .github/renovate.json token: ${{ secrets.RENOVATE_TOKEN || secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/security-pr.yml b/.github/workflows/security-pr.yml index b818cd3e..7e05d9de 100644 --- a/.github/workflows/security-pr.yml +++ b/.github/workflows/security-pr.yml @@ -240,7 +240,7 @@ jobs: - name: Download PR image artifact if: github.event_name == 'workflow_run' || github.event_name == 'workflow_dispatch' # actions/download-artifact v4.1.8 - uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c + uses: actions/download-artifact@484a0b528fb4d7bd804637ccb632e47a0e638317 with: name: ${{ steps.check-artifact.outputs.artifact_name }} run-id: ${{ steps.check-artifact.outputs.run_id }} @@ -385,7 +385,7 @@ jobs: - name: Upload Trivy SARIF to GitHub Security if: always() && steps.trivy-sarif-check.outputs.exists == 'true' # github/codeql-action v4 - uses: github/codeql-action/upload-sarif@1a97b0f94ec9297d6f58aefe5a6b5441c045bed4 + uses: github/codeql-action/upload-sarif@eedab83377f873ae39009d167a89b7a5aab4638b with: sarif_file: 
'trivy-binary-results.sarif' category: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }} diff --git a/.github/workflows/security-weekly-rebuild.yml b/.github/workflows/security-weekly-rebuild.yml index 69c2ae4c..1efc9f40 100644 --- a/.github/workflows/security-weekly-rebuild.yml +++ b/.github/workflows/security-weekly-rebuild.yml @@ -113,7 +113,7 @@ jobs: version: 'v0.69.3' - name: Upload Trivy results to GitHub Security - uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6 + uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1 with: sarif_file: 'trivy-weekly-results.sarif' diff --git a/.github/workflows/supply-chain-pr.yml b/.github/workflows/supply-chain-pr.yml index f4a8a3fa..24775acb 100644 --- a/.github/workflows/supply-chain-pr.yml +++ b/.github/workflows/supply-chain-pr.yml @@ -266,7 +266,7 @@ jobs: # Generate SBOM using official Anchore action (auto-updated by Renovate) - name: Generate SBOM if: steps.set-target.outputs.image_name != '' - uses: anchore/sbom-action@57aae528053a48a3f6235f2d9461b05fbcb7366d # v0.23.1 + uses: anchore/sbom-action@e22c389904149dbc22b58101806040fa8d37a610 # v0.24.0 id: sbom with: image: ${{ steps.set-target.outputs.image_name }} @@ -285,7 +285,7 @@ jobs: - name: Install Grype if: steps.set-target.outputs.image_name != '' run: | - curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.109.1 + curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.110.0 - name: Scan for vulnerabilities if: steps.set-target.outputs.image_name != '' @@ -362,7 +362,7 @@ jobs: - name: Upload SARIF to GitHub Security if: steps.check-artifact.outputs.artifact_found == 'true' - uses: 
github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4 + uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4 continue-on-error: true with: sarif_file: grype-results.sarif @@ -381,9 +381,12 @@ jobs: - name: Comment on PR if: steps.set-target.outputs.image_name != '' && steps.pr-number.outputs.is_push != 'true' && steps.pr-number.outputs.pr_number != '' + continue-on-error: true env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | + set -euo pipefail + PR_NUMBER="${{ steps.pr-number.outputs.pr_number }}" COMPONENT_COUNT="${{ steps.sbom-count.outputs.component_count }}" CRITICAL_COUNT="${{ steps.vuln-summary.outputs.critical_count }}" @@ -429,29 +432,38 @@ jobs: EOF ) - # Find and update existing comment or create new one - COMMENT_ID=$(gh api \ + # Fetch existing comments — skip gracefully on 403 / permission errors + COMMENTS_JSON="" + if ! COMMENTS_JSON=$(gh api \ -H "Accept: application/vnd.github+json" \ -H "X-GitHub-Api-Version: 2022-11-28" \ - "/repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" \ - --jq '.[] | select(.body | contains("Supply Chain Verification Results")) | .id' | head -1) + "/repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" 2>/dev/null); then + echo "⚠️ Cannot access PR comments (likely token permissions / fork / event context). Skipping PR comment." + exit 0 + fi - if [[ -n "${COMMENT_ID}" ]]; then + COMMENT_ID=$(echo "${COMMENTS_JSON}" | jq -r '.[] | select(.body | contains("Supply Chain Verification Results")) | .id' | head -1) + + if [[ -n "${COMMENT_ID:-}" && "${COMMENT_ID}" != "null" ]]; then echo "📝 Updating existing comment..." - gh api \ - --method PATCH \ + if ! gh api --method PATCH \ -H "Accept: application/vnd.github+json" \ -H "X-GitHub-Api-Version: 2022-11-28" \ "/repos/${{ github.repository }}/issues/comments/${COMMENT_ID}" \ - -f body="${COMMENT_BODY}" + -f body="${COMMENT_BODY}"; then + echo "⚠️ Failed to update comment (permissions?). Skipping." 
+ exit 0 + fi else echo "📝 Creating new comment..." - gh api \ - --method POST \ + if ! gh api --method POST \ -H "Accept: application/vnd.github+json" \ -H "X-GitHub-Api-Version: 2022-11-28" \ "/repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" \ - -f body="${COMMENT_BODY}" + -f body="${COMMENT_BODY}"; then + echo "⚠️ Failed to create comment (permissions?). Skipping." + exit 0 + fi fi echo "✅ PR comment posted" diff --git a/.github/workflows/supply-chain-verify.yml b/.github/workflows/supply-chain-verify.yml index 81c1d7fc..36f40cb3 100644 --- a/.github/workflows/supply-chain-verify.yml +++ b/.github/workflows/supply-chain-verify.yml @@ -119,7 +119,7 @@ jobs: # Generate SBOM using official Anchore action (auto-updated by Renovate) - name: Generate and Verify SBOM if: steps.image-check.outputs.exists == 'true' - uses: anchore/sbom-action@57aae528053a48a3f6235f2d9461b05fbcb7366d # v0.23.1 + uses: anchore/sbom-action@e22c389904149dbc22b58101806040fa8d37a610 # v0.24.0 with: image: ghcr.io/${{ github.repository_owner }}/charon:${{ steps.tag.outputs.tag }} format: cyclonedx-json @@ -233,7 +233,7 @@ jobs: # Scan for vulnerabilities using official Anchore action (auto-updated by Renovate) - name: Scan for Vulnerabilities if: steps.validate-sbom.outputs.valid == 'true' - uses: anchore/scan-action@7037fa011853d5a11690026fb85feee79f4c946c # v7.3.2 + uses: anchore/scan-action@e1165082ffb1fe366ebaf02d8526e7c4989ea9d2 # v7.4.0 id: scan with: sbom: sbom-verify.cyclonedx.json diff --git a/.github/workflows/waf-integration.yml b/.github/workflows/waf-integration.yml index 65b6fe79..509eb5ee 100644 --- a/.github/workflows/waf-integration.yml +++ b/.github/workflows/waf-integration.yml @@ -31,7 +31,7 @@ jobs: - name: Build Docker image (Local) run: | echo "Building image locally for integration tests..." - docker build -t charon:local . + docker build -t charon:local --build-arg CI="${CI:-false}" . 
echo "✅ Successfully built charon:local" - name: Run WAF integration tests diff --git a/.github/workflows/weekly-nightly-promotion.yml b/.github/workflows/weekly-nightly-promotion.yml index 47ad9fd6..1b6687d3 100644 --- a/.github/workflows/weekly-nightly-promotion.yml +++ b/.github/workflows/weekly-nightly-promotion.yml @@ -200,8 +200,8 @@ jobs: runs-on: ubuntu-latest if: needs.check-nightly-health.outputs.is_healthy == 'true' outputs: - pr_number: ${{ steps.create-pr.outputs.pr_number }} - pr_url: ${{ steps.create-pr.outputs.pr_url }} + pr_number: ${{ steps.create-pr.outputs.pr_number || steps.existing-pr.outputs.pr_number }} + pr_url: ${{ steps.create-pr.outputs.pr_url || steps.existing-pr.outputs.pr_url }} skipped: ${{ steps.check-diff.outputs.skipped }} steps: diff --git a/.grype.yaml b/.grype.yaml index 7701f01f..945b8297 100644 --- a/.grype.yaml +++ b/.grype.yaml @@ -4,136 +4,285 @@ # Documentation: https://github.com/anchore/grype#specifying-matches-to-ignore ignore: - # CVE-2026-22184: zlib Global Buffer Overflow in untgz utility - # Severity: CRITICAL - # Package: zlib 1.3.1-r2 (Alpine Linux base image) - # Status: No upstream fix available as of 2026-01-16 + # CVE-2026-2673: OpenSSL TLS 1.3 server key exchange group downgrade + # Severity: HIGH (CVSS 7.5) + # Packages: libcrypto3 3.5.5-r0 and libssl3 3.5.5-r0 (Alpine apk) + # Status: No upstream fix available — Alpine 3.23 still ships libcrypto3/libssl3 3.5.5-r0 as of 2026-03-18 # # Vulnerability Details: - # - Global buffer overflow in TGZfname() function - # - Unbounded strcpy() allows attacker-controlled archive names - # - Can lead to memory corruption, DoS, potential RCE + # - When DEFAULT is in the TLS 1.3 group configuration, the OpenSSL server may select + # a weaker key exchange group than preferred, enabling a limited key exchange downgrade. + # - Only affects systems acting as a raw TLS 1.3 server using OpenSSL's server-side group negotiation. 
# - # Risk Assessment: ACCEPTED (Low exploitability in Charon context) - # - Charon does not use untgz utility directly - # - No untrusted tar archive processing in application code - # - Attack surface limited to OS-level utilities - # - Multiple layers of containerization and isolation + # Root Cause (No Fix Available): + # - Alpine upstream has not published a patched libcrypto3/libssl3 for Alpine 3.23. + # - Checked: Alpine 3.23 still ships libcrypto3/libssl3 3.5.5-r0 as of 2026-03-18. + # - Fix path: once Alpine publishes a patched libcrypto3/libssl3, rebuild the Docker image + # and remove this suppression. # - # Mitigation: - # - Monitor Alpine Linux security feed daily for zlib patches - # - Container runs with minimal privileges (no-new-privileges) - # - Read-only filesystem where possible - # - Network isolation via Docker networks - # - # Review: - # - Daily checks for Alpine security updates - # - Automatic re-scan via CI/CD on every commit - # - Manual review scheduled for 2026-01-23 (7 days) - # - # Removal Criteria: - # - Alpine releases zlib 1.3.1-r3 or higher with CVE fix - # - OR upstream zlib project releases patched version - # - Remove this suppression immediately after fix available - # - # References: - # - CVE: https://nvd.nist.gov/vuln/detail/CVE-2026-22184 - # - Alpine Security: https://security.alpinelinux.org/ - # - GitHub Issue: https://github.com/Wikid82/Charon/issues/TBD - - vulnerability: CVE-2026-22184 - package: - name: zlib - version: "1.3.1-r2" - type: apk # Alpine package - reason: | - CRITICAL buffer overflow in untgz utility. No fix available from Alpine - as of 2026-01-16. Risk accepted: Charon does not directly use untgz or - process untrusted tar archives. Attack surface limited to base OS utilities. - Monitoring Alpine security feed for upstream patch. - expiry: "2026-03-14" # Re-evaluate in 7 days - - # Action items when this suppression expires: - # 1. Check Alpine security feed: https://security.alpinelinux.org/ - # 2. 
Check zlib releases: https://github.com/madler/zlib/releases - # 3. If fix available: Update Dockerfile, rebuild, remove suppression - # 4. If no fix: Extend expiry by 7 days, document justification - # 5. If extended 3+ times: Escalate to security team for review - - # GHSA-69x3-g4r3-p962 / CVE-2026-25793: Nebula ECDSA Signature Malleability - # Severity: HIGH (CVSS 8.1) - # Package: github.com/slackhq/nebula v1.9.7 (embedded in /usr/bin/caddy) - # Status: Cannot upgrade — smallstep/certificates v0.30.0-rc2 still pins nebula v1.9.x - # - # Vulnerability Details: - # - ECDSA signature malleability allows bypassing certificate blocklists - # - Attacker can forge alternate valid P256 ECDSA signatures for revoked - # certificates (CVSSv3: AV:N/AC:H/PR:L/UI:N/S:U/C:H/I:H/A:N) - # - Only affects configurations using Nebula-based certificate authorities - # (non-default and uncommon in Charon deployments) - # - # Root Cause (Compile-Time Dependency Lock): - # - Caddy is built with caddy-security plugin, which transitively requires - # github.com/smallstep/certificates. That package pins nebula v1.9.x. - # - Checked: smallstep/certificates v0.27.5 → v0.30.0-rc2 all require nebula v1.9.4–v1.9.7. - # The nebula v1.10 API removal breaks compilation in the - # authority/provisioner package; xcaddy build fails with upgrade attempted. - # - Dockerfile caddy-builder stage pins nebula@v1.9.7 (Renovate tracked) with - # an inline comment explaining the constraint (Dockerfile line 247). - # - Fix path: once smallstep/certificates releases a version requiring - # nebula v1.10+, remove the pin and this suppression simultaneously. - # - # Risk Assessment: ACCEPTED (Low exploitability in Charon context) - # - Charon uses standard ACME/Let's Encrypt TLS; Nebula VPN PKI is not - # enabled by default and rarely configured in Charon deployments. - # - Exploiting this requires a valid certificate sharing the same issuer as - # a revoked one — an uncommon and targeted attack scenario. 
+ # Risk Assessment: ACCEPTED (No upstream fix; limited exposure in Charon context) + # - Charon terminates TLS at the Caddy layer — the Go backend does not act as a raw TLS 1.3 server. + # - The vulnerability requires the affected application to directly configure TLS 1.3 server + # group negotiation via OpenSSL, which Charon does not do. # - Container-level isolation reduces the attack surface further. # # Mitigation (active while suppression is in effect): - # - Monitor smallstep/certificates releases at https://github.com/smallstep/certificates/releases - # - Weekly CI security rebuild flags any new CVEs in the full image. - # - Renovate annotation in Dockerfile (datasource=go depName=github.com/slackhq/nebula) - # will surface the pin for review when xcaddy build becomes compatible. + # - Monitor Alpine security advisories: https://security.alpinelinux.org/vuln/CVE-2026-2673 + # - Weekly CI security rebuild (security-weekly-rebuild.yml) flags any new CVEs in the full image. # # Review: - # - Reviewed 2026-02-19: smallstep/certificates latest stable remains v0.27.5; - # no release requiring nebula v1.10+ has shipped. Suppression extended 14 days. - # - Next review: 2026-03-05. Remove suppression immediately once upstream fixes. + # - Reviewed 2026-03-18 (initial suppression): no upstream fix available. Set 30-day review. + # - Next review: 2026-04-18. Remove suppression immediately once upstream fixes. 
# # Removal Criteria: - # - smallstep/certificates releases a stable version requiring nebula v1.10+ - # - Update Dockerfile caddy-builder patch to use the new versions - # - Rebuild image, run security scan, confirm suppression no longer needed - # - Remove both this entry and the corresponding .trivyignore entry + # - Alpine publishes a patched version of libcrypto3 and libssl3 + # - Rebuild Docker image and verify CVE-2026-2673 no longer appears in grype-results.json + # - Remove both these entries and the corresponding .trivyignore entry simultaneously # # References: - # - GHSA: https://github.com/advisories/GHSA-69x3-g4r3-p962 - # - CVE-2026-25793: https://nvd.nist.gov/vuln/detail/CVE-2026-25793 - # - smallstep/certificates: https://github.com/smallstep/certificates/releases - # - Dockerfile pin: caddy-builder stage, line ~247 (go get nebula@v1.9.7) - - vulnerability: GHSA-69x3-g4r3-p962 + # - CVE-2026-2673: https://nvd.nist.gov/vuln/detail/CVE-2026-2673 + # - Alpine security tracker: https://security.alpinelinux.org/vuln/CVE-2026-2673 + - vulnerability: CVE-2026-2673 package: - name: github.com/slackhq/nebula - version: "v1.9.7" - type: go-module + name: libcrypto3 + version: "3.5.5-r0" + type: apk reason: | - HIGH — ECDSA signature malleability in nebula v1.9.7 embedded in /usr/bin/caddy. - Cannot upgrade: smallstep/certificates v0.27.5 (latest stable as of 2026-02-19) - still requires nebula v1.9.x (verified across v0.27.5–v0.30.0-rc2). Charon does - not use Nebula VPN PKI by default. Risk accepted pending upstream smallstep fix. - Reviewed 2026-02-19: no new smallstep release changes this assessment. - expiry: "2026-03-05" # Re-evaluate in 14 days (2026-02-19 + 14 days) + HIGH — OpenSSL TLS 1.3 server key exchange group downgrade in libcrypto3 3.5.5-r0 (Alpine base image). + No upstream fix: Alpine 3.23 still ships libcrypto3 3.5.5-r0 as of 2026-03-18. Charon + terminates TLS at the Caddy layer; the Go backend does not act as a raw TLS 1.3 server. 
+ Risk accepted pending Alpine upstream patch. + expiry: "2026-04-18" # Initial 30-day review period. Extend in 14–30 day increments with documented justification. # Action items when this suppression expires: - # 1. Check smallstep/certificates releases: https://github.com/smallstep/certificates/releases - # 2. If a stable version requires nebula v1.10+: - # a. Update Dockerfile caddy-builder: remove the `go get nebula@v1.9.7` pin - # b. Optionally bump smallstep/certificates to the new version - # c. Rebuild Docker image and verify no compile failures - # d. Re-run local security-scan-docker-image and confirm clean result - # e. Remove this suppression entry - # 3. If no fix yet: Extend expiry by 14 days and document justification - # 4. If extended 3+ times: Open upstream issue on smallstep/certificates + # 1. Check Alpine security tracker: https://security.alpinelinux.org/vuln/CVE-2026-2673 + # 2. If a patched Alpine package is now available: + # a. Rebuild Docker image without suppression + # b. Run local security-scan-docker-image and confirm CVE is resolved + # c. Remove this suppression entry, the libssl3 entry below, and the .trivyignore entry + # 3. If no fix yet: Extend expiry by 14–30 days and update the review comment above + # 4. If extended 3+ times: Open an issue to track the upstream status formally + + # CVE-2026-2673 (libssl3) — see full justification in the libcrypto3 entry above + - vulnerability: CVE-2026-2673 + package: + name: libssl3 + version: "3.5.5-r0" + type: apk + reason: | + HIGH — OpenSSL TLS 1.3 server key exchange group downgrade in libssl3 3.5.5-r0 (Alpine base image). + No upstream fix: Alpine 3.23 still ships libssl3 3.5.5-r0 as of 2026-03-18. Charon + terminates TLS at the Caddy layer; the Go backend does not act as a raw TLS 1.3 server. + Risk accepted pending Alpine upstream patch. + expiry: "2026-04-18" # Initial 30-day review period. See libcrypto3 entry above for action items. 
+ + # GHSA-6g7g-w4f8-9c9x: buger/jsonparser Delete panic on malformed JSON (DoS) + # Severity: HIGH (CVSS 7.5) + # Package: github.com/buger/jsonparser v1.1.1 (embedded in /usr/local/bin/crowdsec and /usr/local/bin/cscli) + # Status: NO upstream fix available — OSV marks "Last affected: v1.1.1" with no Fixed event + # + # Vulnerability Details: + # - The Delete function fails to validate offsets on malformed JSON input, producing a + # negative slice index and a runtime panic — denial of service (CWE-125). + # - CVSSv3: AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H + # + # Root Cause (Third-Party Binary + No Upstream Fix): + # - Charon does not use buger/jsonparser directly. It is compiled into CrowdSec binaries. + # - The buger/jsonparser repository has no released fix as of 2026-03-19 (GitHub issue #275 + # and golang/vulndb #4514 are both open). + # - Fix path: once buger/jsonparser releases a patched version and CrowdSec updates their + # dependency, rebuild the Docker image and remove this suppression. + # + # Risk Assessment: ACCEPTED (Limited exploitability + no upstream fix) + # - The DoS vector requires passing malformed JSON to the vulnerable Delete function within + # CrowdSec's internal processing pipeline; this is not a direct attack surface in Charon. + # - CrowdSec's exposed surface is its HTTP API (not raw JSON stream parsing via this path). + # + # Mitigation (active while suppression is in effect): + # - Monitor buger/jsonparser: https://github.com/buger/jsonparser/issues/275 + # - Monitor CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases + # - Weekly CI security rebuild flags the moment a fixed image ships. + # + # Review: + # - Reviewed 2026-03-19 (initial suppression): no upstream fix exists. Set 30-day review. + # - Next review: 2026-04-19. Remove suppression once buger/jsonparser ships a fix and + # CrowdSec updates their dependency. 
+ # + # Removal Criteria: + # - buger/jsonparser releases a patched version (v1.1.2 or higher) + # - CrowdSec releases a version built with the patched jsonparser + # - Rebuild Docker image, run security-scan-docker-image, confirm finding is resolved + # - Remove this entry and the corresponding .trivyignore entry simultaneously + # + # References: + # - GHSA-6g7g-w4f8-9c9x: https://github.com/advisories/GHSA-6g7g-w4f8-9c9x + # - Upstream issue: https://github.com/buger/jsonparser/issues/275 + # - golang/vulndb: https://github.com/golang/vulndb/issues/4514 + # - CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases + - vulnerability: GHSA-6g7g-w4f8-9c9x + package: + name: github.com/buger/jsonparser + version: "v1.1.1" + type: go-module + reason: | + HIGH — DoS panic via malformed JSON in buger/jsonparser v1.1.1 embedded in CrowdSec binaries. + No upstream fix: buger/jsonparser has no released patch as of 2026-03-19 (issue #275 open). + Charon does not use this package directly; the vector requires reaching CrowdSec's internal + JSON processing pipeline. Risk accepted; no remediation path until upstream ships a fix. + Reviewed 2026-03-19: no patched release available. + expiry: "2026-04-19" # 30-day review: no fix exists. Extend in 30-day increments with documented justification. + + # Action items when this suppression expires: + # 1. Check buger/jsonparser releases: https://github.com/buger/jsonparser/releases + # and issue #275: https://github.com/buger/jsonparser/issues/275 + # 2. If a fix has shipped AND CrowdSec has updated their dependency: + # a. Rebuild Docker image and run local security-scan-docker-image + # b. Remove this suppression entry and the corresponding .trivyignore entry + # 3. If no fix yet: Extend expiry by 30 days and update the review comment above + # 4. 
If extended 3+ times with no progress: Consider opening an issue upstream or + # evaluating whether CrowdSec can replace buger/jsonparser with a safe alternative + + # GHSA-jqcq-xjh3-6g23: pgproto3/v2 DataRow.Decode panic on negative field length (DoS) + # Severity: HIGH (CVSS 7.5) + # Package: github.com/jackc/pgproto3/v2 v2.3.3 (embedded in /usr/local/bin/crowdsec and /usr/local/bin/cscli) + # Status: NO fix in pgproto3/v2 (archived/EOL) — fix path requires CrowdSec to migrate to pgx/v5 + # + # Vulnerability Details: + # - DataRow.Decode does not validate field lengths; a malicious or compromised PostgreSQL server + # can send a negative field length causing a slice-bounds panic — denial of service (CWE-129). + # - CVSSv3: AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H + # + # Root Cause (EOL Module + Third-Party Binary): + # - Charon does not use pgproto3/v2 directly nor communicate with PostgreSQL. The package + # is compiled into CrowdSec binaries for their internal database communication. + # - The pgproto3/v2 module is archived and EOL; no fix will be released. The fix path + # is migration to pgx/v5, which embeds an updated pgproto3/v3. + # - Fix path: once CrowdSec migrates to pgx/v5 and releases an updated binary, rebuild + # the Docker image and remove this suppression. + # + # Risk Assessment: ACCEPTED (Non-exploitable in Charon context + no upstream fix path) + # - The vulnerability requires a malicious PostgreSQL server response. Charon uses SQLite + # internally and does not run PostgreSQL. CrowdSec's database path is not exposed to + # external traffic in a standard Charon deployment. + # - The attack requires a compromised database server, which would imply full host compromise. + # + # Mitigation (active while suppression is in effect): + # - Monitor CrowdSec releases for pgx/v5 migration: + # https://github.com/crowdsecurity/crowdsec/releases + # - Weekly CI security rebuild flags the moment a fixed image ships. 
+ # + # Review: + # - Reviewed 2026-03-19 (initial suppression): pgproto3/v2 is EOL; no fix exists or will exist. + # Waiting on CrowdSec to migrate to pgx/v5. Set 30-day review. + # - Next review: 2026-04-19. Remove suppression once CrowdSec ships with pgx/v5. + # + # Removal Criteria: + # - CrowdSec releases a version with pgx/v5 (pgproto3/v3) replacing pgproto3/v2 + # - Rebuild Docker image, run security-scan-docker-image, confirm finding is resolved + # - Remove this entry and the corresponding .trivyignore entry simultaneously + # + # References: + # - GHSA-jqcq-xjh3-6g23: https://github.com/advisories/GHSA-jqcq-xjh3-6g23 + # - pgproto3/v2 archive notice: https://github.com/jackc/pgproto3 + # - pgx/v5 (replacement): https://github.com/jackc/pgx + # - CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases + - vulnerability: GHSA-jqcq-xjh3-6g23 + package: + name: github.com/jackc/pgproto3/v2 + version: "v2.3.3" + type: go-module + reason: | + HIGH — DoS panic via negative field length in pgproto3/v2 v2.3.3 embedded in CrowdSec binaries. + pgproto3/v2 is archived/EOL with no fix planned; fix path requires CrowdSec to migrate to pgx/v5. + Charon uses SQLite, not PostgreSQL; this code path is not reachable in a standard deployment. + Risk accepted; no remediation until CrowdSec ships with pgx/v5. + Reviewed 2026-03-19: pgproto3/v2 EOL confirmed; CrowdSec has not migrated to pgx/v5 yet. + expiry: "2026-04-19" # 30-day review: no fix path until CrowdSec migrates to pgx/v5. + + # Action items when this suppression expires: + # 1. Check CrowdSec releases for pgx/v5 migration: + # https://github.com/crowdsecurity/crowdsec/releases + # 2. Verify with: `go version -m /path/to/crowdsec | grep pgproto3` + # Expected: pgproto3/v3 (or no pgproto3 reference if fully replaced) + # 3. If CrowdSec has migrated: + # a. Rebuild Docker image and run local security-scan-docker-image + # b. Remove this suppression entry and the corresponding .trivyignore entry + # 4. 
If not yet migrated: Extend expiry by 30 days and update the review comment above + # 5. If extended 3+ times: Open an upstream issue on crowdsecurity/crowdsec requesting pgx/v5 migration + + # GHSA-x6gf-mpr2-68h6 / CVE-2026-4427: pgproto3/v2 DataRow.Decode panic on negative field length (DoS) + # Severity: HIGH (CVSS 7.5) + # Package: github.com/jackc/pgproto3/v2 v2.3.3 (embedded in /usr/local/bin/crowdsec and /usr/local/bin/cscli) + # Status: NO fix in pgproto3/v2 (archived/EOL) — fix path requires CrowdSec to migrate to pgx/v5 + # Note: This is the NVD/Red Hat advisory alias for the same underlying vulnerability as GHSA-jqcq-xjh3-6g23 + # + # Vulnerability Details: + # - DataRow.Decode does not validate field lengths; a malicious or compromised PostgreSQL server + # can send a negative field length causing a slice-bounds panic — denial of service (CWE-129). + # - CVSSv3: AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H (CVSS 7.5) + # + # Root Cause (EOL Module + Third-Party Binary): + # - Same underlying vulnerability as GHSA-jqcq-xjh3-6g23; tracked separately by NVD/Red Hat as CVE-2026-4427. + # - Charon does not use pgproto3/v2 directly nor communicate with PostgreSQL. The package + # is compiled into CrowdSec binaries for their internal database communication. + # - The pgproto3/v2 module is archived and EOL; no fix will be released. The fix path + # is migration to pgx/v5, which embeds an updated pgproto3/v3. + # - Fix path: once CrowdSec migrates to pgx/v5 and releases an updated binary, rebuild + # the Docker image and remove this suppression. + # + # Risk Assessment: ACCEPTED (Non-exploitable in Charon context + no upstream fix path) + # - The vulnerability requires a malicious PostgreSQL server response. Charon uses SQLite + # internally and does not run PostgreSQL. CrowdSec's database path is not exposed to + # external traffic in a standard Charon deployment. + # - The attack requires a compromised database server, which would imply full host compromise. 
+ # + # Mitigation (active while suppression is in effect): + # - Monitor CrowdSec releases for pgx/v5 migration: + # https://github.com/crowdsecurity/crowdsec/releases + # - Weekly CI security rebuild flags the moment a fixed image ships. + # + # Review: + # - Reviewed 2026-03-21 (initial suppression): pgproto3/v2 is EOL; no fix exists or will exist. + # Waiting on CrowdSec to migrate to pgx/v5. Set 30-day review. Sibling GHSA-jqcq-xjh3-6g23 + # was already suppressed; this alias surfaced as a separate Grype match via NVD/Red Hat tracking. + # - Next review: 2026-04-21. Remove suppression once CrowdSec ships with pgx/v5. + # + # Removal Criteria: + # - Same as GHSA-jqcq-xjh3-6g23: CrowdSec releases a version with pgx/v5 replacing pgproto3/v2 + # - Rebuild Docker image, run security-scan-docker-image, confirm both advisories are resolved + # - Remove this entry, GHSA-jqcq-xjh3-6g23 entry, and both .trivyignore entries simultaneously + # + # References: + # - GHSA-x6gf-mpr2-68h6: https://github.com/advisories/GHSA-x6gf-mpr2-68h6 + # - CVE-2026-4427: https://nvd.nist.gov/vuln/detail/CVE-2026-4427 + # - Red Hat: https://access.redhat.com/security/cve/CVE-2026-4427 + # - pgproto3/v2 archive notice: https://github.com/jackc/pgproto3 + # - pgx/v5 (replacement): https://github.com/jackc/pgx + # - CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases + - vulnerability: GHSA-x6gf-mpr2-68h6 + package: + name: github.com/jackc/pgproto3/v2 + version: "v2.3.3" + type: go-module + reason: | + HIGH — DoS panic via negative field length in pgproto3/v2 v2.3.3 embedded in CrowdSec binaries. + NVD/Red Hat alias (CVE-2026-4427) for the same underlying bug as GHSA-jqcq-xjh3-6g23. + pgproto3/v2 is archived/EOL with no fix planned; fix path requires CrowdSec to migrate to pgx/v5. + Charon uses SQLite, not PostgreSQL; this code path is not reachable in a standard deployment. + Risk accepted; no remediation until CrowdSec ships with pgx/v5. 
+ Reviewed 2026-03-21: pgproto3/v2 EOL confirmed; CrowdSec has not migrated to pgx/v5 yet. + expiry: "2026-04-21" # 30-day review: no fix path until CrowdSec migrates to pgx/v5. + + # Action items when this suppression expires: + # 1. Check CrowdSec releases for pgx/v5 migration: + # https://github.com/crowdsecurity/crowdsec/releases + # 2. Verify with: `go version -m /path/to/crowdsec | grep pgproto3` + # Expected: pgproto3/v3 (or no pgproto3 reference if fully replaced) + # 3. If CrowdSec has migrated: + # a. Rebuild Docker image and run local security-scan-docker-image + # b. Remove this entry, GHSA-jqcq-xjh3-6g23 entry, and both .trivyignore entries + # 4. If not yet migrated: Extend expiry by 30 days and update the review comment above + # 5. If extended 3+ times: Open an upstream issue on crowdsecurity/crowdsec requesting pgx/v5 migration # Match exclusions (patterns to ignore during scanning) # Use sparingly - prefer specific CVE suppressions above diff --git a/.trivyignore b/.trivyignore index fa6966bb..199b38ec 100644 --- a/.trivyignore +++ b/.trivyignore @@ -14,3 +14,67 @@ CVE-2026-25793 # Charon does not use untgz or process untrusted tar archives. Review by: 2026-03-14 # See also: .grype.yaml for full justification CVE-2026-22184 + +# CVE-2026-27171: zlib CPU spin via crc32_combine64 infinite loop (DoS) +# Severity: MEDIUM (CVSS 5.5 NVD / 2.9 MITRE) — Package: zlib 1.3.1-r2 in Alpine base image +# Fix requires zlib >= 1.3.2. No upstream fix available: Alpine 3.23 still ships zlib 1.3.1-r2. +# Attack requires local access (AV:L); the vulnerable code path is not reachable via Charon's +# network-facing surface. Non-blocking by CI policy (MEDIUM). 
Review by: 2026-04-21 +# exp: 2026-04-21 +CVE-2026-27171 + +# CVE-2026-2673: OpenSSL TLS 1.3 server key exchange group downgrade (libcrypto3/libssl3) +# Severity: HIGH (CVSS 7.5) — Packages: libcrypto3 3.5.5-r0 and libssl3 3.5.5-r0 in Alpine base image +# No upstream fix available: Alpine 3.23 still ships libcrypto3/libssl3 3.5.5-r0 as of 2026-03-18. +# When DEFAULT is in TLS 1.3 group config, server may select a weaker key exchange group. +# Charon terminates TLS at the Caddy layer — the Go backend does not act as a raw TLS 1.3 server. +# Review by: 2026-04-18 +# See also: .grype.yaml for full justification +# exp: 2026-04-18 +CVE-2026-2673 + +# CVE-2026-33186 / GHSA-p77j-4mvh-x3m3: gRPC-Go authorization bypass via missing leading slash +# Severity: CRITICAL (CVSS 9.1) — Package: google.golang.org/grpc, embedded in CrowdSec (v1.74.2) and Caddy (v1.79.1) +# Fix exists at v1.79.3 — Charon's own dep is patched. Waiting on CrowdSec and Caddy upstream releases. +# CrowdSec's and Caddy's grpc servers are not exposed externally in a standard Charon deployment. +# Review by: 2026-04-02 +# See also: .grype.yaml for full justification +# exp: 2026-04-02 +CVE-2026-33186 + +# GHSA-479m-364c-43vc: goxmldsig XML signature validation bypass (loop variable capture) +# Severity: HIGH (CVSS 7.5) — Package: github.com/russellhaering/goxmldsig v1.5.0, embedded in /usr/bin/caddy +# Fix exists at v1.6.0 — waiting on Caddy upstream (or caddy-security plugin) to release with patched goxmldsig. +# Charon does not configure SAML-based SSO by default; the vulnerable path is not reachable in a standard deployment. 
+# Review by: 2026-04-02 +# See also: .grype.yaml for full justification +# exp: 2026-04-02 +GHSA-479m-364c-43vc + +# GHSA-6g7g-w4f8-9c9x: buger/jsonparser Delete panic on malformed JSON (DoS) +# Severity: HIGH (CVSS 7.5) — Package: github.com/buger/jsonparser v1.1.1, embedded in CrowdSec binaries +# No upstream fix available as of 2026-03-19 (issue #275 open, golang/vulndb #4514 open). +# Charon does not use this package; the vector requires reaching CrowdSec's internal processing pipeline. +# Review by: 2026-04-19 +# See also: .grype.yaml for full justification +# exp: 2026-04-19 +GHSA-6g7g-w4f8-9c9x + +# GHSA-jqcq-xjh3-6g23: pgproto3/v2 DataRow.Decode panic on negative field length (DoS) +# Severity: HIGH (CVSS 7.5) — Package: github.com/jackc/pgproto3/v2 v2.3.3, embedded in CrowdSec binaries +# pgproto3/v2 is archived/EOL — no fix will be released. Fix path requires CrowdSec to migrate to pgx/v5. +# Charon uses SQLite; the PostgreSQL code path is not reachable in a standard deployment. +# Review by: 2026-04-19 +# See also: .grype.yaml for full justification +# exp: 2026-04-19 +GHSA-jqcq-xjh3-6g23 + +# GHSA-x6gf-mpr2-68h6 / CVE-2026-4427: pgproto3/v2 DataRow.Decode panic on negative field length (DoS) +# Severity: HIGH (CVSS 7.5) — Package: github.com/jackc/pgproto3/v2 v2.3.3, embedded in CrowdSec binaries +# NVD/Red Hat alias (CVE-2026-4427) for the same underlying bug as GHSA-jqcq-xjh3-6g23. +# pgproto3/v2 is archived/EOL — no fix will be released. Fix path requires CrowdSec to migrate to pgx/v5. +# Charon uses SQLite; the PostgreSQL code path is not reachable in a standard deployment. 
+# Review by: 2026-04-21 +# See also: .grype.yaml for full justification +# exp: 2026-04-21 +GHSA-x6gf-mpr2-68h6 diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index 4a5f57b8..55d2aa54 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -139,15 +139,15 @@ graph TB | Component | Technology | Version | Purpose | |-----------|-----------|---------|---------| | **Framework** | React | 19.2.3 | UI framework | -| **Language** | TypeScript | 5.x | Type-safe JavaScript | -| **Build Tool** | Vite | 6.1.9 | Fast bundler and dev server | -| **CSS Framework** | Tailwind CSS | 3.x | Utility-first CSS | +| **Language** | TypeScript | 6.x | Type-safe JavaScript | +| **Build Tool** | Vite | 8.0.0-beta.18 | Fast bundler and dev server | +| **CSS Framework** | Tailwind CSS | 4.2.1 | Utility-first CSS | | **Routing** | React Router | 7.x | Client-side routing | | **HTTP Client** | Fetch API | Native | API communication | | **State Management** | React Hooks + Context | Native | Global state | | **Internationalization** | i18next | Latest | 5 language support | -| **Unit Testing** | Vitest | 2.x | Fast unit test runner | -| **E2E Testing** | Playwright | 1.50.x | Browser automation | +| **Unit Testing** | Vitest | 4.1.0-beta.6 | Fast unit test runner | +| **E2E Testing** | Playwright | 1.58.2 | Browser automation | ### Infrastructure @@ -218,7 +218,7 @@ graph TB │ │ └── main.tsx # Application entry point │ ├── public/ # Static assets │ ├── package.json # NPM dependencies -│ └── vite.config.js # Vite configuration +│ └── vite.config.ts # Vite configuration │ ├── .docker/ # Docker configuration │ ├── compose/ # Docker Compose files @@ -306,11 +306,13 @@ graph TB **Key Modules:** #### API Layer (`internal/api/`) + - **Handlers:** Process HTTP requests, validate input, return responses - **Middleware:** CORS, GZIP, authentication, logging, metrics, panic recovery - **Routes:** Route registration and grouping (public vs authenticated) **Example Endpoints:** + - `GET /api/v1/proxy-hosts` 
- List all proxy hosts - `POST /api/v1/proxy-hosts` - Create new proxy host - `PUT /api/v1/proxy-hosts/:id` - Update proxy host @@ -318,6 +320,7 @@ graph TB - `WS /api/v1/logs` - WebSocket for real-time logs #### Service Layer (`internal/services/`) + - **ProxyService:** CRUD operations for proxy hosts, validation logic - **CertificateService:** ACME certificate provisioning and renewal - **DockerService:** Container discovery and monitoring @@ -327,12 +330,14 @@ graph TB **Design Pattern:** Services contain business logic and call multiple repositories/managers #### Caddy Manager (`internal/caddy/`) + - **Manager:** Orchestrates Caddy configuration updates - **Config Builder:** Generates Caddy JSON from database models - **Reload Logic:** Atomic config application with rollback on failure - **Security Integration:** Injects Cerberus middleware into Caddy pipelines **Responsibilities:** + 1. Generate Caddy JSON configuration from database state 2. Validate configuration before applying 3. Trigger Caddy reload via JSON API @@ -340,22 +345,26 @@ graph TB 5. 
Integrate security layers (WAF, ACL, Rate Limiting) #### Security Suite (`internal/cerberus/`) + - **ACL (Access Control Lists):** IP-based allow/deny rules, GeoIP blocking - **WAF (Web Application Firewall):** Coraza engine with OWASP CRS - **CrowdSec:** Behavior-based threat detection with global intelligence - **Rate Limiter:** Per-IP request throttling **Integration Points:** + - Middleware injection into Caddy request pipeline - Database-driven rule configuration - Metrics collection for security events #### Database Layer (`internal/database/`) + - **Migrations:** Automatic schema versioning with GORM AutoMigrate - **Seeding:** Default settings and admin user creation - **Connection Management:** SQLite with WAL mode and connection pooling **Schema Overview:** + - **ProxyHost:** Domain, upstream target, SSL config - **RemoteServer:** Upstream server definitions - **CaddyConfig:** Generated Caddy configuration (audit trail) @@ -372,6 +381,7 @@ graph TB **Component Architecture:** #### Pages (`src/pages/`) + - **Dashboard:** System overview, recent activity, quick actions - **ProxyHosts:** List, create, edit, delete proxy configurations - **Certificates:** Manage SSL/TLS certificates, view expiry @@ -380,17 +390,20 @@ graph TB - **Users:** User management (admin only) #### Components (`src/components/`) + - **Forms:** Reusable form inputs with validation - **Modals:** Dialog components for CRUD operations - **Tables:** Data tables with sorting, filtering, pagination - **Layout:** Header, sidebar, navigation #### API Client (`src/api/`) + - Centralized API calls with error handling - Request/response type definitions - Authentication token management **Example:** + ```typescript export const getProxyHosts = async (): Promise => { const response = await fetch('/api/v1/proxy-hosts', { @@ -402,11 +415,13 @@ export const getProxyHosts = async (): Promise => { ``` #### State Management + - **React Context:** Global state for auth, theme, language - **Local State:** 
Component-specific state with `useState` - **Custom Hooks:** Encapsulate API calls and side effects **Example Hook:** + ```typescript export const useProxyHosts = () => { const [hosts, setHosts] = useState([]); @@ -425,11 +440,13 @@ export const useProxyHosts = () => { **Purpose:** High-performance reverse proxy with automatic HTTPS **Integration:** + - Embedded as a library in the Go backend - Configured via JSON API (not Caddyfile) - Listens on ports 80 (HTTP) and 443 (HTTPS) **Features Used:** + - Dynamic configuration updates without restarts - Automatic HTTPS with Let's Encrypt and ZeroSSL - DNS challenge support for wildcard certificates @@ -437,6 +454,7 @@ export const useProxyHosts = () => { - Request logging and metrics **Configuration Flow:** + 1. User creates proxy host via frontend 2. Backend validates and saves to database 3. Caddy Manager generates JSON configuration @@ -461,12 +479,14 @@ For each proxy host, Charon generates **two routes** with the same domain: - Handlers: Full Cerberus security suite This pattern is **intentional and valid**: + - Emergency route provides break-glass access to security controls - Main route protects application with enterprise security features - Caddy processes routes in order (emergency matches first) - Validator allows duplicate hosts when one has paths and one doesn't **Example:** + ```json // Emergency Route (evaluated first) { @@ -488,6 +508,7 @@ This pattern is **intentional and valid**: **Purpose:** Persistent data storage **Why SQLite:** + - Embedded (no external database server) - Serverless (perfect for single-user/small team) - ACID compliant with WAL mode @@ -495,16 +516,19 @@ This pattern is **intentional and valid**: - Backup-friendly (single file) **Configuration:** + - **WAL Mode:** Allows concurrent reads during writes - **Foreign Keys:** Enforced referential integrity - **Pragma Settings:** Performance optimizations **Backup Strategy:** + - Automated daily backups to `data/backups/` - Retention: 7 
daily, 4 weekly, 12 monthly backups - Backup during low-traffic periods **Migrations:** + - GORM AutoMigrate for schema changes - Manual migrations for complex data transformations - Rollback support via backup restoration @@ -537,6 +561,7 @@ graph LR **Purpose:** Prevent brute-force attacks and API abuse **Implementation:** + - Per-IP request counters with sliding window - Configurable thresholds (e.g., 100 req/min, 1000 req/hour) - HTTP 429 response when limit exceeded @@ -547,12 +572,14 @@ graph LR **Purpose:** Behavior-based threat detection **Features:** + - Local log analysis (brute-force, port scans, exploits) - Global threat intelligence (crowd-sourced IP reputation) - Automatic IP banning with configurable duration - Decision management API (view, create, delete bans) **Modes:** + - **Local Only:** No external API calls - **API Mode:** Sync with CrowdSec cloud for global intelligence @@ -561,12 +588,14 @@ graph LR **Purpose:** IP-based access control **Features:** + - Per-proxy-host allow/deny rules - CIDR range support (e.g., `192.168.1.0/24`) - Geographic blocking via GeoIP2 (MaxMind) - Admin whitelist (emergency access) **Evaluation Order:** + 1. Check admin whitelist (always allow) 2. Check deny list (explicit block) 3. Check allow list (explicit allow) @@ -579,6 +608,7 @@ graph LR **Engine:** Coraza with OWASP Core Rule Set (CRS) **Detection Categories:** + - SQL Injection (SQLi) - Cross-Site Scripting (XSS) - Remote Code Execution (RCE) @@ -587,12 +617,14 @@ graph LR - Command Injection **Modes:** + - **Monitor:** Log but don't block (testing) - **Block:** Return HTTP 403 for violations ### Layer 5: Application Security **Additional Protections:** + - **SSRF Prevention:** Block requests to private IP ranges in webhooks/URL validation - **HTTP Security Headers:** CSP, HSTS, X-Frame-Options, X-Content-Type-Options - **Input Validation:** Server-side validation for all user inputs @@ -610,6 +642,7 @@ graph LR 3. 
**Direct Database Access:** Manual SQLite update as last resort **Emergency Token:** + - 64-character hex token set via `CHARON_EMERGENCY_TOKEN` - Grants temporary admin access - Rotated after each use @@ -635,6 +668,7 @@ Charon operates with **two distinct traffic flows** on separate ports, each with - **Testing:** Playwright E2E tests verify UI/UX functionality on this port **Why No Middleware?** + - Management interface must remain accessible even when security modules are misconfigured - Emergency endpoints (`/api/v1/emergency/*`) require unrestricted access for system recovery - Separation of concerns: admin access control is handled by JWT, not proxy-level security @@ -797,6 +831,7 @@ sequenceDiagram **Rationale:** Simplicity over scalability - target audience is home users and small teams **Container Contents:** + - Frontend static files (Vite build output) - Go backend binary - Embedded Caddy server @@ -911,11 +946,13 @@ services: ### High Availability Considerations **Current Limitations:** + - SQLite does not support clustering - Single point of failure (one container) - Not designed for horizontal scaling **Future Options:** + - PostgreSQL backend for HA deployments - Read replicas for load balancing - Container orchestration (Kubernetes, Docker Swarm) @@ -927,6 +964,7 @@ services: ### Local Development Setup 1. **Prerequisites:** + ```bash - Go 1.26+ (backend development) - Node.js 23+ and npm (frontend development) @@ -935,12 +973,14 @@ services: ``` 2. **Clone Repository:** + ```bash git clone https://github.com/Wikid82/Charon.git cd Charon ``` 3. **Backend Development:** + ```bash cd backend go mod download @@ -949,6 +989,7 @@ services: ``` 4. **Frontend Development:** + ```bash cd frontend npm install @@ -957,6 +998,7 @@ services: ``` 5. 
**Full-Stack Development (Docker):** + ```bash docker-compose -f .docker/compose/docker-compose.dev.yml up # Frontend + Backend + Caddy in one container @@ -965,12 +1007,14 @@ services: ### Git Workflow **Branch Strategy:** + - `main`: Stable production branch - `feature/*`: New feature development - `fix/*`: Bug fixes - `chore/*`: Maintenance tasks **Commit Convention:** + - `feat:` New user-facing feature - `fix:` Bug fix in application code - `chore:` Infrastructure, CI/CD, dependencies @@ -979,6 +1023,7 @@ services: - `test:` Adding or updating tests **Example:** + ``` feat: add DNS-01 challenge support for Cloudflare @@ -1031,6 +1076,7 @@ Closes #123 **Purpose:** Validate critical user flows in a real browser **Scope:** + - User authentication - Proxy host CRUD operations - Certificate provisioning @@ -1038,6 +1084,7 @@ Closes #123 - Real-time log streaming **Execution:** + ```bash # Run against Docker container npx playwright test --project=chromium @@ -1050,10 +1097,12 @@ npx playwright test --debug ``` **Coverage Modes:** + - **Docker Mode:** Integration testing, no coverage (0% reported) - **Vite Dev Mode:** Coverage collection with V8 inspector **Why Two Modes?** + - Playwright coverage requires source maps and raw source files - Docker serves pre-built production files (no source maps) - Vite dev server exposes source files for coverage instrumentation @@ -1067,6 +1116,7 @@ npx playwright test --debug **Coverage Target:** 85% minimum **Execution:** + ```bash # Run all tests go test ./... @@ -1079,11 +1129,13 @@ go test -cover ./... 
``` **Test Organization:** + - `*_test.go` files alongside source code - Table-driven tests for comprehensive coverage - Mocks for external dependencies (database, HTTP clients) **Example:** + ```go func TestCreateProxyHost(t *testing.T) { tests := []struct { @@ -1123,6 +1175,7 @@ func TestCreateProxyHost(t *testing.T) { **Coverage Target:** 85% minimum **Execution:** + ```bash # Run all tests npm test @@ -1135,6 +1188,7 @@ npm run test:coverage ``` **Test Organization:** + - `*.test.tsx` files alongside components - Mock API calls with MSW (Mock Service Worker) - Snapshot tests for UI consistency @@ -1146,12 +1200,14 @@ npm run test:coverage **Location:** `backend/integration/` **Scope:** + - API endpoint end-to-end flows - Database migrations - Caddy manager integration - CrowdSec API calls **Execution:** + ```bash go test ./integration/... ``` @@ -1161,6 +1217,7 @@ go test ./integration/... **Automated Hooks (via `.pre-commit-config.yaml`):** **Fast Stage (< 5 seconds):** + - Trailing whitespace removal - EOF fixer - YAML syntax check @@ -1168,11 +1225,13 @@ go test ./integration/... - Markdown link validation **Manual Stage (run explicitly):** + - Backend coverage tests (60-90s) - Frontend coverage tests (30-60s) - TypeScript type checking (10-20s) **Why Manual?** + - Coverage tests are slow and would block commits - Developers run them on-demand before pushing - CI enforces coverage on pull requests @@ -1180,10 +1239,12 @@ go test ./integration/... ### Continuous Integration (GitHub Actions) **Workflow Triggers:** + - `push` to `main`, `feature/*`, `fix/*` - `pull_request` to `main` **CI Jobs:** + 1. **Lint:** golangci-lint, ESLint, markdownlint, hadolint 2. **Test:** Go tests, Vitest, Playwright 3. **Security:** Trivy, CodeQL, Grype, Govulncheck @@ -1205,6 +1266,7 @@ go test ./integration/... - **PRERELEASE:** `-beta.1`, `-rc.1`, etc. 
**Examples:** + - `1.0.0` - Stable release - `1.1.0` - New feature (DNS provider support) - `1.1.1` - Bug fix (GORM query fix) @@ -1215,12 +1277,14 @@ go test ./integration/... ### Build Pipeline (Multi-Platform) **Platforms Supported:** + - `linux/amd64` - `linux/arm64` **Build Process:** 1. **Frontend Build:** + ```bash cd frontend npm ci --only=production @@ -1229,6 +1293,7 @@ go test ./integration/... ``` 2. **Backend Build:** + ```bash cd backend go build -o charon cmd/api/main.go @@ -1236,6 +1301,7 @@ go test ./integration/... ``` 3. **Docker Image Build:** + ```bash docker buildx build \ --platform linux/amd64,linux/arm64 \ @@ -1292,6 +1358,7 @@ go test ./integration/... - Level: SLSA Build L3 (hermetic builds) **Verification Example:** + ```bash # Verify image signature cosign verify \ @@ -1309,6 +1376,7 @@ grype ghcr.io/wikid82/charon@sha256: ### Rollback Strategy **Container Rollback:** + ```bash # List available versions docker images wikid82/charon @@ -1319,6 +1387,7 @@ docker-compose up -d --pull always wikid82/charon:1.1.1 ``` **Database Rollback:** + ```bash # Restore from backup docker exec charon /app/scripts/restore-backup.sh \ @@ -1355,11 +1424,13 @@ docker exec charon /app/scripts/restore-backup.sh \ ### API Extensibility **REST API Design:** + - Version prefix: `/api/v1/` - Future versions: `/api/v2/` (backward-compatible) - Deprecation policy: 2 major versions supported **WebHooks (Future):** + - Event notifications for external systems - Triggers: Proxy host created, certificate renewed, security event - Payload: JSON with event type and data @@ -1369,6 +1440,7 @@ docker exec charon /app/scripts/restore-backup.sh \ **Current:** Cerberus security middleware injected into Caddy pipeline **Future:** + - User-defined middleware (rate limiting rules, custom headers) - JavaScript/Lua scripting for request transformation - Plugin marketplace for community contributions @@ -1452,6 +1524,7 @@ docker exec charon /app/scripts/restore-backup.sh \ 
**GitHub Copilot Instructions:** All agents (`Planning`, `Backend_Dev`, `Frontend_Dev`, `DevOps`) must reference `ARCHITECTURE.md` when: + - Creating new components - Modifying core systems - Changing integration points diff --git a/CHANGELOG.md b/CHANGELOG.md index ea12fcb1..edcc6bd2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,17 +7,44 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Added + +- **Notifications:** Added Ntfy notification provider with support for self-hosted and cloud instances, optional Bearer token authentication, and JSON template customization + +- **Certificate Deletion**: Clean up expired and unused certificates directly from the Certificates page + - Expired Let's Encrypt certificates not attached to any proxy host can now be deleted + - Custom and staging certificates remain deletable when not in use + - In-use certificates show a disabled delete button with a tooltip explaining why + - Native browser confirmation replaced with an accessible, themed confirmation dialog + +- **Pushover Notification Provider**: Send push notifications to your devices via the Pushover app + - Supports JSON templates (minimal, detailed, custom) + - Application API Token stored securely — never exposed in API responses + - User Key stored in the URL field, following the same pattern as Telegram + - Feature flag: `feature.notifications.service.pushover.enabled` (on by default) + - Emergency priority (2) is intentionally unsupported — deferred to a future release + +- **Slack Notification Provider**: Send alerts to Slack channels via Incoming Webhooks + - Supports JSON templates (minimal, detailed, custom) with Slack's native `text` format + - Webhook URL stored securely — never exposed in API responses + - Optional channel display name for easy identification in provider list + - Feature flag: `feature.notifications.service.slack.enabled` (on by default) + - See [Notification 
Guide](docs/features/notifications.md) for setup instructions + ### CI/CD + - **Supply Chain**: Optimized verification workflow to prevent redundant builds - Change: Removed direct Push/PR triggers; now waits for 'Docker Build' via `workflow_run` ### Security + - **Supply Chain**: Enhanced PR verification workflow stability and accuracy - **Vulnerability Reporting**: Eliminated false negatives ("0 vulnerabilities") by enforcing strict failure conditions - **Tooling**: Switched to manual Grype installation ensuring usage of latest stable binary - **Observability**: Improved debugging visibility for vulnerability scans and SARIF generation ### Performance + - **E2E Tests**: Reduced feature flag API calls by 90% through conditional polling optimization (Phase 2) - Conditional skip: Exits immediately if flags already in expected state (~50% of cases) - Request coalescing: Shares in-flight API requests between parallel test workers @@ -29,6 +56,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Prevents timeout errors in Firefox/WebKit caused by strict label matching ### Fixed + +- **Notifications:** Fixed Pushover token-clearing bug where tokens were silently stripped on provider create/update +- **TCP Monitor Creation**: Fixed misleading form UX that caused silent HTTP 500 errors when creating TCP monitors + - Corrected URL placeholder to show `host:port` format instead of the incorrect `tcp://host:port` prefix + - Added dynamic per-type placeholder and helper text (HTTP monitors show a full URL example; TCP monitors show `host:port`) + - Added client-side validation that blocks form submission when a scheme prefix (e.g. 
`tcp://`) is detected, with an inline error message + - Reordered form fields so the monitor type selector appears above the URL input, making the dynamic helper text immediately relevant + - i18n: Added 5 new translation keys across en, de, fr, es, and zh locales +- **CI: Rate Limit Integration Tests**: Hardened test script reliability — login now validates HTTP status, Caddy admin API readiness gated on `/config/` poll, security config failures are fatal with full diagnostics, and poll interval increased to 5s +- **CI: Rate Limit Integration Tests**: Removed stale GeoIP database SHA256 checksum from Dockerfile non-CI path (hash was perpetually stale due to weekly upstream updates) +- **CI: Rate Limit Integration Tests**: Fixed Caddy admin API debug dump URL to use canonical trailing slash in workflow - Fixed: Added robust validation and debug logging for Docker image tags to prevent invalid reference errors. - Fixed: Removed log masking for image references and added manifest validation to debug CI failures. - **Proxy Hosts**: Fixed ACL and Security Headers dropdown selections so create/edit saves now keep the selected values (including clearing to none) after submit and reload. @@ -41,6 +79,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - **Test Performance**: Reduced system settings test execution time by 31% (from 23 minutes to 16 minutes) ### Changed + - **Testing Infrastructure**: Enhanced E2E test helpers with better synchronization and error handling - **CI**: Optimized E2E workflow shards [Reduced from 4 to 3] diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 963bd4d2..422b8534 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -45,8 +45,6 @@ brew install lefthook go install github.com/evilmartians/lefthook@latest ``` - - ```bash # Option 1: Homebrew (macOS/Linux) brew install golangci-lint @@ -84,17 +82,20 @@ For local development, install go 1.26.0+ from [go.dev/dl](https://go.dev/dl/). 
When the project's Go version is updated (usually by Renovate): 1. **Pull the latest changes** + ```bash git pull ``` 2. **Update your local Go installation** + ```bash # Run the Go update skill (downloads and installs the new version) .github/skills/scripts/skill-runner.sh utility-update-go-version ``` 3. **Rebuild your development tools** + ```bash # This fixes lefthook hook errors and IDE issues ./scripts/rebuild-go-tools.sh diff --git a/Dockerfile b/Dockerfile index 30d61bb5..a435aa5f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -23,9 +23,13 @@ ARG CROWDSEC_RELEASE_SHA256=704e37121e7ac215991441cef0d8732e33fa3b1a2b2b88b53a0b # ---- Shared Go Security Patches ---- # renovate: datasource=go depName=github.com/expr-lang/expr -ARG EXPR_LANG_VERSION=1.17.7 +ARG EXPR_LANG_VERSION=1.17.8 # renovate: datasource=go depName=golang.org/x/net -ARG XNET_VERSION=0.51.0 +ARG XNET_VERSION=0.52.0 +# renovate: datasource=go depName=github.com/smallstep/certificates +ARG SMALLSTEP_CERTIFICATES_VERSION=0.30.0 +# renovate: datasource=npm depName=npm +ARG NPM_VERSION=11.11.1 # Allow pinning Caddy version - Renovate will update this # Build the most recent Caddy 2.x release (keeps major pinned under v3). 
@@ -39,7 +43,7 @@ ARG CADDY_CANDIDATE_VERSION=2.11.2 ARG CADDY_USE_CANDIDATE=0 ARG CADDY_PATCH_SCENARIO=B # renovate: datasource=go depName=github.com/greenpau/caddy-security -ARG CADDY_SECURITY_VERSION=1.1.45 +ARG CADDY_SECURITY_VERSION=1.1.51 # renovate: datasource=go depName=github.com/corazawaf/coraza-caddy ARG CORAZA_CADDY_VERSION=2.2.0 ## When an official caddy image tag isn't available on the host, use a @@ -99,9 +103,12 @@ ARG VERSION=dev # Make version available to Vite as VITE_APP_VERSION during the frontend build ENV VITE_APP_VERSION=${VERSION} -# Set environment to bypass native binary requirement for cross-arch builds -ENV npm_config_rollup_skip_nodejs_native=1 \ - ROLLUP_SKIP_NODEJS_NATIVE=1 +# Vite 8: Rolldown native bindings auto-resolved per platform via optionalDependencies +ARG NPM_VERSION +# hadolint ignore=DL3017 +RUN apk upgrade --no-cache && \ + npm install -g npm@${NPM_VERSION} --no-fund --no-audit && \ + npm cache clean --force RUN npm ci @@ -226,6 +233,7 @@ ARG CORAZA_CADDY_VERSION ARG XCADDY_VERSION=0.4.5 ARG EXPR_LANG_VERSION ARG XNET_VERSION +ARG SMALLSTEP_CERTIFICATES_VERSION # hadolint ignore=DL3018 RUN apk add --no-cache bash git @@ -274,6 +282,20 @@ RUN --mount=type=cache,target=/root/.cache/go-build \ # renovate: datasource=go depName=github.com/hslatman/ipstore go get github.com/hslatman/ipstore@v0.4.0; \ go get golang.org/x/net@v${XNET_VERSION}; \ + # CVE-2026-33186 (GHSA-p77j-4mvh-x3m3): gRPC-Go auth bypass via missing leading slash + # Fix available at v1.79.3. Pin here so the Caddy binary is patched immediately; + # remove once Caddy ships a release built with grpc >= v1.79.3. + # renovate: datasource=go depName=google.golang.org/grpc + go get google.golang.org/grpc@v1.79.3; \ + # GHSA-479m-364c-43vc: goxmldsig XML signature validation bypass (loop variable capture) + # Fix available at v1.6.0. Pin here so the Caddy binary is patched immediately; + # remove once caddy-security ships a release built with goxmldsig >= v1.6.0. 
+ # renovate: datasource=go depName=github.com/russellhaering/goxmldsig + go get github.com/russellhaering/goxmldsig@v1.6.0; \ + # CVE-2026-30836: smallstep/certificates 0.30.0-rc3 vulnerability + # Fix available at v0.30.0. Pin here so the Caddy binary is patched immediately; + # remove once caddy-security ships a release built with smallstep/certificates >= v0.30.0. + go get github.com/smallstep/certificates@v${SMALLSTEP_CERTIFICATES_VERSION}; \ if [ "${CADDY_PATCH_SCENARIO}" = "A" ]; then \ # Rollback scenario: keep explicit nebula pin if upstream compatibility regresses. # NOTE: smallstep/certificates (pulled by caddy-security stack) currently @@ -338,6 +360,11 @@ RUN git clone --depth 1 --branch "v${CROWDSEC_VERSION}" https://github.com/crowd RUN go get github.com/expr-lang/expr@v${EXPR_LANG_VERSION} && \ go get golang.org/x/crypto@v0.46.0 && \ go get golang.org/x/net@v${XNET_VERSION} && \ + # CVE-2026-33186 (GHSA-p77j-4mvh-x3m3): gRPC-Go auth bypass via missing leading slash + # Fix available at v1.79.3. Pin here so the CrowdSec binary is patched immediately; + # remove once CrowdSec ships a release built with grpc >= v1.79.3. 
+ # renovate: datasource=go depName=google.golang.org/grpc + go get google.golang.org/grpc@v1.79.3 && \ go mod tidy # Fix compatibility issues with expr-lang v1.17.7 @@ -410,11 +437,11 @@ WORKDIR /app # Install runtime dependencies for Charon, including bash for maintenance scripts # Note: gosu is now built from source (see gosu-builder stage) to avoid CVEs from Debian's pre-compiled version # Explicitly upgrade packages to fix security vulnerabilities -# binutils provides objdump for debug symbol detection in docker-entrypoint.sh # hadolint ignore=DL3018 RUN apk add --no-cache \ - bash ca-certificates sqlite-libs sqlite tzdata curl gettext libcap libcap-utils \ - c-ares binutils libc-utils busybox-extras + bash ca-certificates sqlite-libs sqlite tzdata gettext libcap libcap-utils \ + c-ares busybox-extras \ + && apk upgrade --no-cache zlib # Copy gosu binary from gosu-builder (built with Go 1.26+ to avoid stdlib CVEs) COPY --from=gosu-builder /gosu-out/gosu /usr/sbin/gosu @@ -433,10 +460,11 @@ SHELL ["/bin/ash", "-o", "pipefail", "-c"] # In CI, timeout quickly rather than retrying to save build time ARG GEOLITE2_COUNTRY_SHA256=c6549807950f93f609d6433fa295fa517fbdec0ad975a4aafba69c136d5d2347 RUN mkdir -p /app/data/geoip && \ - if [ -n "$CI" ]; then \ + if [ "$CI" = "true" ] || [ "$CI" = "1" ]; then \ echo "⏱️ CI detected - quick download (10s timeout, no retries)"; \ - if curl -fSL -m 10 "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" \ - -o /app/data/geoip/GeoLite2-Country.mmdb 2>/dev/null; then \ + if wget -qO /app/data/geoip/GeoLite2-Country.mmdb \ + -T 10 "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" 2>/dev/null \ + && [ -s /app/data/geoip/GeoLite2-Country.mmdb ]; then \ echo "✅ GeoIP downloaded"; \ else \ echo "⚠️ GeoIP skipped"; \ @@ -444,16 +472,12 @@ RUN mkdir -p /app/data/geoip && \ fi; \ else \ echo "Local - full download (30s timeout, 3 retries)"; \ - if curl -fSL -m 30 --retry 3 
"https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" \ - -o /app/data/geoip/GeoLite2-Country.mmdb; then \ - if echo "${GEOLITE2_COUNTRY_SHA256} /app/data/geoip/GeoLite2-Country.mmdb" | sha256sum -c -; then \ - echo "✅ GeoIP checksum verified"; \ - else \ - echo "⚠️ Checksum failed"; \ - touch /app/data/geoip/GeoLite2-Country.mmdb.placeholder; \ - fi; \ + if wget -qO /app/data/geoip/GeoLite2-Country.mmdb \ + -T 30 -t 4 "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" \ + && [ -s /app/data/geoip/GeoLite2-Country.mmdb ]; then \ + echo "✅ GeoIP downloaded"; \ else \ - echo "⚠️ Download failed"; \ + echo "⚠️ GeoIP download failed or empty — skipping"; \ touch /app/data/geoip/GeoLite2-Country.mmdb.placeholder; \ fi; \ fi @@ -579,8 +603,8 @@ EXPOSE 80 443 443/udp 2019 8080 # Security: Add healthcheck to monitor container health # Verifies the Charon API is responding correctly -HEALTHCHECK --interval=30s --timeout=3s --start-period=40s --retries=3 \ - CMD curl -f http://localhost:8080/api/v1/health || exit 1 +HEALTHCHECK --interval=30s --timeout=10s --start-period=15s --retries=3 \ + CMD wget -q -O /dev/null http://localhost:8080/api/v1/health || exit 1 # Create CrowdSec symlink as root before switching to non-root user # This symlink allows CrowdSec to use persistent storage at /app/data/crowdsec/config diff --git a/README.md b/README.md index 64f23ed8..776b95a6 100644 --- a/README.md +++ b/README.md @@ -94,6 +94,7 @@ services: retries: 3 start_period: 40s ``` + > **Docker Socket Access:** Charon runs as a non-root user. If you mount the Docker socket for container discovery, the container needs permission to read it. 
Find your socket's group ID and add it to the compose file: > > ```bash @@ -107,26 +108,34 @@ services: > - "998" > ``` -### 2️⃣ Generate encryption key: +### 2️⃣ Generate encryption key + ```bash openssl rand -base64 32 ``` -### 3️⃣ Start Charon: + +### 3️⃣ Start Charon + ```bash docker-compose up -d ``` -### 4️⃣ Access the dashboard: + +### 4️⃣ Access the dashboard + Open your browser and navigate to `http://localhost:8080` to access the dashboard and create your admin account. + ```code http://localhost:8080 ``` -### Getting Started: -Full setup instructions and documentation are available at [https://wikid82.github.io/Charon/docs/getting-started.html](https://wikid82.github.io/Charon/docs/getting-started.html). +### Getting Started + +Full setup instructions and documentation are available at [https://wikid82.github.io/Charon/docs/getting-started.html](https://wikid82.github.io/Charon/docs/getting-started.html). --- ## ✨ Top 10 Features ### 🎯 **Point & Click Management** + No config files. No terminal commands. Just click, type your domain name, and you're live. If you can use a website, you can run Charon. ### 🔐 **Automatic HTTPS Certificates** @@ -160,6 +169,7 @@ See exactly what's happening with live request logs, uptime monitoring, and inst ### 📥 **Migration Made Easy** Already invested in another reverse proxy? Bring your work with you by importing your existing configurations with one click: + - **Caddyfile** — Migrate from other Caddy setups - **Nginx** — Import from Nginx based configurations (Coming Soon) - **Traefik** - Import from Traefik based configurations (Coming Soon) diff --git a/SECURITY.md b/SECURITY.md index 64457bdc..ec4df8b2 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -11,60 +11,364 @@ We release security updates for the following versions: ## Reporting a Vulnerability -We take security seriously. If you discover a security vulnerability in Charon, please report it responsibly. 
+To report a security issue, use +[GitHub Private Security Advisories](https://github.com/Wikid82/charon/security/advisories/new) +or open a [GitHub Issue](https://github.com/Wikid82/Charon/issues) for non-sensitive disclosures. -### Where to Report +Please include a description, reproduction steps, impact assessment, and a non-destructive proof of +concept where possible. -**Preferred Method**: GitHub Security Advisory (Private) +We will acknowledge your report within **48 hours** and provide a remediation timeline within +**7 days**. Reporters are credited in release notes with their consent. We do not pursue legal +action against good-faith security researchers. Please allow **90 days** from initial report before +public disclosure. -1. Go to -2. Fill out the advisory form with: - - Vulnerability description - - Steps to reproduce - - Proof of concept (non-destructive) - - Impact assessment - - Suggested fix (if applicable) +--- -**Alternative Method**: GitHub Issues (Public) +## Known Vulnerabilities -1. Go to -2. Create a new issue with the same information as above +Last reviewed: 2026-03-24 -### What to Include +### [HIGH] CVE-2026-2673 · OpenSSL TLS 1.3 Key Exchange Group Downgrade -Please provide: +| Field | Value | +|--------------|-------| +| **ID** | CVE-2026-2673 (affects `libcrypto3` and `libssl3`) | +| **Severity** | High · 7.5 | +| **Status** | Awaiting Upstream | -1. **Description**: Clear explanation of the vulnerability -2. **Reproduction Steps**: Detailed steps to reproduce the issue -3. **Impact Assessment**: What an attacker could do with this vulnerability -4. **Environment**: Charon version, deployment method, OS, etc. -5. **Proof of Concept**: Code or commands demonstrating the vulnerability (non-destructive) -6. 
**Suggested Fix**: If you have ideas for remediation +**What** +An OpenSSL TLS 1.3 server may fail to negotiate the intended key exchange group when the +configuration includes the `DEFAULT` keyword, potentially allowing downgrade to weaker cipher +suites. Affects Alpine 3.23.3 packages `libcrypto3` and `libssl3` at version 3.5.5-r0. -### What Happens Next +**Who** -1. **Acknowledgment**: We'll acknowledge your report within **48 hours** -2. **Investigation**: We'll investigate and assess the severity -3. **Updates**: We'll provide regular status updates (weekly minimum) -4. **Fix Development**: We'll develop and test a fix -5. **Disclosure**: Coordinated disclosure after fix is released -6. **Credit**: We'll credit you in release notes (if desired) +- Discovered by: Automated scan (Grype) +- Reported: 2026-03-20 +- Affects: Container runtime environment; Caddy reverse proxy TLS negotiation could be affected + if default key group configuration is used -### Responsible Disclosure +**Where** -We ask that you: +- Component: Alpine 3.23.3 base image (`libcrypto3` 3.5.5-r0, `libssl3` 3.5.5-r0) +- Versions affected: Alpine 3.23.3 prior to a patched `openssl` APK release -- ✅ Give us reasonable time to fix the issue before public disclosure (90 days preferred) -- ✅ Avoid destructive testing or attacks on production systems -- ✅ Not access, modify, or delete data that doesn't belong to you -- ✅ Not perform actions that could degrade service for others +**When** -We commit to: +- Discovered: 2026-03-20 +- Disclosed (if public): 2026-03-13 (OpenSSL advisory) +- Target fix: When Alpine Security publishes a patched `openssl` APK -- ✅ Respond to your report within 48 hours -- ✅ Provide regular status updates -- ✅ Credit you in release notes (if desired) -- ✅ Not pursue legal action for good-faith security research +**How** +When an OpenSSL TLS 1.3 server configuration uses the `DEFAULT` keyword for key exchange groups, +the negotiation logic may select a weaker group than 
intended. Charon's Caddy TLS configuration +does not use the `DEFAULT` keyword, which limits practical exploitability. The packages are +present in the base image regardless of Caddy's configuration. + +**Planned Remediation** +Monitor for a patched Alpine APK. Once +available, update the pinned `ALPINE_IMAGE` digest in the Dockerfile, or add an explicit +`RUN apk upgrade --no-cache libcrypto3 libssl3` to the runtime stage. + +--- + +### [MEDIUM] CVE-2025-60876 · BusyBox wget HTTP Request Smuggling + +| Field | Value | +|--------------|-------| +| **ID** | CVE-2025-60876 | +| **Severity** | Medium · 6.5 | +| **Status** | Awaiting Upstream | + +**What** +BusyBox wget through 1.37 accepts raw CR/LF and other C0 control bytes in the HTTP +request-target, allowing request line splitting and header injection (CWE-284). + +**Who** + +- Discovered by: Automated scan (Grype) +- Reported: 2026-03-24 +- Affects: Container runtime environment; Charon does not invoke busybox wget in application logic + +**Where** + +- Component: Alpine 3.23.3 base image (`busybox` 1.37.0-r30) +- Versions affected: All Charon images using Alpine 3.23.3 with busybox < patched version + +**When** + +- Discovered: 2026-03-24 +- Disclosed (if public): Not yet publicly disclosed with fix +- Target fix: When Alpine Security publishes a patched busybox APK + +**How** +The vulnerable wget applet would need to be manually invoked inside the container with +attacker-controlled URLs. Charon's application logic does not use busybox wget. EPSS score is +0.00064 (0.20 percentile), indicating extremely low exploitation probability. + +**Planned Remediation** +Monitor Alpine 3.23 for a patched busybox APK. No immediate action required. Practical risk to +Charon users is negligible since the vulnerable code path is not exercised. 
+ +--- + +### [LOW] CVE-2026-26958 · edwards25519 MultiScalarMult Invalid Results + +| Field | Value | +|--------------|-------| +| **ID** | CVE-2026-26958 (GHSA-fw7p-63qq-7hpr) | +| **Severity** | Low · 1.7 | +| **Status** | Awaiting Upstream | + +**What** +`filippo.io/edwards25519` v1.1.0 `MultiScalarMult` produces invalid results or undefined +behavior if the receiver is not the identity point. Fix available at v1.1.1 but requires +CrowdSec to rebuild. + +**Who** + +- Discovered by: Automated scan (Grype) +- Reported: 2026-03-24 +- Affects: CrowdSec Agent component within the container; not directly exposed through Charon's + primary application interface + +**Where** + +- Component: CrowdSec Agent (bundled `cscli` and `crowdsec` binaries) +- Versions affected: CrowdSec builds using `filippo.io/edwards25519` < v1.1.1 + +**When** + +- Discovered: 2026-03-24 +- Disclosed (if public): Public +- Target fix: When CrowdSec releases a build with updated dependency + +**How** +This is a rarely used advanced API within the edwards25519 library. CrowdSec does not directly +expose MultiScalarMult to external input. EPSS score is 0.00018 (0.04 percentile). + +**Planned Remediation** +Awaiting CrowdSec upstream release with updated dependency. No action available for Charon +maintainers. + +--- + +## Patched Vulnerabilities + +### ✅ [CRITICAL] CVE-2025-68121 · Go Stdlib Critical in CrowdSec Bundled Binaries + +| Field | Value | +|--------------|-------| +| **ID** | CVE-2025-68121 (see also CHARON-2025-001) | +| **Severity** | Critical | +| **Patched** | 2026-03-24 | + +**What** +A critical Go standard library vulnerability affects CrowdSec binaries bundled in the Charon +container image. The binaries were compiled against Go 1.25.6, which contains this flaw. +Charon's own application code, compiled with Go 1.26.1, is unaffected. 
+ +**Who** + +- Discovered by: Automated scan (Grype) +- Reported: 2026-03-20 + +**Where** + +- Component: CrowdSec Agent (bundled `cscli` and `crowdsec` binaries) +- Versions affected: Charon container images with CrowdSec binaries compiled against Go < 1.25.7 + +**When** + +- Discovered: 2026-03-20 +- Patched: 2026-03-24 +- Time to patch: 4 days + +**How** +The vulnerability resides entirely within CrowdSec's compiled binary artifacts. Exploitation +is limited to the CrowdSec agent's internal execution paths, which are not externally exposed +through Charon's API or network interface. + +**Resolution** +CrowdSec binaries now compiled with Go 1.26.1 (was 1.25.6). + +--- + +### ✅ [HIGH] CHARON-2025-001 · CrowdSec Bundled Binaries — Go Stdlib CVEs + +| Field | Value | +|--------------|-------| +| **ID** | CHARON-2025-001 (aliases: CVE-2025-58183, CVE-2025-58186, CVE-2025-58187, CVE-2025-61729, CVE-2026-25679, CVE-2025-61732, CVE-2026-27142, CVE-2026-27139) | +| **Severity** | High · (preliminary, CVSS scores pending upstream confirmation) | +| **Patched** | 2026-03-24 | + +**What** +Multiple CVEs in Go standard library packages continue to accumulate in CrowdSec binaries bundled +with Charon. The cluster originated when CrowdSec was compiled against Go 1.25.1; subsequent +CrowdSec updates advanced the toolchain to Go 1.25.6/1.25.7, resolving earlier CVEs but +introducing new ones. The cluster now includes a Critical-severity finding (CVE-2025-68121, +tracked separately above). All issues resolve when CrowdSec is rebuilt against Go ≥ 1.26.1. +Charon's own application code is unaffected. 
+ +**Who** + +- Discovered by: Automated scan (Trivy, Grype) +- Reported: 2025-12-01 (original cluster); expanded 2026-03-20 + +**Where** + +- Component: CrowdSec Agent (bundled `cscli` and `crowdsec` binaries) +- Versions affected: All Charon versions shipping CrowdSec binaries compiled against Go < 1.26.1 + +**When** + +- Discovered: 2025-12-01 +- Patched: 2026-03-24 +- Time to patch: 113 days + +**How** +The CVEs reside entirely within CrowdSec's compiled binaries and cover HTTP/2, TLS, and archive +processing paths that are not invoked by Charon's core application logic. The relevant network +interfaces are not externally exposed via Charon's API surface. + +**Resolution** +CrowdSec binaries now compiled with Go 1.26.1. + +--- + +### ✅ [MEDIUM] CVE-2026-27171 · zlib CPU Exhaustion via Infinite Loop in CRC Combine Functions + +| Field | Value | +|--------------|-------| +| **ID** | CVE-2026-27171 | +| **Severity** | Medium · 5.5 (NVD) / 2.9 (MITRE) | +| **Patched** | 2026-03-24 | + +**What** +zlib before 1.3.2 allows unbounded CPU consumption (denial of service) via the `crc32_combine64` +and `crc32_combine_gen64` functions. An internal helper `x2nmodp` performs right-shifts inside a +loop with no termination condition when given a specially crafted input, causing a CPU spin +(CWE-1284). + +**Who** + +- Discovered by: 7aSecurity audit (commissioned by OSTIF) +- Reported: 2026-02-17 + +**Where** + +- Component: Alpine 3.23.3 base image (`zlib` package, version 1.3.1-r2) +- Versions affected: zlib < 1.3.2; all current Charon images using Alpine 3.23.3 + +**When** + +- Discovered: 2026-02-17 +- Patched: 2026-03-24 +- Time to patch: 35 days + +**How** +Exploitation requires local access (CVSS vector `AV:L`) and the ability to pass a crafted value +to the `crc32_combine`-family functions. This code path is not invoked by Charon's reverse proxy +or backend API. The vulnerability is non-blocking under the project's CI severity policy. 
+ +**Resolution** +Alpine now ships zlib 1.3.2-r0 (fix threshold was 1.3.2). + +--- + +### ✅ [HIGH] CHARON-2026-001 · Debian Base Image CVE Cluster + +| Field | Value | +|--------------|-------| +| **ID** | CHARON-2026-001 (aliases: CVE-2026-0861, CVE-2025-15281, CVE-2026-0915, CVE-2025-13151, and 2 libtiff HIGH CVEs) | +| **Severity** | High · 8.4 (highest per CVSS v3.1) | +| **Patched** | 2026-03-20 (Alpine base image migration complete) | + +**What** +Seven HIGH-severity CVEs in Debian Trixie base image system libraries (`glibc`, `libtasn1-6`, +`libtiff`). These vulnerabilities resided in the container's OS-level packages with no fixes +available from the Debian Security Team. + +**Who** + +- Discovered by: Automated scan (Trivy) +- Reported: 2026-02-04 + +**Where** + +- Component: Debian Trixie base image (`libc6`, `libc-bin`, `libtasn1-6`, `libtiff`) +- Versions affected: Charon container images built on Debian Trixie base (prior to Alpine migration) + +**When** + +- Discovered: 2026-02-04 +- Patched: 2026-03-20 +- Time to patch: 44 days + +**How** +The affected packages were OS-level shared libraries bundled in the Debian Trixie container base +image. Exploitation would have required local container access or a prior application-level +compromise. Caddy reverse proxy ingress filtering and container isolation significantly reduced +the effective attack surface throughout the exposure window. + +**Resolution** +Reverted to Alpine Linux base image (Alpine 3.23.3). Alpine's patch of CVE-2025-60876 (busybox +heap overflow) removed the original blocker for the Alpine migration. Post-migration scan +confirmed zero HIGH/CRITICAL CVEs from this cluster. + +- Spec: [docs/plans/alpine_migration_spec.md](docs/plans/alpine_migration_spec.md) +- Advisory: [docs/security/advisory_2026-02-04_debian_cves_temporary.md](docs/security/advisory_2026-02-04_debian_cves_temporary.md) + +**Credit** +Internal remediation; no external reporter. 
+ +--- + +### ✅ [HIGH] CVE-2025-68156 · expr-lang/expr ReDoS + +| Field | Value | +|--------------|-------| +| **ID** | CVE-2025-68156 | +| **Severity** | High · 7.5 | +| **Patched** | 2026-01-11 | + +**What** +Regular Expression Denial of Service (ReDoS) vulnerability in the `expr-lang/expr` library used +by CrowdSec for expression evaluation. Malicious regular expressions in CrowdSec scenarios or +parsers could cause CPU exhaustion and service degradation through exponential backtracking. + +**Who** + +- Discovered by: Automated scan (Trivy) +- Reported: 2026-01-11 + +**Where** + +- Component: CrowdSec (via `expr-lang/expr` dependency) +- Versions affected: CrowdSec versions using `expr-lang/expr` < v1.17.7 + +**When** + +- Discovered: 2026-01-11 +- Patched: 2026-01-11 +- Time to patch: 0 days + +**How** +Maliciously crafted regular expressions in CrowdSec scenario or parser rules could trigger +exponential backtracking in `expr-lang/expr`'s evaluation engine, causing CPU exhaustion and +denial of service. The vulnerability is in the upstream expression evaluation library, not in +Charon's own code. + +**Resolution** +Upgraded CrowdSec to build from source with the patched `expr-lang/expr` v1.17.7. Verification +confirmed via `go version -m ./cscli` showing the patched library version in compiled artifacts. +Post-patch Trivy scan reports 0 HIGH/CRITICAL vulnerabilities in application code. + +- Technical details: [docs/plans/crowdsec_source_build.md](docs/plans/crowdsec_source_build.md) + +**Credit** +Internal remediation; no external reporter. --- @@ -72,7 +376,8 @@ We commit to: ### Server-Side Request Forgery (SSRF) Protection -Charon implements industry-leading **5-layer defense-in-depth** SSRF protection to prevent attackers from using the application to access internal resources or cloud metadata. 
+Charon implements industry-leading **5-layer defense-in-depth** SSRF protection to prevent +attackers from using the application to access internal resources or cloud metadata. #### Protected Against @@ -100,8 +405,6 @@ Charon implements industry-leading **5-layer defense-in-depth** SSRF protection #### Learn More -For complete technical details, see: - - [SSRF Protection Guide](docs/security/ssrf-protection.md) - [Manual Test Plan](docs/issues/ssrf-manual-test-plan.md) - [QA Audit Report](docs/reports/qa_ssrf_remediation_report.md) @@ -124,7 +427,10 @@ For complete technical details, see: ### Infrastructure Security -- **Non-root by default**: Charon runs as an unprivileged user (`charon`, uid 1000) inside the container. Docker socket access is granted via a minimal supplemental group matching the host socket's GID—never by running as root. If the socket GID is `0` (root group), Charon requires explicit opt-in before granting access. +- **Non-root by default**: Charon runs as an unprivileged user (`charon`, uid 1000) inside the + container. Docker socket access is granted via a minimal supplemental group matching the host + socket's GID — never by running as root. If the socket GID is `0` (root group), Charon requires + explicit opt-in before granting access. - **Container isolation**: Docker-based deployment - **Minimal attack surface**: Alpine Linux base image - **Dependency scanning**: Regular Trivy and govulncheck scans @@ -139,6 +445,126 @@ For complete technical details, see: --- +## Supply Chain Security + +Charon implements comprehensive supply chain security measures to ensure the integrity and +authenticity of releases. Every release includes cryptographic signatures, SLSA provenance +attestation, and a Software Bill of Materials (SBOM). 
+ +### Verification Commands + +#### Verify Container Image Signature + +All official Charon images are signed with Sigstore Cosign: + +```bash +cosign verify \ + --certificate-identity-regexp='https://github.com/Wikid82/charon' \ + --certificate-oidc-issuer='https://token.actions.githubusercontent.com' \ + ghcr.io/wikid82/charon:latest +``` + +Successful verification confirms the image was built by GitHub Actions from the official +repository and has not been tampered with since signing. + +#### Verify SLSA Provenance + +```bash +# Download provenance from release assets +curl -LO https://github.com/Wikid82/charon/releases/latest/download/provenance.json + +slsa-verifier verify-artifact \ + --provenance-path provenance.json \ + --source-uri github.com/Wikid82/charon \ + ./backend/charon-binary +``` + +#### Inspect the SBOM + +```bash +# Download SBOM from release assets +curl -LO https://github.com/Wikid82/charon/releases/latest/download/sbom.spdx.json + +# Scan for known vulnerabilities +grype sbom:sbom.spdx.json +``` + +### Transparency Log (Rekor) + +All signatures are recorded in the public Sigstore Rekor transparency log: + + +### Digest Pinning Policy + +**Scope (Required):** + +- CI workflows: `.github/workflows/*.yml` +- CI compose files: `.docker/compose/*.yml` +- CI helper actions with container refs: `.github/actions/**/*.yml` + +CI workflows and CI compose files MUST use digest-pinned images for third-party services. +Tag+digest pairs are preferred for human-readable references with immutable resolution. +Self-built images MUST propagate digests to downstream jobs and tests. + +**Local Development Exceptions:** + +Local-only overrides (e.g., `CHARON_E2E_IMAGE`, `CHARON_IMAGE`, `CHARON_DEV_IMAGE`) MAY use tags +for developer iteration. Tag-only overrides MUST NOT be used in CI contexts. + +**Documented Exceptions & Compensating Controls:** + +1. 
**Go toolchain shim** (`golang.org/dl/goX.Y.Z@latest`) — Uses `@latest` to install the shim; + compensated by the target toolchain version being pinned in `go.work` with Renovate tracking. +2. **Unpinnable dependencies** — Require documented justification; prefer vendor checksums or + signed releases; keep SBOM/vulnerability scans in CI. + +### Learn More + +- [User Guide](docs/guides/supply-chain-security-user-guide.md) +- [Developer Guide](docs/guides/supply-chain-security-developer-guide.md) +- [Sigstore Documentation](https://docs.sigstore.dev/) +- [SLSA Framework](https://slsa.dev/) + +--- + +## Security Audits & Scanning + +### Automated Scanning + +| Tool | Purpose | +|------|---------| +| Trivy | Container image vulnerability scanning | +| CodeQL | Static analysis for Go and JavaScript | +| govulncheck | Go module vulnerability scanning | +| golangci-lint (gosec) | Go code linting | +| npm audit | Frontend dependency scanning | + +### Scanning Workflows + +**Docker Build & Scan** (`.github/workflows/docker-build.yml`) — runs on every commit to `main`, +`development`, and `feature/beta-release`, and on all PRs targeting those branches. Performs Trivy +scanning, generates an SBOM, creates SBOM attestations, and uploads SARIF results to the GitHub +Security tab. + +**Supply Chain Verification** (`.github/workflows/supply-chain-verify.yml`) — triggers +automatically via `workflow_run` after a successful docker-build. Runs SBOM completeness checks, +Grype vulnerability scans, and (on releases) Cosign signature and SLSA provenance validation. + +**Weekly Security Rebuild** (`.github/workflows/security-weekly-rebuild.yml`) — runs every Sunday +at 02:00 UTC. Performs a full no-cache rebuild, scans for all severity levels, and retains JSON +artifacts for 90 days. + +**PR-Specific Scanning** — extracts and scans only the Charon application binary on each pull +request. Fails the PR if CRITICAL or HIGH vulnerabilities are found in application code. 
+ +### Manual Reviews + +- Security code reviews for all major features +- Peer review of security-sensitive changes +- Third-party security audits (planned) + +--- + ## Security Best Practices ### Deployment Recommendations @@ -153,26 +579,25 @@ For complete technical details, see: ### Configuration Hardening ```yaml -# Recommended docker-compose.yml settings services: charon: image: ghcr.io/wikid82/charon:latest restart: unless-stopped environment: - CHARON_ENV=production - - LOG_LEVEL=info # Don't use debug in production + - LOG_LEVEL=info volumes: - ./charon-data:/app/data:rw - - /var/run/docker.sock:/var/run/docker.sock:ro # Read-only! + - /var/run/docker.sock:/var/run/docker.sock:ro networks: - - charon-internal # Isolated network + - charon-internal cap_drop: - ALL cap_add: - - NET_BIND_SERVICE # Only if binding to ports < 1024 + - NET_BIND_SERVICE security_opt: - no-new-privileges:true - read_only: true # If possible + read_only: true tmpfs: - /tmp:noexec,nosuid,nodev ``` @@ -182,9 +607,8 @@ services: Gotify application tokens are secrets and must be handled with strict confidentiality. - Never echo, print, log, or return token values in API responses or errors. -- Never expose tokenized endpoint query strings (for example, - `...?token=...`) in logs, diagnostics, examples, screenshots, - tickets, or reports. +- Never expose tokenized endpoint query strings (e.g., `...?token=...`) in logs, diagnostics, + examples, screenshots, tickets, or reports. - Always redact query parameters in diagnostics and examples before display or storage. - Use write-only token inputs in operator workflows and UI forms. - Store tokens only in environment variables or a dedicated secret manager. @@ -200,322 +624,6 @@ Gotify application tokens are secrets and must be handled with strict confidenti --- -## Supply Chain Security - -Charon implements comprehensive supply chain security measures to ensure the integrity and authenticity of releases. 
Every release includes cryptographic signatures, SLSA provenance attestation, and Software Bill of Materials (SBOM). - -### Verification Commands - -#### Verify Container Image Signature - -All official Charon images are signed with Sigstore Cosign: - -```bash -# Install cosign (if not already installed) -curl -LO https://github.com/sigstore/cosign/releases/latest/download/cosign-linux-amd64 -sudo mv cosign-linux-amd64 /usr/local/bin/cosign -sudo chmod +x /usr/local/bin/cosign - -# Verify image signature -cosign verify \ - --certificate-identity-regexp='https://github.com/Wikid82/charon' \ - --certificate-oidc-issuer='https://token.actions.githubusercontent.com' \ - ghcr.io/wikid82/charon:latest -``` - -Successful verification output confirms: - -- The image was built by GitHub Actions -- The build came from the official Charon repository -- The image has not been tampered with since signing - -#### Verify SLSA Provenance - -SLSA (Supply-chain Levels for Software Artifacts) provenance provides tamper-proof evidence of how the software was built: - -```bash -# Install slsa-verifier (if not already installed) -curl -LO https://github.com/slsa-framework/slsa-verifier/releases/latest/download/slsa-verifier-linux-amd64 -sudo mv slsa-verifier-linux-amd64 /usr/local/bin/slsa-verifier -sudo chmod +x /usr/local/bin/slsa-verifier - -# Download provenance from release assets -curl -LO https://github.com/Wikid82/charon/releases/latest/download/provenance.json - -# Verify provenance -slsa-verifier verify-artifact \ - --provenance-path provenance.json \ - --source-uri github.com/Wikid82/charon \ - ./backend/charon-binary -``` - -#### Inspect Software Bill of Materials (SBOM) - -Every release includes a comprehensive SBOM in SPDX format: - -```bash -# Download SBOM from release assets -curl -LO https://github.com/Wikid82/charon/releases/latest/download/sbom.spdx.json - -# View SBOM contents -cat sbom.spdx.json | jq . 
- -# Check for known vulnerabilities (requires Grype) -grype sbom:sbom.spdx.json -``` - -### Transparency Log (Rekor) - -All signatures are recorded in the public Sigstore Rekor transparency log, providing an immutable audit trail: - -- **Search the log**: -- **Query by image**: Search for `ghcr.io/wikid82/charon` -- **View entry details**: Each entry includes commit SHA, workflow run, and signing timestamp - -### Automated Verification in CI/CD - -Integrate supply chain verification into your deployment pipeline: - -```yaml -# Example GitHub Actions workflow -- name: Verify Charon Image - run: | - cosign verify \ - --certificate-identity-regexp='https://github.com/Wikid82/charon' \ - --certificate-oidc-issuer='https://token.actions.githubusercontent.com' \ - ghcr.io/wikid82/charon:${{ env.VERSION }} -``` - -### What's Protected - -- **Container Images**: All `ghcr.io/wikid82/charon:*` images are signed -- **Release Binaries**: Backend binaries include provenance attestation -- **Build Process**: SLSA Level 3 compliant build provenance -- **Dependencies**: Complete SBOM including all direct and transitive dependencies - -### Digest Pinning Policy - -Charon uses digest pinning to reduce supply chain risk and ensure CI runs against immutable artifacts. - -**Scope (Required):** - -- **CI workflows**: `.github/workflows/*.yml`, `.github/workflows/*.yaml` -- **CI compose files**: `.docker/compose/*.yml`, `.docker/compose/*.yaml`, `.docker/compose/docker-compose*.yml`, `.docker/compose/docker-compose*.yaml` -- **CI helper actions with container refs**: `.github/actions/**/*.yml`, `.github/actions/**/*.yaml` -- CI workflows and CI compose files MUST use digest-pinned images for third-party services. -- Tag+digest pairs are preferred for human-readable references with immutable resolution. -- Self-built images MUST propagate digests to downstream jobs and tests. - -**Rationale:** - -- Prevent tag drift and supply chain substitution in automated runs. 
-- Ensure deterministic builds, reproducible scans, and stable SBOM generation. -- Reduce rollback risk by guaranteeing CI uses immutable artifacts. - -**Local Development Exceptions:** - -- Local-only overrides (e.g., `CHARON_E2E_IMAGE`, `CHARON_IMAGE`, `CHARON_DEV_IMAGE`) MAY use tags for developer iteration. -- Tag-only overrides MUST NOT be used in CI contexts. - -**Documented Exceptions & Compensating Controls:** - -1. **Go toolchain shim** (`golang.org/dl/goX.Y.Z@latest`) - - **Exception:** Uses `@latest` to install the shim. - - **Compensating controls:** The target toolchain version is pinned in - `go.work`, and Renovate tracks the required version for updates. - -2. **Unpinnable dependencies** (no stable digest or checksum source) - - **Exception:** Dependency cannot be pinned by digest. - - **Compensating controls:** Require documented justification, prefer - vendor-provided checksums or signed releases when available, and keep - SBOM/vulnerability scans in CI. - -### Learn More - -- **[User Guide](docs/guides/supply-chain-security-user-guide.md)**: Step-by-step verification instructions -- **[Developer Guide](docs/guides/supply-chain-security-developer-guide.md)**: Integration into development workflow -- **[Sigstore Documentation](https://docs.sigstore.dev/)**: Technical details on signing and verification -- **[SLSA Framework](https://slsa.dev/)**: Supply chain security framework overview - ---- - -## Security Audits & Scanning - -### Automated Scanning - -We use the following tools: - -- **Trivy**: Container image vulnerability scanning -- **CodeQL**: Static code analysis for Go and JavaScript -- **govulncheck**: Go module vulnerability scanning -- **golangci-lint**: Go code linting (including gosec) -- **npm audit**: Frontend dependency vulnerability scanning - -### Security Scanning Workflows - -Charon implements multiple layers of automated security scanning: - -#### Docker Build & Scan (Per-Commit) - -**Workflow**: 
`.github/workflows/docker-build.yml` - -- Runs on every commit to `main`, `development`, and `feature/beta-release` branches -- Runs on all pull requests targeting these branches -- Performs Trivy vulnerability scanning on built images -- Generates SBOM (Software Bill of Materials) for supply chain transparency -- Creates SBOM attestations for verifiable build provenance -- Verifies Caddy security patches (CVE-2025-68156) -- Uploads SARIF results to GitHub Security tab - -**Note**: This workflow replaced the previous `docker-publish.yml` (deleted Dec 21, 2025) with enhanced security features. - -#### Supply Chain Verification - -**Workflow**: `.github/workflows/supply-chain-verify.yml` - -**Trigger Timing**: Runs automatically after `docker-build.yml` completes successfully via `workflow_run` trigger. - -**Branch Coverage**: Triggers on **ALL branches** where docker-build completes, including: - -- `main` (default branch) -- `development` -- `feature/*` branches (including `feature/beta-release`) -- Pull request branches - -**Why No Branch Filter**: GitHub Actions has a platform limitation where `branches` filters in `workflow_run` triggers only match the default branch. To ensure comprehensive supply chain verification across all branches and PRs, we intentionally omit the branch filter. The workflow file must exist on the branch to execute, preventing untrusted code execution. - -**Verification Steps**: - -1. SBOM completeness verification -2. Vulnerability scanning with Grype -3. Results uploaded as workflow artifacts -4. PR comments with vulnerability summary (when applicable) -5. 
For releases: Cosign signature verification and SLSA provenance validation - -**Additional Triggers**: - -- Runs on all published releases -- Scheduled weekly on Mondays at 00:00 UTC -- Can be triggered manually via `workflow_dispatch` - -#### Weekly Security Rebuild - -**Workflow**: `.github/workflows/security-weekly-rebuild.yml` - -- Runs every Sunday at 02:00 UTC -- Performs full rebuild with no cache to ensure latest base images -- Scans with Trivy for CRITICAL, HIGH, MEDIUM, and LOW vulnerabilities -- Uploads results to GitHub Security tab -- Stores JSON artifacts for 90-day retention -- Checks Alpine package versions for security updates - -#### PR-Specific Scanning - -**Workflow**: `.github/workflows/docker-build.yml` (trivy-pr-app-only job) - -- Runs on all pull requests -- Extracts and scans only the Charon application binary -- Fails PR if CRITICAL or HIGH vulnerabilities found in application code -- Faster feedback loop for developers during code review - -### Workflow Orchestration - -The security scanning workflows use a coordinated orchestration pattern: - -1. **Build Phase**: `docker-build.yml` builds the image and performs initial Trivy scan -2. **Verification Phase**: `supply-chain-verify.yml` triggers automatically via `workflow_run` after successful build -3. **Verification Timing**: - - On feature branches: Runs after docker-build completes on push events - - On pull requests: Runs after docker-build completes on PR synchronize events - - No delay or gaps: verification starts immediately after build success -4. 
**Weekly Maintenance**: `security-weekly-rebuild.yml` provides ongoing monitoring - -This pattern ensures: - -- Images are built before verification attempts to scan them -- No race conditions between build and verification -- Comprehensive coverage across all branches and PRs -- Efficient resource usage (verification only runs after successful builds) - -### Manual Reviews - -- Security code reviews for all major features -- Peer review of security-sensitive changes -- Third-party security audits (planned) - -### Continuous Monitoring - -- GitHub Dependabot alerts -- Weekly security scans in CI/CD -- Community vulnerability reports -- Automated supply chain verification on every build - ---- - -## Recently Resolved Vulnerabilities - -Charon maintains transparency about security issues and their resolution. Below is a comprehensive record of recently patched vulnerabilities. - -### CVE-2025-68156 (expr-lang/expr ReDoS) - -- **Severity**: HIGH (CVSS 7.5) -- **Component**: expr-lang/expr (used by CrowdSec for expression evaluation) -- **Vulnerability**: Regular Expression Denial of Service (ReDoS) -- **Description**: Malicious regular expressions in CrowdSec scenarios or parsers could cause CPU exhaustion and service degradation through exponential backtracking in vulnerable regex patterns. 
-- **Fixed Version**: expr-lang/expr v1.17.7 -- **Resolution Date**: January 11, 2026 -- **Remediation**: Upgraded CrowdSec to build from source with patched expr-lang/expr v1.17.7 -- **Verification**: - - Binary inspection: `go version -m ./cscli` confirms v1.17.7 in compiled artifacts - - Container scan: Trivy reports 0 HIGH/CRITICAL vulnerabilities in application code - - Runtime testing: CrowdSec scenarios and parsers load successfully with patched library -- **Impact**: No known exploits in Charon deployments; preventive upgrade completed -- **Status**: ✅ **PATCHED** — Verified in all release artifacts -- **Technical Details**: See [CrowdSec Source Build Documentation](docs/plans/crowdsec_source_build.md) - ---- - -## Known Security Considerations - -### Debian Base Image CVEs (2026-02-04) — TEMPORARY - -**Status**: ⚠️ 7 HIGH severity CVEs in Debian Trixie base image. **Alpine migration in progress.** - -**Background**: Migrated from Alpine → Debian due to CVE-2025-60876 (busybox heap overflow). Debian now has worse CVE posture with no fixes available. Reverting to Alpine as Alpine CVE-2025-60876 is now patched. 
- -**Affected Packages**: -- **libc6/libc-bin** (glibc): CVE-2026-0861 (CVSS 8.4), CVE-2025-15281, CVE-2026-0915 -- **libtasn1-6**: CVE-2025-13151 (CVSS 7.5) -- **libtiff**: 2 additional HIGH CVEs - -**Fix Status**: ❌ No fixes available from Debian Security Team - -**Risk Assessment**: 🟢 **LOW actual risk** -- CVEs affect system libraries, NOT Charon application code -- Container isolation limits exploit surface area -- No direct exploit paths identified in Charon's usage patterns -- Network ingress filtered through Caddy proxy - -**Mitigation**: Alpine base image migration -- **Spec**: [`docs/plans/alpine_migration_spec.md`](docs/plans/alpine_migration_spec.md) -- **Security Advisory**: [`docs/security/advisory_2026-02-04_debian_cves_temporary.md`](docs/security/advisory_2026-02-04_debian_cves_temporary.md) -- **Timeline**: 2-3 weeks (target completion: March 5, 2026) -- **Expected Outcome**: 100% CVE reduction (7 HIGH → 0) - -**Review Date**: 2026-02-11 (Phase 1 Alpine CVE verification) - -**Details**: See [VULNERABILITY_ACCEPTANCE.md](docs/security/VULNERABILITY_ACCEPTANCE.md) for complete risk assessment and monitoring plan. - -### Third-Party Dependencies - -**CrowdSec Binaries**: As of December 2025, CrowdSec binaries shipped with Charon contain 4 HIGH-severity CVEs in Go stdlib (CVE-2025-58183, CVE-2025-58186, CVE-2025-58187, CVE-2025-61729). These are upstream issues in Go 1.25.1 and will be resolved when CrowdSec releases binaries built with go 1.26.0+. - -**Impact**: Low. These vulnerabilities are in CrowdSec's third-party binaries, not in Charon's application code. They affect HTTP/2, TLS certificate handling, and archive parsing—areas not directly exposed to attackers through Charon's interface. - -**Mitigation**: Monitor CrowdSec releases for updated binaries. Charon's own application code has zero vulnerabilities. 
- ---- - ## Security Hall of Fame We recognize security researchers who help improve Charon: @@ -525,19 +633,4 @@ We recognize security researchers who help improve Charon: --- -## Security Contact - -- **GitHub Security Advisories**: -- **GitHub Discussions**: -- **GitHub Issues** (non-security): - ---- - -## License - -This security policy is part of the Charon project, licensed under the MIT License. - ---- - -**Last Updated**: January 30, 2026 -**Version**: 1.2 +**Last Updated**: 2026-03-24 diff --git a/VERSION.md b/VERSION.md index 90129050..311c0601 100644 --- a/VERSION.md +++ b/VERSION.md @@ -24,8 +24,10 @@ Example: `0.1.0-alpha`, `1.0.0-beta.1`, `2.0.0-rc.2` 1. **Create and push a release tag**: ```bash + git tag -a v1.0.0 -m "Release v1.0.0" git push origin v1.0.0 + ``` 2. **GitHub Actions automatically**: @@ -51,10 +53,12 @@ Use it only when you need local/version-file parity checks: echo "1.0.0" > .version ``` -2. **Validate `.version` matches the latest tag**: +1. **Validate `.version` matches the latest tag**: ```bash + bash scripts/check-version-match-tag.sh + ``` ### Deterministic Rollout Verification Gates (Mandatory) diff --git a/backend/go.mod b/backend/go.mod index be19ceb1..44a2e22b 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -10,14 +10,14 @@ require ( github.com/golang-jwt/jwt/v5 v5.3.1 github.com/google/uuid v1.6.0 github.com/gorilla/websocket v1.5.3 - github.com/mattn/go-sqlite3 v1.14.34 + github.com/mattn/go-sqlite3 v1.14.37 github.com/oschwald/geoip2-golang/v2 v2.1.0 github.com/prometheus/client_golang v1.23.2 github.com/robfig/cron/v3 v3.0.1 github.com/sirupsen/logrus v1.9.4 github.com/stretchr/testify v1.11.1 - golang.org/x/crypto v0.48.0 - golang.org/x/net v0.51.0 + golang.org/x/crypto v0.49.0 + golang.org/x/net v0.52.0 golang.org/x/text v0.35.0 golang.org/x/time v0.15.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 @@ -28,7 +28,7 @@ require ( require ( github.com/Microsoft/go-winio v0.6.2 // indirect github.com/beorn7/perks v1.0.1 // 
indirect - github.com/bytedance/gopkg v0.1.3 // indirect + github.com/bytedance/gopkg v0.1.4 // indirect github.com/bytedance/sonic v1.15.0 // indirect github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect @@ -50,7 +50,7 @@ require ( github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/validator/v10 v10.30.1 // indirect - github.com/goccy/go-json v0.10.5 // indirect + github.com/goccy/go-json v0.10.6 // indirect github.com/goccy/go-yaml v1.19.2 // indirect github.com/jinzhu/inflection v1.0.0 // indirect github.com/jinzhu/now v1.1.5 // indirect @@ -64,13 +64,13 @@ require ( github.com/moby/term v0.5.2 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect - github.com/morikuni/aec v1.0.0 // indirect + github.com/morikuni/aec v1.1.0 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/ncruces/go-strftime v1.0.0 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.1 // indirect github.com/oschwald/maxminddb-golang/v2 v2.1.1 // indirect - github.com/pelletier/go-toml/v2 v2.2.4 // indirect + github.com/pelletier/go-toml/v2 v2.3.0 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/prometheus/client_model v0.6.2 // indirect @@ -79,24 +79,25 @@ require ( github.com/quic-go/qpack v0.6.0 // indirect github.com/quic-go/quic-go v0.59.0 // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect - github.com/stretchr/objx v0.5.2 // indirect + github.com/stretchr/objx v0.5.3 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/ugorji/go/codec v1.3.1 // indirect go.mongodb.org/mongo-driver/v2 v2.5.0 // indirect go.opentelemetry.io/auto/sdk 
v1.2.1 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0 // indirect go.opentelemetry.io/otel v1.42.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.42.0 // indirect go.opentelemetry.io/otel/metric v1.42.0 // indirect go.opentelemetry.io/otel/trace v1.42.0 // indirect go.yaml.in/yaml/v2 v2.4.4 // indirect golang.org/x/arch v0.25.0 // indirect golang.org/x/sys v0.42.0 // indirect + google.golang.org/grpc v1.79.3 // indirect google.golang.org/protobuf v1.36.11 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect gotest.tools/v3 v3.5.2 // indirect modernc.org/libc v1.70.0 // indirect modernc.org/mathutil v1.7.1 // indirect modernc.org/memory v1.11.0 // indirect - modernc.org/sqlite v1.46.1 // indirect + modernc.org/sqlite v1.47.0 // indirect ) diff --git a/backend/go.sum b/backend/go.sum index 268f570d..5c30b306 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -4,8 +4,8 @@ github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERo github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= -github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= +github.com/bytedance/gopkg v0.1.4 h1:oZnQwnX82KAIWb7033bEwtxvTqXcYMxDBaQxo5JJHWM= +github.com/bytedance/gopkg v0.1.4/go.mod h1:v1zWfPm21Fb+OsyXN2VAHdL6TBb2L88anLQgdyje6R4= github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= @@ -62,8 +62,8 @@ 
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w= github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM= -github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= -github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/goccy/go-json v0.10.6 h1:p8HrPJzOakx/mn/bQtjgNjdTcN+/S6FcG2CTtQOrHVU= +github.com/goccy/go-json v0.10.6/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM= github.com/goccy/go-yaml v1.19.2/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY= @@ -77,8 +77,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0 h1:HWRh5R2+9EifMyIHV7ZV+MIZqgz+PMpZ14Jynv3O2Zs= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0/go.mod h1:JfhWUomR1baixubs02l85lZYYOm7LV6om4ceouMv45c= github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/jinzhu/inflection v1.0.0 
h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= @@ -101,8 +101,8 @@ github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-sqlite3 v1.14.34 h1:3NtcvcUnFBPsuRcno8pUtupspG/GM+9nZ88zgJcp6Zk= -github.com/mattn/go-sqlite3 v1.14.34/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/mattn/go-sqlite3 v1.14.37 h1:3DOZp4cXis1cUIpCfXLtmlGolNLp2VEqhiB/PARNBIg= +github.com/mattn/go-sqlite3 v1.14.37/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= @@ -116,8 +116,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= -github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/morikuni/aec v1.1.0 h1:vBBl0pUnvi/Je71dsRrhMBtreIqNMYErSAbEeb8jrXQ= +github.com/morikuni/aec v1.1.0/go.mod h1:xDRgiq/iw5l+zkao76YTKzKttOp2cwPEne25HDkJnBw= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod 
h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w= @@ -130,8 +130,8 @@ github.com/oschwald/geoip2-golang/v2 v2.1.0 h1:DjnLhNJu9WHwTrmoiQFvgmyJoczhdnm7L github.com/oschwald/geoip2-golang/v2 v2.1.0/go.mod h1:qdVmcPgrTJ4q2eP9tHq/yldMTdp2VMr33uVdFbHBiBc= github.com/oschwald/maxminddb-golang/v2 v2.1.1 h1:lA8FH0oOrM4u7mLvowq8IT6a3Q/qEnqRzLQn9eH5ojc= github.com/oschwald/maxminddb-golang/v2 v2.1.1/go.mod h1:PLdx6PR+siSIoXqqy7C7r3SB3KZnhxWr1Dp6g0Hacl8= -github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= -github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= +github.com/pelletier/go-toml/v2 v2.3.0 h1:k59bC/lIZREW0/iVaQR8nDHxVq8OVlIzYCOJf421CaM= +github.com/pelletier/go-toml/v2 v2.3.0/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -159,8 +159,9 @@ github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC4 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/objx v0.5.3 h1:jmXUvGomnU1o3W/V5h2VEradbpJDwGrzugQQvL0POH4= +github.com/stretchr/objx v0.5.3/go.mod h1:rDQraq+vQZU7Fde9LOZLr8Tax6zZvy4kuNKF+QYS+U0= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.7.1/go.mod 
h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= @@ -180,10 +181,10 @@ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0 h1:Oyrsyzu go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0/go.mod h1:C2NGBr+kAB4bk3xtMXfZ94gqFDtg/GkI7e9zqGh5Beg= go.opentelemetry.io/otel v1.42.0 h1:lSQGzTgVR3+sgJDAU/7/ZMjN9Z+vUip7leaqBKy4sho= go.opentelemetry.io/otel v1.42.0/go.mod h1:lJNsdRMxCUIWuMlVJWzecSMuNjE7dOYyWlqOXWkdqCc= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 h1:GqRJVj7UmLjCVyVJ3ZFLdPRmhDUp2zFmQe3RHIOsw24= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0/go.mod h1:ri3aaHSmCTVYu2AWv44YMauwAQc0aqI9gHKIcSbI1pU= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 h1:aTL7F04bJHUlztTsNGJ2l+6he8c+y/b//eR0jjjemT4= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0/go.mod h1:kldtb7jDTeol0l3ewcmd8SDvx3EmIE7lyvqbasU3QC4= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.42.0 h1:THuZiwpQZuHPul65w4WcwEnkX2QIuMT+UFoOrygtoJw= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.42.0/go.mod h1:J2pvYM5NGHofZ2/Ru6zw/TNWnEQp5crgyDeSrYpXkAw= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.42.0 h1:uLXP+3mghfMf7XmV4PkGfFhFKuNWoCvvx5wP/wOXo0o= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.42.0/go.mod h1:v0Tj04armyT59mnURNUJf7RCKcKzq+lgJs6QSjHjaTc= go.opentelemetry.io/otel/metric v1.42.0 h1:2jXG+3oZLNXEPfNmnpxKDeZsFI5o4J+nz6xUlaFdF/4= go.opentelemetry.io/otel/metric v1.42.0/go.mod h1:RlUN/7vTU7Ao/diDkEpQpnz3/92J9ko05BIwxYa2SSI= go.opentelemetry.io/otel/sdk v1.42.0 h1:LyC8+jqk6UJwdrI/8VydAq/hvkFKNHZVIWuslJXYsDo= @@ -192,8 +193,8 @@ go.opentelemetry.io/otel/sdk/metric v1.42.0 h1:D/1QR46Clz6ajyZ3G8SgNlTJKBdGp84q9 go.opentelemetry.io/otel/sdk/metric v1.42.0/go.mod h1:Ua6AAlDKdZ7tdvaQKfSmnFTdHx37+J4ba8MwVCYM5hc= go.opentelemetry.io/otel/trace v1.42.0 
h1:OUCgIPt+mzOnaUTpOQcBiM/PLQ/Op7oq6g4LenLmOYY= go.opentelemetry.io/otel/trace v1.42.0/go.mod h1:f3K9S+IFqnumBkKhRJMeaZeNk9epyhnCmQh/EysQCdc= -go.opentelemetry.io/proto/otlp v1.7.1 h1:gTOMpGDb0WTBOP8JaO72iL3auEZhVmAQg4ipjOVAtj4= -go.opentelemetry.io/proto/otlp v1.7.1/go.mod h1:b2rVh6rfI/s2pHWNlB7ILJcRALpcNDzKhACevjI+ZnE= +go.opentelemetry.io/proto/otlp v1.9.0 h1:l706jCMITVouPOqEnii2fIAuO3IVGBRPV5ICjceRb/A= +go.opentelemetry.io/proto/otlp v1.9.0/go.mod h1:xE+Cx5E/eEHw+ISFkwPLwCZefwVjY+pqKg1qcK03+/4= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= @@ -202,12 +203,12 @@ go.yaml.in/yaml/v2 v2.4.4 h1:tuyd0P+2Ont/d6e2rl3be67goVK4R6deVxCUX5vyPaQ= go.yaml.in/yaml/v2 v2.4.4/go.mod h1:gMZqIpDtDqOfM0uNfy0SkpRhvUryYH0Z6wdMYcacYXQ= golang.org/x/arch v0.25.0 h1:qnk6Ksugpi5Bz32947rkUgDt9/s5qvqDPl/gBKdMJLE= golang.org/x/arch v0.25.0/go.mod h1:0X+GdSIP+kL5wPmpK7sdkEVTt2XoYP0cSjQSbZBwOi8= -golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= -golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= +golang.org/x/crypto v0.49.0 h1:+Ng2ULVvLHnJ/ZFEq4KdcDd/cfjrrjjNSXNzxg0Y4U4= +golang.org/x/crypto v0.49.0/go.mod h1:ErX4dUh2UM+CFYiXZRTcMpEcN8b/1gxEuv3nODoYtCA= golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8= golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w= -golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo= -golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y= +golang.org/x/net v0.52.0 h1:He/TN1l0e4mmR3QqHMT2Xab3Aj3L9qjbhRm78/6jrW0= +golang.org/x/net v0.52.0/go.mod h1:R1MAz7uMZxVMualyPXb+VaqGSa3LIaUqk0eEt3w36Sw= golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4= golang.org/x/sync v0.20.0/go.mod 
h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -219,12 +220,12 @@ golang.org/x/time v0.15.0 h1:bbrp8t3bGUeFOx08pvsMYRTCVSMk89u4tKbNOZbp88U= golang.org/x/time v0.15.0/go.mod h1:Y4YMaQmXwGQZoFaVFk4YpCt4FLQMYKZe9oeV/f4MSno= golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k= golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0= -google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 h1:BIRfGDEjiHRrk0QKZe3Xv2ieMhtgRGeLcZQ0mIVn4EY= -google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5/go.mod h1:j3QtIyytwqGr1JUDtYXwtMXWPKsEa5LtzIFN1Wn5WvE= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 h1:eaY8u2EuxbRv7c3NiGK0/NedzVsCcV6hDuU5qPX5EGE= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5/go.mod h1:M4/wBTSeyLxupu3W3tJtOgB14jILAS/XWPSSa3TAlJc= -google.golang.org/grpc v1.75.0 h1:+TW+dqTd2Biwe6KKfhE5JpiYIBWq865PhKGSXiivqt4= -google.golang.org/grpc v1.75.0/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260209200024-4cfbd4190f57 h1:JLQynH/LBHfCTSbDWl+py8C+Rg/k1OVH3xfcaiANuF0= +google.golang.org/genproto/googleapis/api v0.0.0-20260209200024-4cfbd4190f57/go.mod h1:kSJwQxqmFXeo79zOmbrALdflXQeAYcUbgS7PbpMknCY= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 h1:mWPCjDEyshlQYzBpMNHaEof6UX1PmHcaUODUywQ0uac= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/grpc v1.79.3 h1:sybAEdRIEtvcD68Gx7dmnwjZKlyfuc61Dyo9pGXXkKE= +google.golang.org/grpc v1.79.3/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= google.golang.org/protobuf v1.36.11/go.mod 
h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -263,8 +264,8 @@ modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8= modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns= modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w= modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE= -modernc.org/sqlite v1.46.1 h1:eFJ2ShBLIEnUWlLy12raN0Z1plqmFX9Qe3rjQTKt6sU= -modernc.org/sqlite v1.46.1/go.mod h1:CzbrU2lSB1DKUusvwGz7rqEKIq+NUd8GWuBBZDs9/nA= +modernc.org/sqlite v1.47.0 h1:R1XyaNpoW4Et9yly+I2EeX7pBza/w+pmYee/0HJDyKk= +modernc.org/sqlite v1.47.0/go.mod h1:hWjRO6Tj/5Ik8ieqxQybiEOUXy0NJFNp2tpvVpKlvig= modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0= modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A= modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y= diff --git a/backend/internal/api/handlers/auth_handler.go b/backend/internal/api/handlers/auth_handler.go index ce4c5572..9eeb4847 100644 --- a/backend/internal/api/handlers/auth_handler.go +++ b/backend/internal/api/handlers/auth_handler.go @@ -126,19 +126,16 @@ func isLocalRequest(c *gin.Context) bool { } // setSecureCookie sets an auth cookie with security best practices -// - HttpOnly: prevents JavaScript access (XSS protection) -// - Secure: true for HTTPS; false for local/private network HTTP requests -// - SameSite: Lax for any local/private-network request (regardless of scheme), -// Strict otherwise (public HTTPS only) +// - HttpOnly: prevents JavaScript access (XSS protection) +// - Secure: always true (all major browsers honour Secure on localhost HTTP; +// HTTP-on-private-IP without TLS is an unsupported deployment) +// - SameSite: Lax for any local/private-network request (regardless of scheme), +// Strict otherwise (public HTTPS only) 
func setSecureCookie(c *gin.Context, name, value string, maxAge int) { scheme := requestScheme(c) - secure := true sameSite := http.SameSiteStrictMode if scheme != "https" { sameSite = http.SameSiteLaxMode - if isLocalRequest(c) { - secure = false - } } if isLocalRequest(c) { @@ -149,14 +146,13 @@ func setSecureCookie(c *gin.Context, name, value string, maxAge int) { domain := "" c.SetSameSite(sameSite) - // secure is intentionally false for local/private network HTTP requests; always true for external or HTTPS requests. - c.SetCookie( // codeql[go/cookie-secure-not-set] + c.SetCookie( name, // name value, // value maxAge, // maxAge in seconds "/", // path domain, // domain (empty = current host) - secure, // secure + true, // secure true, // httpOnly (no JS access) ) } diff --git a/backend/internal/api/handlers/auth_handler_test.go b/backend/internal/api/handlers/auth_handler_test.go index cf308bd9..9e945e75 100644 --- a/backend/internal/api/handlers/auth_handler_test.go +++ b/backend/internal/api/handlers/auth_handler_test.go @@ -112,7 +112,7 @@ func TestSetSecureCookie_HTTP_Loopback_Insecure(t *testing.T) { cookies := recorder.Result().Cookies() require.Len(t, cookies, 1) cookie := cookies[0] - assert.False(t, cookie.Secure) + assert.True(t, cookie.Secure) assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite) } @@ -216,7 +216,7 @@ func TestSetSecureCookie_HTTP_PrivateIP_Insecure(t *testing.T) { cookies := recorder.Result().Cookies() require.Len(t, cookies, 1) cookie := cookies[0] - assert.False(t, cookie.Secure) + assert.True(t, cookie.Secure) assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite) } @@ -234,7 +234,7 @@ func TestSetSecureCookie_HTTP_10Network_Insecure(t *testing.T) { cookies := recorder.Result().Cookies() require.Len(t, cookies, 1) cookie := cookies[0] - assert.False(t, cookie.Secure) + assert.True(t, cookie.Secure) assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite) } @@ -252,7 +252,7 @@ func TestSetSecureCookie_HTTP_172Network_Insecure(t 
*testing.T) { cookies := recorder.Result().Cookies() require.Len(t, cookies, 1) cookie := cookies[0] - assert.False(t, cookie.Secure) + assert.True(t, cookie.Secure) assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite) } @@ -288,7 +288,7 @@ func TestSetSecureCookie_HTTP_IPv6ULA_Insecure(t *testing.T) { cookies := recorder.Result().Cookies() require.Len(t, cookies, 1) cookie := cookies[0] - assert.False(t, cookie.Secure) + assert.True(t, cookie.Secure) assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite) } @@ -439,6 +439,7 @@ func TestClearSecureCookie(t *testing.T) { require.Len(t, cookies, 1) assert.Equal(t, "auth_token", cookies[0].Name) assert.Equal(t, -1, cookies[0].MaxAge) + assert.True(t, cookies[0].Secure) } func TestAuthHandler_Login_Errors(t *testing.T) { diff --git a/backend/internal/api/handlers/certificate_handler_test.go b/backend/internal/api/handlers/certificate_handler_test.go index 4fad16d2..bb10ac01 100644 --- a/backend/internal/api/handlers/certificate_handler_test.go +++ b/backend/internal/api/handlers/certificate_handler_test.go @@ -699,6 +699,124 @@ func TestDeleteCertificate_DiskSpaceCheckError(t *testing.T) { } } +// Test that an expired Let's Encrypt certificate not in use can be deleted. +// The backend has no provider-based restrictions; deletion policy is frontend-only. 
+func TestDeleteCertificate_ExpiredLetsEncrypt_NotInUse(t *testing.T) { + dbPath := t.TempDir() + "/cert_expired_le.db" + db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?_journal_mode=WAL&_busy_timeout=5000&_foreign_keys=1", dbPath)), &gorm.Config{}) + if err != nil { + t.Fatalf("failed to open db: %v", err) + } + sqlDB, err := db.DB() + if err != nil { + t.Fatalf("failed to access sql db: %v", err) + } + sqlDB.SetMaxOpenConns(1) + sqlDB.SetMaxIdleConns(1) + + if err = db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}); err != nil { + t.Fatalf("failed to migrate: %v", err) + } + + expired := time.Now().Add(-24 * time.Hour) + cert := models.SSLCertificate{ + UUID: "expired-le-cert", + Name: "expired-le", + Provider: "letsencrypt", + Domains: "expired.example.com", + ExpiresAt: &expired, + } + if err = db.Create(&cert).Error; err != nil { + t.Fatalf("failed to create cert: %v", err) + } + + gin.SetMode(gin.TestMode) + r := gin.New() + r.Use(mockAuthMiddleware()) + svc := services.NewCertificateService("/tmp", db) + + mockBS := &mockBackupService{ + createFunc: func() (string, error) { + return "backup-expired-le.tar.gz", nil + }, + } + + h := NewCertificateHandler(svc, mockBS, nil) + r.DELETE("/api/certificates/:id", h.Delete) + + req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(cert.ID), http.NoBody) + w := httptest.NewRecorder() + r.ServeHTTP(w, req) + + if w.Code != http.StatusOK { + t.Fatalf("expected 200 OK, got %d, body=%s", w.Code, w.Body.String()) + } + + var found models.SSLCertificate + if err = db.First(&found, cert.ID).Error; err == nil { + t.Fatal("expected expired LE certificate to be deleted") + } +} + +// Test that a valid (non-expired) Let's Encrypt certificate not in use can be deleted. +// Confirms the backend imposes no provider-based restrictions on deletion. 
+func TestDeleteCertificate_ValidLetsEncrypt_NotInUse(t *testing.T) { + dbPath := t.TempDir() + "/cert_valid_le.db" + db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?_journal_mode=WAL&_busy_timeout=5000&_foreign_keys=1", dbPath)), &gorm.Config{}) + if err != nil { + t.Fatalf("failed to open db: %v", err) + } + sqlDB, err := db.DB() + if err != nil { + t.Fatalf("failed to access sql db: %v", err) + } + sqlDB.SetMaxOpenConns(1) + sqlDB.SetMaxIdleConns(1) + + if err = db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}); err != nil { + t.Fatalf("failed to migrate: %v", err) + } + + future := time.Now().Add(30 * 24 * time.Hour) + cert := models.SSLCertificate{ + UUID: "valid-le-cert", + Name: "valid-le", + Provider: "letsencrypt", + Domains: "valid.example.com", + ExpiresAt: &future, + } + if err = db.Create(&cert).Error; err != nil { + t.Fatalf("failed to create cert: %v", err) + } + + gin.SetMode(gin.TestMode) + r := gin.New() + r.Use(mockAuthMiddleware()) + svc := services.NewCertificateService("/tmp", db) + + mockBS := &mockBackupService{ + createFunc: func() (string, error) { + return "backup-valid-le.tar.gz", nil + }, + } + + h := NewCertificateHandler(svc, mockBS, nil) + r.DELETE("/api/certificates/:id", h.Delete) + + req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(cert.ID), http.NoBody) + w := httptest.NewRecorder() + r.ServeHTTP(w, req) + + if w.Code != http.StatusOK { + t.Fatalf("expected 200 OK, got %d, body=%s", w.Code, w.Body.String()) + } + + var found models.SSLCertificate + if err = db.First(&found, cert.ID).Error; err == nil { + t.Fatal("expected valid LE certificate to be deleted") + } +} + // Test Delete when IsCertificateInUse fails func TestDeleteCertificate_UsageCheckError(t *testing.T) { db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{}) diff --git a/backend/internal/api/handlers/notification_coverage_test.go 
b/backend/internal/api/handlers/notification_coverage_test.go index 4b56cb9e..7ddc0c28 100644 --- a/backend/internal/api/handlers/notification_coverage_test.go +++ b/backend/internal/api/handlers/notification_coverage_test.go @@ -474,6 +474,61 @@ func TestClassifyProviderTestFailure_TLSHandshakeFailed(t *testing.T) { assert.Contains(t, message, "TLS handshake failed") } +func TestClassifyProviderTestFailure_SlackInvalidPayload(t *testing.T) { + code, category, message := classifyProviderTestFailure(errors.New("invalid_payload")) + + assert.Equal(t, "PROVIDER_TEST_VALIDATION_FAILED", code) + assert.Equal(t, "validation", category) + assert.Contains(t, message, "Slack rejected the payload") +} + +func TestClassifyProviderTestFailure_SlackMissingTextOrFallback(t *testing.T) { + code, category, message := classifyProviderTestFailure(errors.New("missing_text_or_fallback")) + + assert.Equal(t, "PROVIDER_TEST_VALIDATION_FAILED", code) + assert.Equal(t, "validation", category) + assert.Contains(t, message, "Slack rejected the payload") +} + +func TestClassifyProviderTestFailure_SlackNoService(t *testing.T) { + code, category, message := classifyProviderTestFailure(errors.New("no_service")) + + assert.Equal(t, "PROVIDER_TEST_AUTH_REJECTED", code) + assert.Equal(t, "dispatch", category) + assert.Contains(t, message, "Slack webhook is revoked") +} + +func TestNotificationProviderHandler_Test_RejectsSlackTokenInTestRequest(t *testing.T) { + gin.SetMode(gin.TestMode) + db := setupNotificationCoverageDB(t) + svc := services.NewNotificationService(db, nil) + h := NewNotificationProviderHandler(svc) + + payload := map[string]any{ + "type": "slack", + "url": "#alerts", + "token": "https://hooks.slack.com/services/T00/B00/secret", + } + body, _ := json.Marshal(payload) + + w := httptest.NewRecorder() + c, _ := gin.CreateTestContext(w) + setAdminContext(c) + c.Set(string(trace.RequestIDKey), "req-slack-token-reject") + c.Request = httptest.NewRequest(http.MethodPost, 
"/providers/test", bytes.NewBuffer(body)) + c.Request.Header.Set("Content-Type", "application/json") + + h.Test(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + var resp map[string]any + require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp)) + assert.Equal(t, "TOKEN_WRITE_ONLY", resp["code"]) + assert.Equal(t, "validation", resp["category"]) + assert.Equal(t, "Slack webhook URL is accepted only on provider create/update", resp["error"]) + assert.NotContains(t, w.Body.String(), "hooks.slack.com") +} + func TestNotificationProviderHandler_Templates(t *testing.T) { gin.SetMode(gin.TestMode) db := setupNotificationCoverageDB(t) @@ -948,14 +1003,14 @@ func TestNotificationProviderHandler_Update_UnsupportedType(t *testing.T) { existing := models.NotificationProvider{ ID: "unsupported-type", Name: "Custom Provider", - Type: "slack", - URL: "https://hooks.slack.com/test", + Type: "sms", + URL: "https://sms.example.com/test", } require.NoError(t, db.Create(&existing).Error) payload := map[string]any{ - "name": "Updated Slack Provider", - "url": "https://hooks.slack.com/updated", + "name": "Updated SMS Provider", + "url": "https://sms.example.com/updated", } body, _ := json.Marshal(payload) diff --git a/backend/internal/api/handlers/notification_provider_blocker3_test.go b/backend/internal/api/handlers/notification_provider_blocker3_test.go index 3d71d38e..5cd6338e 100644 --- a/backend/internal/api/handlers/notification_provider_blocker3_test.go +++ b/backend/internal/api/handlers/notification_provider_blocker3_test.go @@ -28,19 +28,22 @@ func TestBlocker3_CreateProviderRejectsNonDiscordWithSecurityEvents(t *testing.T assert.NoError(t, err) // Create handler - service := services.NewNotificationService(db, nil) + service := services.NewNotificationService(db, nil, + services.WithSlackURLValidator(func(string) error { return nil }), + ) handler := NewNotificationProviderHandler(service) // Test cases: provider types with security events enabled testCases := []struct 
{ name string providerType string + token string wantStatus int }{ - {"webhook", "webhook", http.StatusCreated}, - {"gotify", "gotify", http.StatusCreated}, - {"slack", "slack", http.StatusBadRequest}, - {"email", "email", http.StatusCreated}, + {"webhook", "webhook", "", http.StatusCreated}, + {"gotify", "gotify", "", http.StatusCreated}, + {"slack", "slack", "https://hooks.slack.com/services/T1234567890/B1234567890/XXXXXXXXXXXXXXXXXXXX", http.StatusCreated}, + {"email", "email", "", http.StatusCreated}, } for _, tc := range testCases { @@ -50,6 +53,7 @@ func TestBlocker3_CreateProviderRejectsNonDiscordWithSecurityEvents(t *testing.T "name": "Test Provider", "type": tc.providerType, "url": "https://example.com/webhook", + "token": tc.token, "enabled": true, "notify_security_waf_blocks": true, // Security event enabled } diff --git a/backend/internal/api/handlers/notification_provider_discord_only_test.go b/backend/internal/api/handlers/notification_provider_discord_only_test.go index f9f67d62..0a91d9f3 100644 --- a/backend/internal/api/handlers/notification_provider_discord_only_test.go +++ b/backend/internal/api/handlers/notification_provider_discord_only_test.go @@ -24,21 +24,24 @@ func TestDiscordOnly_CreateRejectsNonDiscord(t *testing.T) { require.NoError(t, err) require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{})) - service := services.NewNotificationService(db, nil) + service := services.NewNotificationService(db, nil, + services.WithSlackURLValidator(func(string) error { return nil }), + ) handler := NewNotificationProviderHandler(service) testCases := []struct { name string providerType string + token string wantStatus int wantCode string }{ - {"webhook", "webhook", http.StatusCreated, ""}, - {"gotify", "gotify", http.StatusCreated, ""}, - {"slack", "slack", http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE"}, - {"telegram", "telegram", http.StatusCreated, ""}, - {"generic", "generic", http.StatusBadRequest, 
"UNSUPPORTED_PROVIDER_TYPE"}, - {"email", "email", http.StatusCreated, ""}, + {"webhook", "webhook", "", http.StatusCreated, ""}, + {"gotify", "gotify", "", http.StatusCreated, ""}, + {"slack", "slack", "https://hooks.slack.com/services/T1234567890/B1234567890/XXXXXXXXXXXXXXXXXXXX", http.StatusCreated, ""}, + {"telegram", "telegram", "", http.StatusCreated, ""}, + {"generic", "generic", "", http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE"}, + {"email", "email", "", http.StatusCreated, ""}, } for _, tc := range testCases { @@ -47,6 +50,7 @@ func TestDiscordOnly_CreateRejectsNonDiscord(t *testing.T) { "name": "Test Provider", "type": tc.providerType, "url": "https://example.com/webhook", + "token": tc.token, "enabled": true, "notify_proxy_hosts": true, } @@ -363,7 +367,7 @@ func TestDiscordOnly_ErrorCodes(t *testing.T) { requestFunc: func(id string) (*http.Request, gin.Params) { payload := map[string]interface{}{ "name": "Test", - "type": "slack", + "type": "sms", "url": "https://example.com", } body, _ := json.Marshal(payload) diff --git a/backend/internal/api/handlers/notification_provider_handler.go b/backend/internal/api/handlers/notification_provider_handler.go index e45f5b8f..8b16cf2f 100644 --- a/backend/internal/api/handlers/notification_provider_handler.go +++ b/backend/internal/api/handlers/notification_provider_handler.go @@ -136,6 +136,16 @@ func classifyProviderTestFailure(err error) (code string, category string, messa return "PROVIDER_TEST_UNREACHABLE", "dispatch", "Could not reach provider endpoint. Verify URL, DNS, and network connectivity" } + if strings.Contains(errText, "invalid_payload") || + strings.Contains(errText, "missing_text_or_fallback") { + return "PROVIDER_TEST_VALIDATION_FAILED", "validation", + "Slack rejected the payload. Ensure your template includes a 'text' or 'blocks' field" + } + if strings.Contains(errText, "no_service") { + return "PROVIDER_TEST_AUTH_REJECTED", "dispatch", + "Slack webhook is revoked or the app is disabled. 
Create a new webhook" + } + return "PROVIDER_TEST_FAILED", "dispatch", "Provider test failed" } @@ -172,7 +182,7 @@ func (h *NotificationProviderHandler) Create(c *gin.Context) { } providerType := strings.ToLower(strings.TrimSpace(req.Type)) - if providerType != "discord" && providerType != "gotify" && providerType != "webhook" && providerType != "email" && providerType != "telegram" { + if providerType != "discord" && providerType != "gotify" && providerType != "webhook" && providerType != "email" && providerType != "telegram" && providerType != "slack" && providerType != "pushover" && providerType != "ntfy" { respondSanitizedProviderError(c, http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE", "validation", "Unsupported notification provider type") return } @@ -232,12 +242,12 @@ func (h *NotificationProviderHandler) Update(c *gin.Context) { } providerType := strings.ToLower(strings.TrimSpace(existing.Type)) - if providerType != "discord" && providerType != "gotify" && providerType != "webhook" && providerType != "email" && providerType != "telegram" { + if providerType != "discord" && providerType != "gotify" && providerType != "webhook" && providerType != "email" && providerType != "telegram" && providerType != "slack" && providerType != "pushover" && providerType != "ntfy" { respondSanitizedProviderError(c, http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE", "validation", "Unsupported notification provider type") return } - if (providerType == "gotify" || providerType == "telegram") && strings.TrimSpace(req.Token) == "" { + if (providerType == "gotify" || providerType == "telegram" || providerType == "slack" || providerType == "pushover" || providerType == "ntfy") && strings.TrimSpace(req.Token) == "" { // Keep existing token if update payload omits token req.Token = existing.Token } @@ -278,7 +288,8 @@ func isProviderValidationError(err error) bool { strings.Contains(errMsg, "rendered template") || strings.Contains(errMsg, "failed to parse template") || 
strings.Contains(errMsg, "failed to render template") || - strings.Contains(errMsg, "invalid Discord webhook URL") + strings.Contains(errMsg, "invalid Discord webhook URL") || + strings.Contains(errMsg, "invalid Slack webhook URL") } func (h *NotificationProviderHandler) Delete(c *gin.Context) { @@ -310,6 +321,21 @@ func (h *NotificationProviderHandler) Test(c *gin.Context) { return } + if providerType == "slack" && strings.TrimSpace(req.Token) != "" { + respondSanitizedProviderError(c, http.StatusBadRequest, "TOKEN_WRITE_ONLY", "validation", "Slack webhook URL is accepted only on provider create/update") + return + } + + if providerType == "telegram" && strings.TrimSpace(req.Token) != "" { + respondSanitizedProviderError(c, http.StatusBadRequest, "TOKEN_WRITE_ONLY", "validation", "Telegram bot token is accepted only on provider create/update") + return + } + + if providerType == "pushover" && strings.TrimSpace(req.Token) != "" { + respondSanitizedProviderError(c, http.StatusBadRequest, "TOKEN_WRITE_ONLY", "validation", "Pushover API token is accepted only on provider create/update") + return + } + // Email providers use global SMTP + recipients from the URL field; they don't require a saved provider ID. 
if providerType == "email" { provider := models.NotificationProvider{ @@ -343,7 +369,7 @@ func (h *NotificationProviderHandler) Test(c *gin.Context) { return } - if strings.TrimSpace(provider.URL) == "" { + if providerType != "slack" && strings.TrimSpace(provider.URL) == "" { respondSanitizedProviderError(c, http.StatusBadRequest, "PROVIDER_CONFIG_MISSING", "validation", "Trusted provider configuration is incomplete") return } diff --git a/backend/internal/api/handlers/notification_provider_handler_test.go b/backend/internal/api/handlers/notification_provider_handler_test.go index e75de4ac..a3fcc88d 100644 --- a/backend/internal/api/handlers/notification_provider_handler_test.go +++ b/backend/internal/api/handlers/notification_provider_handler_test.go @@ -668,3 +668,35 @@ func TestNotificationProviderHandler_List_TelegramNeverExposesBotToken(t *testin _, hasTokenField := raw[0]["token"] assert.False(t, hasTokenField, "raw token field must not appear in JSON response") } + +func TestNotificationProviderHandler_Test_TelegramTokenRejected(t *testing.T) { + r, _ := setupNotificationProviderTest(t) + + payload := map[string]any{ + "type": "telegram", + "token": "bot123:TOKEN", + } + body, _ := json.Marshal(payload) + req, _ := http.NewRequest("POST", "/api/v1/notifications/providers/test", bytes.NewBuffer(body)) + w := httptest.NewRecorder() + r.ServeHTTP(w, req) + + assert.Equal(t, http.StatusBadRequest, w.Code) + assert.Contains(t, w.Body.String(), "TOKEN_WRITE_ONLY") +} + +func TestNotificationProviderHandler_Test_PushoverTokenRejected(t *testing.T) { + r, _ := setupNotificationProviderTest(t) + + payload := map[string]any{ + "type": "pushover", + "token": "app-token-abc", + } + body, _ := json.Marshal(payload) + req, _ := http.NewRequest("POST", "/api/v1/notifications/providers/test", bytes.NewBuffer(body)) + w := httptest.NewRecorder() + r.ServeHTTP(w, req) + + assert.Equal(t, http.StatusBadRequest, w.Code) + assert.Contains(t, w.Body.String(), "TOKEN_WRITE_ONLY") 
+} diff --git a/backend/internal/api/handlers/proxy_host_handler.go b/backend/internal/api/handlers/proxy_host_handler.go index 705f7a07..6149e47d 100644 --- a/backend/internal/api/handlers/proxy_host_handler.go +++ b/backend/internal/api/handlers/proxy_host_handler.go @@ -236,10 +236,6 @@ func (h *ProxyHostHandler) resolveSecurityHeaderProfileReference(value any) (*ui return nil, nil } - if _, err := uuid.Parse(trimmed); err != nil { - return nil, parseErr - } - var profile models.SecurityHeaderProfile if err := h.db.Select("id").Where("uuid = ?", trimmed).First(&profile).Error; err != nil { if err == gorm.ErrRecordNotFound { @@ -362,7 +358,7 @@ func (h *ProxyHostHandler) Create(c *gin.Context) { if host.AdvancedConfig != "" { var parsed any if err := json.Unmarshal([]byte(host.AdvancedConfig), &parsed); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": "invalid advanced_config JSON: " + err.Error()}) + c.JSON(http.StatusBadRequest, gin.H{"error": "advanced_config must be valid Caddy JSON (not Caddyfile syntax). See https://caddyserver.com/docs/json/ for the correct format."}) return } parsed = caddy.NormalizeAdvancedConfig(parsed) @@ -590,7 +586,7 @@ func (h *ProxyHostHandler) Update(c *gin.Context) { if v != "" && v != host.AdvancedConfig { var parsed any if err := json.Unmarshal([]byte(v), &parsed); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": "invalid advanced_config JSON: " + err.Error()}) + c.JSON(http.StatusBadRequest, gin.H{"error": "advanced_config must be valid Caddy JSON (not Caddyfile syntax). 
See https://caddyserver.com/docs/json/ for the correct format."}) return } parsed = caddy.NormalizeAdvancedConfig(parsed) diff --git a/backend/internal/api/handlers/proxy_host_handler_test.go b/backend/internal/api/handlers/proxy_host_handler_test.go index 477f7238..6233eeaf 100644 --- a/backend/internal/api/handlers/proxy_host_handler_test.go +++ b/backend/internal/api/handlers/proxy_host_handler_test.go @@ -1552,7 +1552,7 @@ func TestProxyHostUpdate_SecurityHeaderProfile_InvalidString(t *testing.T) { var result map[string]any require.NoError(t, json.Unmarshal(resp.Body.Bytes(), &result)) - require.Contains(t, result["error"], "invalid security_header_profile_id") + require.Contains(t, result["error"], "security header profile not found") } // Test invalid float value (should fail gracefully) diff --git a/backend/internal/api/handlers/proxy_host_handler_update_test.go b/backend/internal/api/handlers/proxy_host_handler_update_test.go index 6c628f5f..3282ee17 100644 --- a/backend/internal/api/handlers/proxy_host_handler_update_test.go +++ b/backend/internal/api/handlers/proxy_host_handler_update_test.go @@ -732,7 +732,49 @@ func TestProxyHostUpdate_SecurityHeaderProfileID_InvalidString(t *testing.T) { var result map[string]any require.NoError(t, json.Unmarshal(resp.Body.Bytes(), &result)) - assert.Contains(t, result["error"], "invalid security_header_profile_id") + assert.Contains(t, result["error"], "security header profile not found") +} + +// TestProxyHostUpdate_SecurityHeaderProfileID_PresetSlugUUID tests that a preset-style UUID +// slug (e.g. "preset-basic") resolves correctly to the numeric profile ID via a DB lookup, +// bypassing the uuid.Parse gate that would otherwise reject non-standard slug formats. 
+func TestProxyHostUpdate_SecurityHeaderProfileID_PresetSlugUUID(t *testing.T) { + t.Parallel() + router, db := setupUpdateTestRouter(t) + + // Create a profile whose UUID mimics a preset slug (non-standard UUID format) + slugUUID := "preset-basic" + profile := models.SecurityHeaderProfile{ + UUID: slugUUID, + Name: "Basic Security", + IsPreset: true, + SecurityScore: 65, + } + require.NoError(t, db.Create(&profile).Error) + + host := createTestProxyHost(t, db, "preset-slug-test") + + updateBody := map[string]any{ + "name": "Test Host Updated", + "domain_names": "preset-slug-test.test.com", + "forward_scheme": "http", + "forward_host": "localhost", + "forward_port": 8080, + "security_header_profile_id": slugUUID, + } + body, _ := json.Marshal(updateBody) + + req := httptest.NewRequest(http.MethodPut, "/api/v1/proxy-hosts/"+host.UUID, bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + resp := httptest.NewRecorder() + router.ServeHTTP(resp, req) + + require.Equal(t, http.StatusOK, resp.Code) + + var updated models.ProxyHost + require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error) + require.NotNil(t, updated.SecurityHeaderProfileID) + assert.Equal(t, profile.ID, *updated.SecurityHeaderProfileID) } // TestProxyHostUpdate_SecurityHeaderProfileID_UnsupportedType tests that an unsupported type @@ -820,6 +862,10 @@ func TestProxyHostUpdate_SecurityHeaderProfileID_ValidAssignment(t *testing.T) { name: "as_string", value: fmt.Sprintf("%d", profile.ID), }, + { + name: "as_uuid_string", + value: profile.UUID, + }, } for _, tc := range testCases { diff --git a/backend/internal/api/handlers/security_notifications_final_blockers_test.go b/backend/internal/api/handlers/security_notifications_final_blockers_test.go index 7aedf121..ff924c42 100644 --- a/backend/internal/api/handlers/security_notifications_final_blockers_test.go +++ b/backend/internal/api/handlers/security_notifications_final_blockers_test.go @@ -224,7 +224,7 @@ func 
TestFinalBlocker3_SupportedProviderTypes_UnsupportedTypesIgnored(t *testing db := SetupCompatibilityTestDB(t) // Create ONLY unsupported providers - unsupportedTypes := []string{"pushover", "generic"} + unsupportedTypes := []string{"sms", "generic"} for _, providerType := range unsupportedTypes { provider := &models.NotificationProvider{ diff --git a/backend/internal/api/handlers/settings_handler.go b/backend/internal/api/handlers/settings_handler.go index 6e4a47ab..3c1e41fe 100644 --- a/backend/internal/api/handlers/settings_handler.go +++ b/backend/internal/api/handlers/settings_handler.go @@ -114,7 +114,7 @@ func isSensitiveSettingKey(key string) bool { type UpdateSettingRequest struct { Key string `json:"key" binding:"required"` - Value string `json:"value" binding:"required"` + Value string `json:"value"` Category string `json:"category"` Type string `json:"type"` } diff --git a/backend/internal/api/handlers/settings_handler_test.go b/backend/internal/api/handlers/settings_handler_test.go index b8d5ae6d..708c1758 100644 --- a/backend/internal/api/handlers/settings_handler_test.go +++ b/backend/internal/api/handlers/settings_handler_test.go @@ -438,6 +438,55 @@ func TestSettingsHandler_UpdateSetting_InvalidAdminWhitelist(t *testing.T) { assert.Contains(t, w.Body.String(), "Invalid admin_whitelist") } +func TestSettingsHandler_UpdateSetting_EmptyValueAccepted(t *testing.T) { + gin.SetMode(gin.TestMode) + db := setupSettingsTestDB(t) + + handler := handlers.NewSettingsHandler(db) + router := newAdminRouter() + router.POST("/settings", handler.UpdateSetting) + + payload := map[string]string{ + "key": "some.setting", + "value": "", + } + body, _ := json.Marshal(payload) + + w := httptest.NewRecorder() + req, _ := http.NewRequest("POST", "/settings", bytes.NewBuffer(body)) + req.Header.Set("Content-Type", "application/json") + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var setting models.Setting + require.NoError(t, db.Where("key = ?", 
"some.setting").First(&setting).Error) + assert.Equal(t, "some.setting", setting.Key) + assert.Equal(t, "", setting.Value) +} + +func TestSettingsHandler_UpdateSetting_MissingKeyRejected(t *testing.T) { + gin.SetMode(gin.TestMode) + db := setupSettingsTestDB(t) + + handler := handlers.NewSettingsHandler(db) + router := newAdminRouter() + router.POST("/settings", handler.UpdateSetting) + + payload := map[string]string{ + "value": "some-value", + } + body, _ := json.Marshal(payload) + + w := httptest.NewRecorder() + req, _ := http.NewRequest("POST", "/settings", bytes.NewBuffer(body)) + req.Header.Set("Content-Type", "application/json") + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusBadRequest, w.Code) + assert.Contains(t, w.Body.String(), "Key") +} + func TestSettingsHandler_UpdateSetting_InvalidKeepaliveIdle(t *testing.T) { gin.SetMode(gin.TestMode) db := setupSettingsTestDB(t) @@ -744,16 +793,27 @@ func TestSettingsHandler_Errors(t *testing.T) { router.ServeHTTP(w, req) assert.Equal(t, http.StatusBadRequest, w.Code) - // Missing Key/Value + // Value omitted — allowed since binding:"required" was removed; empty string is a valid value payload := map[string]string{ "key": "some_key", - // value missing + // value intentionally absent; defaults to empty string } body, _ := json.Marshal(payload) req, _ = http.NewRequest("POST", "/settings", bytes.NewBuffer(body)) req.Header.Set("Content-Type", "application/json") w = httptest.NewRecorder() router.ServeHTTP(w, req) + assert.Equal(t, http.StatusOK, w.Code) + + // Missing key — key is still binding:"required" so this must return 400 + payloadNoKey := map[string]string{ + "value": "some_value", + } + bodyNoKey, _ := json.Marshal(payloadNoKey) + req, _ = http.NewRequest("POST", "/settings", bytes.NewBuffer(bodyNoKey)) + req.Header.Set("Content-Type", "application/json") + w = httptest.NewRecorder() + router.ServeHTTP(w, req) assert.Equal(t, http.StatusBadRequest, w.Code) } @@ -1511,7 +1571,7 @@ func 
TestSettingsHandler_TestPublicURL_SSRFProtection(t *testing.T) { url: "http://169.254.169.254", expectedStatus: http.StatusOK, expectedReachable: false, - errorContains: "private", + errorContains: "cloud metadata", }, { name: "blocks link-local", @@ -1763,3 +1823,48 @@ func TestSettingsHandler_TestPublicURL_IPv6LocalhostBlocked(t *testing.T) { assert.False(t, resp["reachable"].(bool)) // IPv6 loopback should be blocked } + +// TestUpdateSetting_EmptyValueIsAccepted guards the PR-1 fix: Value must NOT carry +// binding:"required". Gin treats "" as missing for string fields and returns 400 if +// the tag is present. Re-adding the tag would silently regress the CrowdSec enable +// flow (which sends value="" to clear the setting). +func TestUpdateSetting_EmptyValueIsAccepted(t *testing.T) { + gin.SetMode(gin.TestMode) + db := setupSettingsTestDB(t) + + handler := handlers.NewSettingsHandler(db) + router := newAdminRouter() + router.POST("/settings", handler.UpdateSetting) + + body := `{"key":"security.crowdsec.enabled","value":""}` + req, _ := http.NewRequest(http.MethodPost, "/settings", strings.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code, "empty Value must not trigger a 400 validation error") + + var s models.Setting + require.NoError(t, db.Where("key = ?", "security.crowdsec.enabled").First(&s).Error) + assert.Equal(t, "", s.Value) +} + +// TestUpdateSetting_MissingKeyRejected ensures binding:"required" was only removed +// from Value and not accidentally also from Key. A request with no "key" field must +// still return 400. 
+func TestUpdateSetting_MissingKeyRejected(t *testing.T) { + gin.SetMode(gin.TestMode) + db := setupSettingsTestDB(t) + + handler := handlers.NewSettingsHandler(db) + router := newAdminRouter() + router.POST("/settings", handler.UpdateSetting) + + body := `{"value":"true"}` + req, _ := http.NewRequest(http.MethodPost, "/settings", strings.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusBadRequest, w.Code) +} diff --git a/backend/internal/api/routes/routes.go b/backend/internal/api/routes/routes.go index cd4ab284..dc6d0925 100644 --- a/backend/internal/api/routes/routes.go +++ b/backend/internal/api/routes/routes.go @@ -127,6 +127,13 @@ func RegisterWithDeps(router *gin.Engine, db *gorm.DB, cfg config.Config, caddyM } migrateViewerToPassthrough(db) + + // Seed the default SecurityConfig row on every startup (idempotent). + // Missing on fresh installs causes GetStatus to return all-disabled zero values. 
+ if _, err := models.SeedDefaultSecurityConfig(db); err != nil { + logger.Log().WithError(err).Warn("Failed to seed default SecurityConfig — continuing startup") + } + // Let's Encrypt certs are auto-managed by Caddy and should not be assigned via certificate_id logger.Log().Info("Cleaning up invalid Let's Encrypt certificate associations...") var hostsWithInvalidCerts []models.ProxyHost diff --git a/backend/internal/api/routes/routes_test.go b/backend/internal/api/routes/routes_test.go index fb32b7c6..9f8f8dfc 100644 --- a/backend/internal/api/routes/routes_test.go +++ b/backend/internal/api/routes/routes_test.go @@ -1322,3 +1322,29 @@ func TestMigrateViewerToPassthrough(t *testing.T) { require.NoError(t, db.First(&updated, viewer.ID).Error) assert.Equal(t, models.RolePassthrough, updated.Role) } + +func TestRegister_CleansLetsEncryptCertAssignments(t *testing.T) { + gin.SetMode(gin.TestMode) + router := gin.New() + + db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared&_test_lecleaner"), &gorm.Config{}) + require.NoError(t, err) + + // Pre-migrate just the two tables needed to seed test data before Register runs. 
+ require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{})) + + cert := models.SSLCertificate{Provider: "letsencrypt"} + require.NoError(t, db.Create(&cert).Error) + + certID := cert.ID + host := models.ProxyHost{DomainNames: "test.example.com", CertificateID: &certID} + require.NoError(t, db.Create(&host).Error) + + cfg := config.Config{JWTSecret: "test-secret"} + err = Register(router, db, cfg) + require.NoError(t, err) + + var reloaded models.ProxyHost + require.NoError(t, db.First(&reloaded, host.ID).Error) + assert.Nil(t, reloaded.CertificateID, "letsencrypt cert assignment must be cleared") +} diff --git a/backend/internal/api/tests/user_smtp_audit_test.go b/backend/internal/api/tests/user_smtp_audit_test.go index 571bac09..c2a1e113 100644 --- a/backend/internal/api/tests/user_smtp_audit_test.go +++ b/backend/internal/api/tests/user_smtp_audit_test.go @@ -2,6 +2,7 @@ package tests import ( "bytes" + "encoding/hex" "encoding/json" "fmt" "net/http" @@ -13,6 +14,7 @@ import ( "github.com/gin-gonic/gin" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "golang.org/x/crypto/bcrypt" "gorm.io/driver/sqlite" "gorm.io/gorm" "gorm.io/gorm/logger" @@ -21,7 +23,18 @@ import ( "github.com/Wikid82/charon/backend/internal/models" ) -// setupAuditTestDB creates a clean in-memory database for each test +// hashForTest returns a bcrypt hash using minimum cost for fast test setup. +// NEVER use this in production — use models.User.SetPassword instead. +func hashForTest(t *testing.T, password string) string { + t.Helper() + h, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.MinCost) + require.NoError(t, err) + return string(h) +} + +// setupAuditTestDB creates a clean in-memory database for each test. +// MaxOpenConns(1) is required: without it, GORM's pool can open multiple +// connections to ":memory:", each receiving its own empty database. 
func setupAuditTestDB(t *testing.T) *gorm.DB { t.Helper() db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{ @@ -29,11 +42,23 @@ func setupAuditTestDB(t *testing.T) *gorm.DB { }) require.NoError(t, err) - // Auto-migrate required models + sqlDB, err := db.DB() + require.NoError(t, err) + sqlDB.SetMaxOpenConns(1) + sqlDB.SetMaxIdleConns(1) + + t.Cleanup(func() { + _ = sqlDB.Close() + }) + + // Auto-migrate required models (includes SecurityAudit so the + // background audit goroutine in SecurityService doesn't retry + // against a missing table). err = db.AutoMigrate( &models.User{}, &models.Setting{}, &models.ProxyHost{}, + &models.SecurityAudit{}, ) require.NoError(t, err) return db @@ -43,14 +68,14 @@ func setupAuditTestDB(t *testing.T) *gorm.DB { func createTestAdminUser(t *testing.T, db *gorm.DB) uint { t.Helper() admin := models.User{ - UUID: "admin-uuid-1234", - Email: "admin@test.com", - Name: "Test Admin", - Role: models.RoleAdmin, - Enabled: true, - APIKey: "test-api-key", + UUID: "admin-uuid-1234", + Email: "admin@test.com", + Name: "Test Admin", + Role: models.RoleAdmin, + Enabled: true, + APIKey: "test-api-key", + PasswordHash: hashForTest(t, "adminpassword123"), } - require.NoError(t, admin.SetPassword("adminpassword123")) require.NoError(t, db.Create(&admin).Error) return admin.ID } @@ -96,7 +121,7 @@ func TestInviteToken_MustBeUnguessable(t *testing.T) { w := httptest.NewRecorder() r.ServeHTTP(w, req) - require.Equal(t, http.StatusCreated, w.Code) + require.Equal(t, http.StatusCreated, w.Code, "invite endpoint failed; body: %s", w.Body.String()) var resp map[string]any require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp)) @@ -104,15 +129,18 @@ func TestInviteToken_MustBeUnguessable(t *testing.T) { var invitedUser models.User require.NoError(t, db.Where("email = ?", "user@test.com").First(&invitedUser).Error) token := invitedUser.InviteToken - require.NotEmpty(t, token) + require.NotEmpty(t, token, "invite token must not be empty") - // 
Token MUST be at least 32 chars (64 hex = 32 bytes = 256 bits) - assert.GreaterOrEqual(t, len(token), 64, "Invite token must be at least 64 hex chars (256 bits)") + // Token MUST be at least 32 bytes (64 hex chars = 256 bits of entropy) + require.GreaterOrEqual(t, len(token), 64, "invite token must be at least 64 hex chars (256 bits); got len=%d token=%q", len(token), token) - // Token must be hex - for _, c := range token { - assert.True(t, (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f'), "Token must be hex encoded") - } + // Token must be valid hex (all characters in [0-9a-f]). + // hex.DecodeString accepts both cases, so check for lowercase explicitly: + // hex.EncodeToString (used by generateSecureToken) always emits lowercase, + // so uppercase would indicate a regression in the token-generation path. + _, err := hex.DecodeString(token) + require.NoError(t, err, "invite token must be valid hex; got %q", token) + require.Equal(t, strings.ToLower(token), token, "invite token must be lowercase hex (as produced by hex.EncodeToString); got %q", token) } func TestInviteToken_ExpiredCannotBeUsed(t *testing.T) { @@ -156,11 +184,11 @@ func TestInviteToken_CannotBeReused(t *testing.T) { Name: "Accepted User", Role: models.RoleUser, Enabled: true, + PasswordHash: hashForTest(t, "somepassword"), InviteToken: "accepted-token-1234567890123456789012345678901", InvitedAt: &invitedAt, InviteStatus: "accepted", } - require.NoError(t, user.SetPassword("somepassword")) require.NoError(t, db.Create(&user).Error) r := setupRouterWithAuth(db, adminID, "admin") @@ -267,26 +295,26 @@ func TestUserEndpoints_RequireAdmin(t *testing.T) { // Create regular user user := models.User{ - UUID: "user-uuid-1234", - Email: "user@test.com", - Name: "Regular User", - Role: models.RoleUser, - Enabled: true, - APIKey: "user-api-key-unique", + UUID: "user-uuid-1234", + Email: "user@test.com", + Name: "Regular User", + Role: models.RoleUser, + Enabled: true, + APIKey: "user-api-key-unique", + 
PasswordHash: hashForTest(t, "userpassword123"), } - require.NoError(t, user.SetPassword("userpassword123")) require.NoError(t, db.Create(&user).Error) // Create a second user to test admin-only operations against a non-self target otherUser := models.User{ - UUID: "other-uuid-5678", - Email: "other@test.com", - Name: "Other User", - Role: models.RoleUser, - Enabled: true, - APIKey: "other-api-key-unique", + UUID: "other-uuid-5678", + Email: "other@test.com", + Name: "Other User", + Role: models.RoleUser, + Enabled: true, + APIKey: "other-api-key-unique", + PasswordHash: hashForTest(t, "otherpassword123"), } - require.NoError(t, otherUser.SetPassword("otherpassword123")) require.NoError(t, db.Create(&otherUser).Error) // Router with regular user role @@ -328,13 +356,13 @@ func TestSMTPEndpoints_RequireAdmin(t *testing.T) { db := setupAuditTestDB(t) user := models.User{ - UUID: "user-uuid-5678", - Email: "user2@test.com", - Name: "Regular User 2", - Role: models.RoleUser, - Enabled: true, + UUID: "user-uuid-5678", + Email: "user2@test.com", + Name: "Regular User 2", + Role: models.RoleUser, + Enabled: true, + PasswordHash: hashForTest(t, "userpassword123"), } - require.NoError(t, user.SetPassword("userpassword123")) require.NoError(t, db.Create(&user).Error) r := setupRouterWithAuth(db, user.ID, "user") diff --git a/backend/internal/models/seed.go b/backend/internal/models/seed.go new file mode 100644 index 00000000..adf5e1fa --- /dev/null +++ b/backend/internal/models/seed.go @@ -0,0 +1,41 @@ +package models + +import ( + "github.com/google/uuid" + "gorm.io/gorm" +) + +// SeedDefaultSecurityConfig ensures a default SecurityConfig row exists in the database. +// It uses FirstOrCreate so it is safe to call on every startup — existing data is never +// overwritten. Returns the upserted record and any error encountered. 
+func SeedDefaultSecurityConfig(db *gorm.DB) (*SecurityConfig, error) { + record := SecurityConfig{ + UUID: uuid.NewString(), + Name: "default", + Enabled: false, + CrowdSecMode: "disabled", + CrowdSecAPIURL: "http://127.0.0.1:8085", + WAFMode: "disabled", + WAFParanoiaLevel: 1, + RateLimitMode: "disabled", + RateLimitEnable: false, + // Zero values are intentional for the disabled default state. + // cerberus.RateLimitMiddleware guards against zero/negative values by falling + // back to safe operational defaults (requests=100, window=60s, burst=20) before + // computing the token-bucket rate. buildRateLimitHandler (caddy/config.go) also + // returns nil — skipping rate-limit injection — when either value is ≤ 0. + // A user enabling rate limiting via the UI without configuring thresholds will + // therefore receive the safe hardcoded defaults, not a zero-rate limit. + RateLimitBurst: 0, + RateLimitRequests: 0, + RateLimitWindowSec: 0, + } + + // FirstOrCreate matches on Name only; if a row with name="default" already exists + // it is loaded into record without modifying any of its fields. 
+ result := db.Where(SecurityConfig{Name: "default"}).FirstOrCreate(&record) + if result.Error != nil { + return nil, result.Error + } + return &record, nil +} diff --git a/backend/internal/models/seed_test.go b/backend/internal/models/seed_test.go new file mode 100644 index 00000000..d722612a --- /dev/null +++ b/backend/internal/models/seed_test.go @@ -0,0 +1,102 @@ +package models_test + +import ( + "testing" + + "github.com/glebarez/sqlite" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/models" +) + +func newSeedTestDB(t *testing.T) *gorm.DB { + t.Helper() + db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{}) + require.NoError(t, err) + require.NoError(t, db.AutoMigrate(&models.SecurityConfig{})) + return db +} + +func TestSeedDefaultSecurityConfig_EmptyDB(t *testing.T) { + db := newSeedTestDB(t) + + rec, err := models.SeedDefaultSecurityConfig(db) + require.NoError(t, err) + require.NotNil(t, rec) + + assert.Equal(t, "default", rec.Name) + assert.False(t, rec.Enabled) + assert.Equal(t, "disabled", rec.CrowdSecMode) + assert.Equal(t, "http://127.0.0.1:8085", rec.CrowdSecAPIURL) + assert.Equal(t, "disabled", rec.WAFMode) + assert.Equal(t, "disabled", rec.RateLimitMode) + assert.NotEmpty(t, rec.UUID) + + var count int64 + db.Model(&models.SecurityConfig{}).Where("name = ?", "default").Count(&count) + assert.Equal(t, int64(1), count) +} + +func TestSeedDefaultSecurityConfig_Idempotent(t *testing.T) { + db := newSeedTestDB(t) + + // First call — creates the row. + rec1, err := models.SeedDefaultSecurityConfig(db) + require.NoError(t, err) + require.NotNil(t, rec1) + + // Second call — must not error and must not duplicate. 
+ rec2, err := models.SeedDefaultSecurityConfig(db) + require.NoError(t, err) + require.NotNil(t, rec2) + + assert.Equal(t, rec1.ID, rec2.ID, "ID must be identical on subsequent calls") + + var count int64 + db.Model(&models.SecurityConfig{}).Where("name = ?", "default").Count(&count) + assert.Equal(t, int64(1), count, "exactly one row should exist after two seed calls") +} + +func TestSeedDefaultSecurityConfig_DBError(t *testing.T) { + db := newSeedTestDB(t) + + sqlDB, err := db.DB() + require.NoError(t, err) + require.NoError(t, sqlDB.Close()) + + rec, err := models.SeedDefaultSecurityConfig(db) + assert.Error(t, err) + assert.Nil(t, rec) +} + +func TestSeedDefaultSecurityConfig_DoesNotOverwriteExisting(t *testing.T) { + db := newSeedTestDB(t) + + // Pre-seed a customised row. + existing := models.SecurityConfig{ + UUID: "pre-existing-uuid", + Name: "default", + Enabled: true, + CrowdSecMode: "local", + CrowdSecAPIURL: "http://192.168.1.5:8085", + WAFMode: "block", + RateLimitMode: "enabled", + } + require.NoError(t, db.Create(&existing).Error) + + // Seed should find the existing row and return it unchanged. 
+ rec, err := models.SeedDefaultSecurityConfig(db) + require.NoError(t, err) + require.NotNil(t, rec) + + assert.True(t, rec.Enabled, "existing Enabled flag must not be overwritten") + assert.Equal(t, "local", rec.CrowdSecMode, "existing CrowdSecMode must not be overwritten") + assert.Equal(t, "http://192.168.1.5:8085", rec.CrowdSecAPIURL) + assert.Equal(t, "block", rec.WAFMode) + + var count int64 + db.Model(&models.SecurityConfig{}).Where("name = ?", "default").Count(&count) + assert.Equal(t, int64(1), count) +} diff --git a/backend/internal/models/ssl_certificate.go b/backend/internal/models/ssl_certificate.go index 705eadda..8734a789 100644 --- a/backend/internal/models/ssl_certificate.go +++ b/backend/internal/models/ssl_certificate.go @@ -10,7 +10,7 @@ type SSLCertificate struct { ID uint `json:"-" gorm:"primaryKey"` UUID string `json:"uuid" gorm:"uniqueIndex"` Name string `json:"name" gorm:"index"` - Provider string `json:"provider" gorm:"index"` // "letsencrypt", "custom", "self-signed" + Provider string `json:"provider" gorm:"index"` // "letsencrypt", "letsencrypt-staging", "custom" Domains string `json:"domains" gorm:"index"` // comma-separated list of domains Certificate string `json:"certificate" gorm:"type:text"` // PEM-encoded certificate PrivateKey string `json:"private_key" gorm:"type:text"` // PEM-encoded private key diff --git a/backend/internal/network/safeclient.go b/backend/internal/network/safeclient.go index c1432361..73b40aad 100644 --- a/backend/internal/network/safeclient.go +++ b/backend/internal/network/safeclient.go @@ -19,6 +19,22 @@ var ( initOnce sync.Once ) +// rfc1918Blocks holds pre-parsed CIDR blocks for RFC 1918 private address ranges only. +// Initialized once and used by IsRFC1918 to support the AllowRFC1918 bypass path. +var ( + rfc1918Blocks []*net.IPNet + rfc1918Once sync.Once +) + +// rfc1918CIDRs enumerates exactly the three RFC 1918 private address ranges. 
+// Intentionally excludes loopback, link-local, cloud metadata (169.254.x.x), +// and all other reserved ranges — those remain blocked regardless of AllowRFC1918. +var rfc1918CIDRs = []string{ + "10.0.0.0/8", + "172.16.0.0/12", + "192.168.0.0/16", +} + // privateCIDRs defines all private and reserved IP ranges to block for SSRF protection. // This list covers: // - RFC 1918 private networks (10.x, 172.16-31.x, 192.168.x) @@ -68,6 +84,21 @@ func initPrivateBlocks() { }) } +// initRFC1918Blocks parses the three RFC 1918 CIDR blocks once at startup. +func initRFC1918Blocks() { + rfc1918Once.Do(func() { + rfc1918Blocks = make([]*net.IPNet, 0, len(rfc1918CIDRs)) + for _, cidr := range rfc1918CIDRs { + _, block, err := net.ParseCIDR(cidr) + if err != nil { + // This should never happen with valid CIDR strings + continue + } + rfc1918Blocks = append(rfc1918Blocks, block) + } + }) +} + // IsPrivateIP checks if an IP address is private, loopback, link-local, or otherwise restricted. // This function implements comprehensive SSRF protection by blocking: // - Private IPv4 ranges (RFC 1918): 10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16 @@ -110,6 +141,35 @@ func IsPrivateIP(ip net.IP) bool { return false } +// IsRFC1918 reports whether an IP address belongs to one of the three RFC 1918 +// private address ranges: 10.0.0.0/8, 172.16.0.0/12, or 192.168.0.0/16. +// +// Unlike IsPrivateIP, this function only covers RFC 1918 ranges. It does NOT +// return true for loopback, link-local (169.254.x.x), cloud metadata endpoints, +// or any other reserved ranges. Use this to implement the AllowRFC1918 bypass +// while keeping all other SSRF protections in place. +// +// Exported so url_validator.go (package security) can call it without duplicating logic. 
+func IsRFC1918(ip net.IP) bool { + if ip == nil { + return false + } + + initRFC1918Blocks() + + // Normalise IPv4-mapped IPv6 addresses (::ffff:192.168.x.x → 192.168.x.x) + if ip4 := ip.To4(); ip4 != nil { + ip = ip4 + } + + for _, block := range rfc1918Blocks { + if block.Contains(ip) { + return true + } + } + return false +} + // ClientOptions configures the behavior of the safe HTTP client. type ClientOptions struct { // Timeout is the total request timeout (default: 10s) @@ -129,6 +189,14 @@ type ClientOptions struct { // DialTimeout is the connection timeout for individual dial attempts (default: 5s) DialTimeout time.Duration + + // AllowRFC1918 permits connections to RFC 1918 private address ranges: + // 10.0.0.0/8, 172.16.0.0/12, and 192.168.0.0/16. + // + // SECURITY NOTE: Enable only for admin-configured features (e.g., uptime monitors + // targeting internal hosts). All other restricted ranges — loopback, link-local, + // cloud metadata (169.254.x.x), and reserved — remain blocked regardless. + AllowRFC1918 bool } // Option is a functional option for configuring ClientOptions. @@ -183,6 +251,17 @@ func WithDialTimeout(timeout time.Duration) Option { } } +// WithAllowRFC1918 permits connections to RFC 1918 private address ranges +// (10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16). +// +// Use only for admin-configured features such as uptime monitors that need to +// reach internal hosts. All other SSRF protections remain active. +func WithAllowRFC1918() Option { + return func(opts *ClientOptions) { + opts.AllowRFC1918 = true + } +} + // safeDialer creates a custom dial function that validates IP addresses at connection time. // This prevents DNS rebinding attacks by: // 1. Resolving the hostname to IP addresses @@ -225,6 +304,13 @@ func safeDialer(opts *ClientOptions) func(ctx context.Context, network, addr str continue } + // Allow RFC 1918 addresses only when explicitly permitted (e.g., admin-configured + // uptime monitors targeting internal hosts). 
Link-local (169.254.x.x), loopback, + // cloud metadata, and all other restricted ranges remain blocked. + if opts.AllowRFC1918 && IsRFC1918(ip.IP) { + continue + } + if IsPrivateIP(ip.IP) { return nil, fmt.Errorf("connection to private IP blocked: %s resolved to %s", host, ip.IP) } @@ -237,6 +323,11 @@ func safeDialer(opts *ClientOptions) func(ctx context.Context, network, addr str selectedIP = ip.IP break } + // Select RFC 1918 IPs when the caller has opted in. + if opts.AllowRFC1918 && IsRFC1918(ip.IP) { + selectedIP = ip.IP + break + } if !IsPrivateIP(ip.IP) { selectedIP = ip.IP break @@ -255,6 +346,9 @@ func safeDialer(opts *ClientOptions) func(ctx context.Context, network, addr str // validateRedirectTarget checks if a redirect URL is safe to follow. // Returns an error if the redirect target resolves to private IPs. +// +// TODO: If MaxRedirects is ever re-enabled for uptime monitors, thread AllowRFC1918 +// through this function to permit RFC 1918 redirect targets. func validateRedirectTarget(req *http.Request, opts *ClientOptions) error { host := req.URL.Hostname() if host == "" { diff --git a/backend/internal/network/safeclient_test.go b/backend/internal/network/safeclient_test.go index 84f48a2e..1216f2e2 100644 --- a/backend/internal/network/safeclient_test.go +++ b/backend/internal/network/safeclient_test.go @@ -920,3 +920,230 @@ func containsSubstr(s, substr string) bool { } return false } + +// PR-3: IsRFC1918 unit tests + +func TestIsRFC1918_RFC1918Addresses(t *testing.T) { + t.Parallel() + tests := []struct { + name string + ip string + }{ + {"10.0.0.0 start", "10.0.0.0"}, + {"10.0.0.1", "10.0.0.1"}, + {"10.128.0.1", "10.128.0.1"}, + {"10.255.255.255 end", "10.255.255.255"}, + {"172.16.0.0 start", "172.16.0.0"}, + {"172.16.0.1", "172.16.0.1"}, + {"172.24.0.1", "172.24.0.1"}, + {"172.31.255.255 end", "172.31.255.255"}, + {"192.168.0.0 start", "192.168.0.0"}, + {"192.168.1.1", "192.168.1.1"}, + {"192.168.255.255 end", "192.168.255.255"}, + } + for _, 
tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ip := net.ParseIP(tt.ip) + if ip == nil { + t.Fatalf("failed to parse IP: %s", tt.ip) + } + if !IsRFC1918(ip) { + t.Errorf("IsRFC1918(%s) = false, want true", tt.ip) + } + }) + } +} + +func TestIsRFC1918_NonRFC1918Addresses(t *testing.T) { + t.Parallel() + tests := []struct { + name string + ip string + }{ + {"Loopback 127.0.0.1", "127.0.0.1"}, + {"Link-local 169.254.1.1", "169.254.1.1"}, + {"Cloud metadata 169.254.169.254", "169.254.169.254"}, + {"IPv6 loopback ::1", "::1"}, + {"IPv6 link-local fe80::1", "fe80::1"}, + {"Public 8.8.8.8", "8.8.8.8"}, + {"Unspecified 0.0.0.0", "0.0.0.0"}, + {"Broadcast 255.255.255.255", "255.255.255.255"}, + {"Reserved 240.0.0.1", "240.0.0.1"}, + {"IPv6 unique local fc00::1", "fc00::1"}, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ip := net.ParseIP(tt.ip) + if ip == nil { + t.Fatalf("failed to parse IP: %s", tt.ip) + } + if IsRFC1918(ip) { + t.Errorf("IsRFC1918(%s) = true, want false", tt.ip) + } + }) + } +} + +func TestIsRFC1918_NilIP(t *testing.T) { + t.Parallel() + if IsRFC1918(nil) { + t.Error("IsRFC1918(nil) = true, want false") + } +} + +func TestIsRFC1918_BoundaryAddresses(t *testing.T) { + t.Parallel() + tests := []struct { + name string + ip string + expected bool + }{ + {"11.0.0.0 just outside 10/8", "11.0.0.0", false}, + {"172.15.255.255 just below 172.16/12", "172.15.255.255", false}, + {"172.32.0.0 just above 172.31/12", "172.32.0.0", false}, + {"192.167.255.255 just below 192.168/16", "192.167.255.255", false}, + {"192.169.0.0 just above 192.168/16", "192.169.0.0", false}, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ip := net.ParseIP(tt.ip) + if ip == nil { + t.Fatalf("failed to parse IP: %s", tt.ip) + } + if got := IsRFC1918(ip); got != tt.expected { + t.Errorf("IsRFC1918(%s) = %v, want %v", tt.ip, got, tt.expected) + } + }) + 
} +} + +func TestIsRFC1918_IPv4MappedAddresses(t *testing.T) { + t.Parallel() + // IPv4-mapped IPv6 representations of RFC 1918 addresses should be + // recognised as RFC 1918 (after To4() normalisation inside IsRFC1918). + tests := []struct { + name string + ip string + expected bool + }{ + {"::ffff:10.0.0.1 mapped", "::ffff:10.0.0.1", true}, + {"::ffff:192.168.1.1 mapped", "::ffff:192.168.1.1", true}, + {"::ffff:172.16.0.1 mapped", "::ffff:172.16.0.1", true}, + {"::ffff:8.8.8.8 mapped public", "::ffff:8.8.8.8", false}, + {"::ffff:169.254.169.254 mapped link-local", "::ffff:169.254.169.254", false}, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ip := net.ParseIP(tt.ip) + if ip == nil { + t.Fatalf("failed to parse IP: %s", tt.ip) + } + if got := IsRFC1918(ip); got != tt.expected { + t.Errorf("IsRFC1918(%s) = %v, want %v", tt.ip, got, tt.expected) + } + }) + } +} + +// PR-3: AllowRFC1918 safeDialer / client tests + +func TestSafeDialer_AllowRFC1918_ValidationLoopSkipsRFC1918(t *testing.T) { + // When AllowRFC1918 is set, the validation loop must NOT return + // "connection to private IP blocked" for RFC 1918 addresses. + // The subsequent TCP connection will fail because nothing is listening on + // 192.168.1.1:80 in the test environment, but the error must be a + // connection-level error, not an SSRF-block. 
+ opts := &ClientOptions{ + Timeout: 200 * time.Millisecond, + DialTimeout: 200 * time.Millisecond, + AllowRFC1918: true, + } + dial := safeDialer(opts) + ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) + defer cancel() + + _, err := dial(ctx, "tcp", "192.168.1.1:80") + if err == nil { + t.Fatal("expected a connection error, got nil") + } + if contains(err.Error(), "connection to private IP blocked") { + t.Errorf("AllowRFC1918 should prevent private-IP blocking message; got: %v", err) + } +} + +func TestSafeDialer_AllowRFC1918_BlocksLinkLocal(t *testing.T) { + // Link-local (169.254.x.x) must remain blocked even when AllowRFC1918=true. + opts := &ClientOptions{ + Timeout: 200 * time.Millisecond, + DialTimeout: 200 * time.Millisecond, + AllowRFC1918: true, + } + dial := safeDialer(opts) + ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) + defer cancel() + + _, err := dial(ctx, "tcp", "169.254.1.1:80") + if err == nil { + t.Fatal("expected an error for link-local address, got nil") + } + if !contains(err.Error(), "connection to private IP blocked") { + t.Errorf("expected link-local to be blocked; got: %v", err) + } +} + +func TestSafeDialer_AllowRFC1918_BlocksLoopbackWithoutAllowLocalhost(t *testing.T) { + // Loopback must remain blocked when AllowRFC1918=true but AllowLocalhost=false. 
+ opts := &ClientOptions{ + Timeout: 200 * time.Millisecond, + DialTimeout: 200 * time.Millisecond, + AllowRFC1918: true, + AllowLocalhost: false, + } + dial := safeDialer(opts) + ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) + defer cancel() + + _, err := dial(ctx, "tcp", "127.0.0.1:80") + if err == nil { + t.Fatal("expected an error for loopback without AllowLocalhost, got nil") + } + if !contains(err.Error(), "connection to private IP blocked") { + t.Errorf("expected loopback to be blocked; got: %v", err) + } +} + +func TestNewSafeHTTPClient_AllowRFC1918_BlocksSSRFMetadata(t *testing.T) { + // Cloud metadata endpoint (169.254.169.254) must be blocked even with AllowRFC1918. + client := NewSafeHTTPClient( + WithTimeout(200*time.Millisecond), + WithDialTimeout(200*time.Millisecond), + WithAllowRFC1918(), + ) + resp, err := client.Get("http://169.254.169.254/latest/meta-data/") + if resp != nil { + _ = resp.Body.Close() + } + if err == nil { + t.Fatal("expected metadata endpoint to be blocked, got nil") + } + if !contains(err.Error(), "connection to private IP blocked") { + t.Errorf("expected metadata endpoint blocking error; got: %v", err) + } +} + +func TestNewSafeHTTPClient_WithAllowRFC1918_OptionApplied(t *testing.T) { + // Verify that WithAllowRFC1918() sets AllowRFC1918=true on ClientOptions. 
+ opts := defaultOptions() + WithAllowRFC1918()(&opts) + if !opts.AllowRFC1918 { + t.Error("WithAllowRFC1918() should set AllowRFC1918=true") + } +} diff --git a/backend/internal/notifications/feature_flags.go b/backend/internal/notifications/feature_flags.go index 7a3a3405..846a78cb 100644 --- a/backend/internal/notifications/feature_flags.go +++ b/backend/internal/notifications/feature_flags.go @@ -7,5 +7,8 @@ const ( FlagGotifyServiceEnabled = "feature.notifications.service.gotify.enabled" FlagWebhookServiceEnabled = "feature.notifications.service.webhook.enabled" FlagTelegramServiceEnabled = "feature.notifications.service.telegram.enabled" + FlagSlackServiceEnabled = "feature.notifications.service.slack.enabled" + FlagPushoverServiceEnabled = "feature.notifications.service.pushover.enabled" + FlagNtfyServiceEnabled = "feature.notifications.service.ntfy.enabled" FlagSecurityProviderEventsEnabled = "feature.notifications.security_provider_events.enabled" ) diff --git a/backend/internal/notifications/http_wrapper.go b/backend/internal/notifications/http_wrapper.go index 7ed876ea..e9831e2c 100644 --- a/backend/internal/notifications/http_wrapper.go +++ b/backend/internal/notifications/http_wrapper.go @@ -458,10 +458,11 @@ func readCappedResponseBody(body io.Reader) ([]byte, error) { func sanitizeOutboundHeaders(headers map[string]string) map[string]string { allowed := map[string]struct{}{ - "content-type": {}, - "user-agent": {}, - "x-request-id": {}, - "x-gotify-key": {}, + "content-type": {}, + "user-agent": {}, + "x-request-id": {}, + "x-gotify-key": {}, + "authorization": {}, } sanitized := make(map[string]string) diff --git a/backend/internal/notifications/http_wrapper_test.go b/backend/internal/notifications/http_wrapper_test.go index 3df06cd4..765cfa14 100644 --- a/backend/internal/notifications/http_wrapper_test.go +++ b/backend/internal/notifications/http_wrapper_test.go @@ -255,11 +255,11 @@ func TestSanitizeOutboundHeadersAllowlist(t *testing.T) { 
"Cookie": "sid=1", }) - if len(headers) != 4 { - t.Fatalf("expected 4 allowed headers, got %d", len(headers)) + if len(headers) != 5 { + t.Fatalf("expected 5 allowed headers, got %d", len(headers)) } - if _, ok := headers["Authorization"]; ok { - t.Fatalf("authorization header must be stripped") + if _, ok := headers["Authorization"]; !ok { + t.Fatalf("authorization header must be allowed for ntfy Bearer auth") } if _, ok := headers["Cookie"]; ok { t.Fatalf("cookie header must be stripped") diff --git a/backend/internal/notifications/router.go b/backend/internal/notifications/router.go index a69f6cbd..5aa78076 100644 --- a/backend/internal/notifications/router.go +++ b/backend/internal/notifications/router.go @@ -25,6 +25,12 @@ func (r *Router) ShouldUseNotify(providerType string, flags map[string]bool) boo return flags[FlagWebhookServiceEnabled] case "telegram": return flags[FlagTelegramServiceEnabled] + case "slack": + return flags[FlagSlackServiceEnabled] + case "pushover": + return flags[FlagPushoverServiceEnabled] + case "ntfy": + return flags[FlagNtfyServiceEnabled] default: return false } diff --git a/backend/internal/notifications/router_test.go b/backend/internal/notifications/router_test.go index 0d4ea894..25395dba 100644 --- a/backend/internal/notifications/router_test.go +++ b/backend/internal/notifications/router_test.go @@ -86,3 +86,57 @@ func TestRouter_ShouldUseNotify_WebhookServiceFlag(t *testing.T) { t.Fatalf("expected notify routing disabled for webhook when FlagWebhookServiceEnabled is false") } } + +func TestRouter_ShouldUseNotify_SlackServiceFlag(t *testing.T) { + router := NewRouter() + + flags := map[string]bool{ + FlagNotifyEngineEnabled: true, + FlagSlackServiceEnabled: true, + } + + if !router.ShouldUseNotify("slack", flags) { + t.Fatalf("expected notify routing enabled for slack when FlagSlackServiceEnabled is true") + } + + flags[FlagSlackServiceEnabled] = false + if router.ShouldUseNotify("slack", flags) { + t.Fatalf("expected notify 
routing disabled for slack when FlagSlackServiceEnabled is false") + } +} + +func TestRouter_ShouldUseNotify_PushoverServiceFlag(t *testing.T) { + router := NewRouter() + + flags := map[string]bool{ + FlagNotifyEngineEnabled: true, + FlagPushoverServiceEnabled: true, + } + + if !router.ShouldUseNotify("pushover", flags) { + t.Fatalf("expected notify routing enabled for pushover when FlagPushoverServiceEnabled is true") + } + + flags[FlagPushoverServiceEnabled] = false + if router.ShouldUseNotify("pushover", flags) { + t.Fatalf("expected notify routing disabled for pushover when FlagPushoverServiceEnabled is false") + } +} + +func TestRouter_ShouldUseNotify_NtfyServiceFlag(t *testing.T) { + router := NewRouter() + + flags := map[string]bool{ + FlagNotifyEngineEnabled: true, + FlagNtfyServiceEnabled: true, + } + + if !router.ShouldUseNotify("ntfy", flags) { + t.Fatalf("expected notify routing enabled for ntfy when FlagNtfyServiceEnabled is true") + } + + flags[FlagNtfyServiceEnabled] = false + if router.ShouldUseNotify("ntfy", flags) { + t.Fatalf("expected notify routing disabled for ntfy when FlagNtfyServiceEnabled is false") + } +} diff --git a/backend/internal/security/url_validator.go b/backend/internal/security/url_validator.go index bb56adb5..f29e5f4f 100644 --- a/backend/internal/security/url_validator.go +++ b/backend/internal/security/url_validator.go @@ -120,6 +120,14 @@ type ValidationConfig struct { MaxRedirects int Timeout time.Duration BlockPrivateIPs bool + + // AllowRFC1918 permits addresses in the RFC 1918 private ranges + // (10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16). + // + // SECURITY NOTE: Must only be set for admin-configured features such as uptime + // monitors. Link-local (169.254.x.x), loopback, cloud metadata, and all other + // restricted ranges remain blocked regardless of this flag. + AllowRFC1918 bool } // ValidationOption allows customizing validation behavior. 
@@ -145,6 +153,15 @@ func WithMaxRedirects(maxRedirects int) ValidationOption { return func(c *ValidationConfig) { c.MaxRedirects = maxRedirects } } +// WithAllowRFC1918 permits addresses in the RFC 1918 private ranges +// (10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16). +// +// Use only for admin-configured features (e.g., uptime monitors targeting internal hosts). +// All other SSRF protections remain active. +func WithAllowRFC1918() ValidationOption { + return func(c *ValidationConfig) { c.AllowRFC1918 = true } +} + // ValidateExternalURL validates a URL for external HTTP requests with comprehensive SSRF protection. // This function provides defense-in-depth against Server-Side Request Forgery attacks by: // 1. Validating URL format and scheme @@ -272,9 +289,26 @@ func ValidateExternalURL(rawURL string, options ...ValidationOption) (string, er if ip.To4() != nil && ip.To16() != nil && isIPv4MappedIPv6(ip) { // Extract the IPv4 address from the mapped format ipv4 := ip.To4() - if network.IsPrivateIP(ipv4) { - return "", fmt.Errorf("connection to private ip addresses is blocked for security (detected IPv4-mapped IPv6: %s)", ip.String()) + // Allow RFC 1918 IPv4-mapped IPv6 only when the caller has explicitly opted in. + if config.AllowRFC1918 && network.IsRFC1918(ipv4) { + continue } + if network.IsPrivateIP(ipv4) { + // Cloud metadata endpoint must produce the specific error even + // when the address arrives as an IPv4-mapped IPv6 value. + if ipv4.String() == "169.254.169.254" { + return "", fmt.Errorf("access to cloud metadata endpoints is blocked for security (detected: %s)", sanitizeIPForError(ipv4.String())) + } + return "", fmt.Errorf("connection to private ip addresses is blocked for security (detected: %s)", sanitizeIPForError(ipv4.String())) + } + } + + // Allow RFC 1918 addresses only when the caller has explicitly opted in + // (e.g., admin-configured uptime monitors targeting internal hosts). 
+ // Link-local (169.254.x.x), loopback, cloud metadata, and all other + // restricted ranges remain blocked regardless of this flag. + if config.AllowRFC1918 && network.IsRFC1918(ip) { + continue } // Check if IP is in private/reserved ranges using centralized network.IsPrivateIP diff --git a/backend/internal/security/url_validator_test.go b/backend/internal/security/url_validator_test.go index 7a00e381..fc7e6019 100644 --- a/backend/internal/security/url_validator_test.go +++ b/backend/internal/security/url_validator_test.go @@ -1054,3 +1054,143 @@ func TestIsIPv4MappedIPv6_EdgeCases(t *testing.T) { }) } } + +// PR-3: WithAllowRFC1918 validation option tests + +func TestValidateExternalURL_WithAllowRFC1918_Permits10x(t *testing.T) { + t.Parallel() + _, err := ValidateExternalURL( + "http://10.0.0.1", + WithAllowHTTP(), + WithAllowRFC1918(), + WithTimeout(200*time.Millisecond), + ) + // The key invariant: RFC 1918 bypass must NOT produce the blocking error. + // DNS may succeed (returning the IP) or fail (network unavailable) — both acceptable. 
+ if err != nil && strings.Contains(err.Error(), "private ip addresses is blocked") { + t.Errorf("AllowRFC1918 should skip 10.x.x.x blocking; got: %v", err) + } +} + +func TestValidateExternalURL_WithAllowRFC1918_Permits172_16x(t *testing.T) { + t.Parallel() + _, err := ValidateExternalURL( + "http://172.16.0.1", + WithAllowHTTP(), + WithAllowRFC1918(), + WithTimeout(200*time.Millisecond), + ) + if err != nil && strings.Contains(err.Error(), "private ip addresses is blocked") { + t.Errorf("AllowRFC1918 should skip 172.16.x.x blocking; got: %v", err) + } +} + +func TestValidateExternalURL_WithAllowRFC1918_Permits192_168x(t *testing.T) { + t.Parallel() + _, err := ValidateExternalURL( + "http://192.168.1.1", + WithAllowHTTP(), + WithAllowRFC1918(), + WithTimeout(200*time.Millisecond), + ) + if err != nil && strings.Contains(err.Error(), "private ip addresses is blocked") { + t.Errorf("AllowRFC1918 should skip 192.168.x.x blocking; got: %v", err) + } +} + +func TestValidateExternalURL_WithAllowRFC1918_BlocksMetadata(t *testing.T) { + t.Parallel() + // 169.254.169.254 is the cloud metadata endpoint; it must stay blocked even + // with AllowRFC1918 because 169.254.0.0/16 is not in rfc1918CIDRs. + _, err := ValidateExternalURL( + "http://169.254.169.254", + WithAllowHTTP(), + WithAllowRFC1918(), + WithTimeout(200*time.Millisecond), + ) + if err == nil { + t.Fatal("expected cloud metadata endpoint to be blocked, got nil") + } +} + +func TestValidateExternalURL_WithAllowRFC1918_BlocksLinkLocal(t *testing.T) { + t.Parallel() + // 169.254.1.1 is link-local but not the specific metadata IP; still blocked. 
+ _, err := ValidateExternalURL( + "http://169.254.1.1", + WithAllowHTTP(), + WithAllowRFC1918(), + WithTimeout(200*time.Millisecond), + ) + if err == nil { + t.Fatal("expected link-local address to be blocked, got nil") + } +} + +func TestValidateExternalURL_WithAllowRFC1918_BlocksLoopback(t *testing.T) { + t.Parallel() + // 127.0.0.1 without WithAllowLocalhost must still be blocked. + _, err := ValidateExternalURL( + "http://127.0.0.1", + WithAllowHTTP(), + WithAllowRFC1918(), + WithTimeout(200*time.Millisecond), + ) + if err == nil { + t.Fatal("expected loopback to be blocked without AllowLocalhost, got nil") + } + if !strings.Contains(err.Error(), "private ip addresses is blocked") && + !strings.Contains(err.Error(), "dns resolution failed") { + t.Errorf("expected loopback blocking error; got: %v", err) + } +} + +func TestValidateExternalURL_RFC1918BlockedByDefault(t *testing.T) { + t.Parallel() + // Without WithAllowRFC1918, RFC 1918 addresses must still fail. + _, err := ValidateExternalURL( + "http://10.0.0.1", + WithAllowHTTP(), + WithTimeout(200*time.Millisecond), + ) + if err == nil { + t.Fatal("expected RFC 1918 address to be blocked by default, got nil") + } +} + +func TestValidateExternalURL_WithAllowRFC1918_IPv4MappedIPv6Allowed(t *testing.T) { + t.Parallel() + // ::ffff:192.168.1.1 is an IPv4-mapped IPv6 of an RFC 1918 address. + // With AllowRFC1918, the mapped IPv4 is extracted and the RFC 1918 bypass fires. + _, err := ValidateExternalURL( + "http://[::ffff:192.168.1.1]", + WithAllowHTTP(), + WithAllowRFC1918(), + WithTimeout(200*time.Millisecond), + ) + if err != nil && strings.Contains(err.Error(), "private ip addresses is blocked") { + t.Errorf("AllowRFC1918 should permit ::ffff:192.168.1.1; got: %v", err) + } +} + +func TestValidateExternalURL_WithAllowRFC1918_IPv4MappedMetadataBlocked(t *testing.T) { + t.Parallel() + // ::ffff:169.254.169.254 maps to the cloud metadata IP; must stay blocked. 
+ _, err := ValidateExternalURL( + "http://[::ffff:169.254.169.254]", + WithAllowHTTP(), + WithAllowRFC1918(), + WithTimeout(200*time.Millisecond), + ) + if err == nil { + t.Fatal("expected IPv4-mapped metadata address to be blocked, got nil") + } + // Must produce the cloud-metadata-specific error, not the generic private-IP error. + if !strings.Contains(err.Error(), "cloud metadata") { + t.Errorf("expected cloud metadata error, got: %v", err) + } + // The raw mapped form must not be leaked in the error message. + if strings.Contains(err.Error(), "::ffff:") { + t.Errorf("error message leaks raw IPv4-mapped form: %v", err) + } +} diff --git a/backend/internal/services/docker_service_test.go b/backend/internal/services/docker_service_test.go index fa35e599..da38d655 100644 --- a/backend/internal/services/docker_service_test.go +++ b/backend/internal/services/docker_service_test.go @@ -228,7 +228,7 @@ func TestBuildLocalDockerUnavailableDetails_PermissionDeniedSocketGIDInGroups(t // Temp file GID = our primary GID (already in process groups) → no group hint tmpDir := t.TempDir() socketFile := filepath.Join(tmpDir, "docker.sock") - require.NoError(t, os.WriteFile(socketFile, []byte(""), 0o660)) + require.NoError(t, os.WriteFile(socketFile, []byte(""), 0o600)) host := "unix://" + socketFile err := &net.OpError{Op: "dial", Net: "unix", Err: syscall.EACCES} diff --git a/backend/internal/services/enhanced_security_notification_service.go b/backend/internal/services/enhanced_security_notification_service.go index 7efb7037..2351f3c2 100644 --- a/backend/internal/services/enhanced_security_notification_service.go +++ b/backend/internal/services/enhanced_security_notification_service.go @@ -89,6 +89,7 @@ func (s *EnhancedSecurityNotificationService) getProviderAggregatedConfig() (*mo "slack": true, "gotify": true, "telegram": true, + "pushover": true, } filteredProviders := []models.NotificationProvider{} for _, p := range providers { diff --git 
a/backend/internal/services/mail_service.go b/backend/internal/services/mail_service.go index 499fdac8..a052d892 100644 --- a/backend/internal/services/mail_service.go +++ b/backend/internal/services/mail_service.go @@ -192,7 +192,10 @@ func (s *MailService) RenderNotificationEmail(templateName string, data EmailTem return "", fmt.Errorf("failed to render template %q: %w", templateName, err) } - data.Content = template.HTML(contentBuf.String()) + // html/template.Execute already escapes all EmailTemplateData fields; the + // template.HTML cast here prevents double-escaping in the outer layout template. + // #nosec G203 -- html/template.Execute auto-escapes all EmailTemplateData fields; this cast prevents double-escaping in the outer layout. + data.Content = template.HTML(contentBuf.String()) //nolint:gosec // see above baseTmpl, err := template.New("email_base.html").Parse(string(baseBytes)) if err != nil { diff --git a/backend/internal/services/notification_service.go b/backend/internal/services/notification_service.go index 7d8a08c6..f54eb48a 100644 --- a/backend/internal/services/notification_service.go +++ b/backend/internal/services/notification_service.go @@ -30,15 +30,34 @@ type NotificationService struct { httpWrapper *notifications.HTTPWrapper mailService MailServiceInterface telegramAPIBaseURL string + pushoverAPIBaseURL string + validateSlackURL func(string) error } -func NewNotificationService(db *gorm.DB, mailService MailServiceInterface) *NotificationService { - return &NotificationService{ +// NotificationServiceOption configures a NotificationService at construction time. +type NotificationServiceOption func(*NotificationService) + +// WithSlackURLValidator overrides the Slack webhook URL validator. Intended for use +// in tests that need to bypass real URL validation without mutating shared state. 
+func WithSlackURLValidator(fn func(string) error) NotificationServiceOption { + return func(s *NotificationService) { + s.validateSlackURL = fn + } +} + +func NewNotificationService(db *gorm.DB, mailService MailServiceInterface, opts ...NotificationServiceOption) *NotificationService { + s := &NotificationService{ DB: db, httpWrapper: notifications.NewNotifyHTTPWrapper(), mailService: mailService, telegramAPIBaseURL: "https://api.telegram.org", + pushoverAPIBaseURL: "https://api.pushover.net", + validateSlackURL: validateSlackWebhookURL, } + for _, opt := range opts { + opt(s) + } + return s } var discordWebhookRegex = regexp.MustCompile(`^https://discord(?:app)?\.com/api/webhooks/(\d+)/([a-zA-Z0-9_-]+)`) @@ -48,6 +67,15 @@ var allowedDiscordWebhookHosts = map[string]struct{}{ "canary.discord.com": {}, } +var slackWebhookRegex = regexp.MustCompile(`^https://hooks\.slack\.com/services/T[A-Za-z0-9_-]+/B[A-Za-z0-9_-]+/[A-Za-z0-9_-]+$`) + +func validateSlackWebhookURL(rawURL string) error { + if !slackWebhookRegex.MatchString(rawURL) { + return fmt.Errorf("invalid Slack webhook URL: must match https://hooks.slack.com/services/T.../B.../xxx") + } + return nil +} + func normalizeURL(serviceType, rawURL string) string { if serviceType == "discord" { matches := discordWebhookRegex.FindStringSubmatch(rawURL) @@ -101,7 +129,7 @@ func validateDiscordProviderURL(providerType, rawURL string) error { // supportsJSONTemplates returns true if the provider type can use JSON templates func supportsJSONTemplates(providerType string) bool { switch strings.ToLower(providerType) { - case "webhook", "discord", "gotify", "slack", "generic", "telegram": + case "webhook", "discord", "gotify", "slack", "generic", "telegram", "pushover", "ntfy": return true default: return false @@ -110,7 +138,7 @@ func supportsJSONTemplates(providerType string) bool { func isSupportedNotificationProviderType(providerType string) bool { switch strings.ToLower(strings.TrimSpace(providerType)) { - case 
"discord", "email", "gotify", "webhook", "telegram": + case "discord", "email", "gotify", "webhook", "telegram", "slack", "pushover", "ntfy": return true default: return false @@ -129,6 +157,12 @@ func (s *NotificationService) isDispatchEnabled(providerType string) bool { return s.getFeatureFlagValue(notifications.FlagWebhookServiceEnabled, true) case "telegram": return s.getFeatureFlagValue(notifications.FlagTelegramServiceEnabled, true) + case "slack": + return s.getFeatureFlagValue(notifications.FlagSlackServiceEnabled, true) + case "pushover": + return s.getFeatureFlagValue(notifications.FlagPushoverServiceEnabled, true) + case "ntfy": + return s.getFeatureFlagValue(notifications.FlagNtfyServiceEnabled, true) default: return false } @@ -440,10 +474,21 @@ func (s *NotificationService) sendJSONPayload(ctx context.Context, p models.Noti } } case "slack": - // Slack requires either 'text' or 'blocks' if _, hasText := jsonPayload["text"]; !hasText { if _, hasBlocks := jsonPayload["blocks"]; !hasBlocks { - return fmt.Errorf("slack payload requires 'text' or 'blocks' field") + if messageValue, hasMessage := jsonPayload["message"]; hasMessage { + jsonPayload["text"] = messageValue + normalizedBody, marshalErr := json.Marshal(jsonPayload) + if marshalErr != nil { + return fmt.Errorf("failed to normalize slack payload: %w", marshalErr) + } + body.Reset() + if _, writeErr := body.Write(normalizedBody); writeErr != nil { + return fmt.Errorf("failed to write normalized slack payload: %w", writeErr) + } + } else { + return fmt.Errorf("slack payload requires 'text' or 'blocks' field") + } } } case "gotify": @@ -468,9 +513,22 @@ func (s *NotificationService) sendJSONPayload(ctx context.Context, p models.Noti return fmt.Errorf("telegram payload requires 'text' field") } } + case "pushover": + if _, hasMessage := jsonPayload["message"]; !hasMessage { + return fmt.Errorf("pushover payload requires 'message' field") + } + if priority, ok := jsonPayload["priority"]; ok { + if p, 
isFloat := priority.(float64); isFloat && p == 2 { + return fmt.Errorf("pushover emergency priority (2) requires retry and expire parameters; not yet supported") + } + } + case "ntfy": + if _, hasMessage := jsonPayload["message"]; !hasMessage { + return fmt.Errorf("ntfy payload must include a 'message' field") + } } - if providerType == "gotify" || providerType == "webhook" || providerType == "telegram" { + if providerType == "gotify" || providerType == "webhook" || providerType == "telegram" || providerType == "slack" || providerType == "pushover" || providerType == "ntfy" { headers := map[string]string{ "Content-Type": "application/json", "User-Agent": "Charon-Notify/1.0", @@ -516,6 +574,58 @@ func (s *NotificationService) sendJSONPayload(ctx context.Context, p models.Noti body.Write(updatedBody) } + if providerType == "slack" { + decryptedWebhookURL := p.Token + if strings.TrimSpace(decryptedWebhookURL) == "" { + return fmt.Errorf("slack webhook URL is not configured") + } + if validateErr := s.validateSlackURL(decryptedWebhookURL); validateErr != nil { + return validateErr + } + dispatchURL = decryptedWebhookURL + } + + if providerType == "ntfy" { + if strings.TrimSpace(p.Token) != "" { + headers["Authorization"] = "Bearer " + strings.TrimSpace(p.Token) + } + } + + if providerType == "pushover" { + decryptedToken := p.Token + if strings.TrimSpace(decryptedToken) == "" { + return fmt.Errorf("pushover API token is not configured") + } + if strings.TrimSpace(p.URL) == "" { + return fmt.Errorf("pushover user key is not configured") + } + + pushoverBase := s.pushoverAPIBaseURL + if pushoverBase == "" { + pushoverBase = "https://api.pushover.net" + } + dispatchURL = pushoverBase + "/1/messages.json" + + parsedURL, parseErr := neturl.Parse(dispatchURL) + expectedHost := "api.pushover.net" + if parsedURL != nil && parsedURL.Hostname() != "" && pushoverBase != "https://api.pushover.net" { + expectedHost = parsedURL.Hostname() + } + if parseErr != nil || 
parsedURL.Hostname() != expectedHost { + return fmt.Errorf("pushover dispatch URL validation failed: invalid hostname") + } + + jsonPayload["token"] = decryptedToken + jsonPayload["user"] = p.URL + + updatedBody, marshalErr := json.Marshal(jsonPayload) + if marshalErr != nil { + return fmt.Errorf("failed to marshal pushover payload: %w", marshalErr) + } + body.Reset() + body.Write(updatedBody) + } + if _, sendErr := s.httpWrapper.Send(ctx, notifications.HTTPWrapperRequest{ URL: dispatchURL, Headers: headers, @@ -739,7 +849,17 @@ func (s *NotificationService) CreateProvider(provider *models.NotificationProvid return err } - if provider.Type != "gotify" && provider.Type != "telegram" { + if provider.Type == "slack" { + token := strings.TrimSpace(provider.Token) + if token == "" { + return fmt.Errorf("slack webhook URL is required") + } + if err := s.validateSlackURL(token); err != nil { + return err + } + } + + if provider.Type != "gotify" && provider.Type != "telegram" && provider.Type != "slack" && provider.Type != "ntfy" && provider.Type != "pushover" { provider.Token = "" } @@ -775,7 +895,7 @@ func (s *NotificationService) UpdateProvider(provider *models.NotificationProvid return err } - if provider.Type == "gotify" || provider.Type == "telegram" { + if provider.Type == "gotify" || provider.Type == "telegram" || provider.Type == "slack" || provider.Type == "ntfy" || provider.Type == "pushover" { if strings.TrimSpace(provider.Token) == "" { provider.Token = existing.Token } @@ -783,6 +903,12 @@ func (s *NotificationService) UpdateProvider(provider *models.NotificationProvid provider.Token = "" } + if provider.Type == "slack" && provider.Token != existing.Token { + if err := s.validateSlackURL(strings.TrimSpace(provider.Token)); err != nil { + return err + } + } + // Validate custom template before saving if strings.ToLower(strings.TrimSpace(provider.Template)) == "custom" && strings.TrimSpace(provider.Config) != "" { payload := map[string]any{"Title": "Preview", 
"Message": "Preview", "Time": time.Now().Format(time.RFC3339), "EventType": "preview"} diff --git a/backend/internal/services/notification_service_discord_only_test.go b/backend/internal/services/notification_service_discord_only_test.go index 9fb9b19b..8ca4b9ff 100644 --- a/backend/internal/services/notification_service_discord_only_test.go +++ b/backend/internal/services/notification_service_discord_only_test.go @@ -22,7 +22,7 @@ func TestDiscordOnly_CreateProviderRejectsUnsupported(t *testing.T) { service := NewNotificationService(db, nil) - testCases := []string{"slack", "generic"} + testCases := []string{"generic"} for _, providerType := range testCases { t.Run(providerType, func(t *testing.T) { diff --git a/backend/internal/services/notification_service_json_test.go b/backend/internal/services/notification_service_json_test.go index 1e3d9dc9..2979cd5e 100644 --- a/backend/internal/services/notification_service_json_test.go +++ b/backend/internal/services/notification_service_json_test.go @@ -193,11 +193,12 @@ func TestSendJSONPayload_Slack(t *testing.T) { db, err := gorm.Open(sqlite.Open("file::memory:"), &gorm.Config{}) require.NoError(t, err) - svc := NewNotificationService(db, nil) + svc := NewNotificationService(db, nil, WithSlackURLValidator(func(string) error { return nil })) provider := models.NotificationProvider{ Type: "slack", - URL: server.URL, + URL: "#test", + Token: server.URL, Template: "custom", Config: `{"text": {{toJSON .Message}}}`, } @@ -660,3 +661,96 @@ func TestSendJSONPayload_Telegram_401ErrorMessage(t *testing.T) { require.Error(t, sendErr) assert.Contains(t, sendErr.Error(), "provider returned status 401") } + +func TestSendJSONPayload_Ntfy_Valid(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "POST", r.Method) + assert.Equal(t, "application/json", r.Header.Get("Content-Type")) + assert.Empty(t, r.Header.Get("Authorization"), "no auth header when token is 
empty") + + var payload map[string]any + err := json.NewDecoder(r.Body).Decode(&payload) + require.NoError(t, err) + assert.NotNil(t, payload["message"], "ntfy payload should have message field") + + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + db, err := gorm.Open(sqlite.Open("file::memory:"), &gorm.Config{}) + require.NoError(t, err) + + svc := NewNotificationService(db, nil) + + provider := models.NotificationProvider{ + Type: "ntfy", + URL: server.URL, + Template: "custom", + Config: `{"message": {{toJSON .Message}}, "title": {{toJSON .Title}}}`, + } + + data := map[string]any{ + "Message": "Test notification", + "Title": "Test", + } + + err = svc.sendJSONPayload(context.Background(), provider, data) + assert.NoError(t, err) +} + +func TestSendJSONPayload_Ntfy_WithToken(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "Bearer tk_test123", r.Header.Get("Authorization")) + + var payload map[string]any + err := json.NewDecoder(r.Body).Decode(&payload) + require.NoError(t, err) + assert.NotNil(t, payload["message"]) + + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + db, err := gorm.Open(sqlite.Open("file::memory:"), &gorm.Config{}) + require.NoError(t, err) + + svc := NewNotificationService(db, nil) + + provider := models.NotificationProvider{ + Type: "ntfy", + URL: server.URL, + Token: "tk_test123", + Template: "custom", + Config: `{"message": {{toJSON .Message}}, "title": {{toJSON .Title}}}`, + } + + data := map[string]any{ + "Message": "Test notification", + "Title": "Test", + } + + err = svc.sendJSONPayload(context.Background(), provider, data) + assert.NoError(t, err) +} + +func TestSendJSONPayload_Ntfy_MissingMessage(t *testing.T) { + db, err := gorm.Open(sqlite.Open("file::memory:"), &gorm.Config{}) + require.NoError(t, err) + + svc := NewNotificationService(db, nil) + + provider := models.NotificationProvider{ + Type: "ntfy", + URL: 
"http://localhost:9999", + Template: "custom", + Config: `{"title": "Test"}`, + } + + data := map[string]any{ + "Message": "Test", + } + + err = svc.sendJSONPayload(context.Background(), provider, data) + assert.Error(t, err) + assert.Contains(t, err.Error(), "ntfy payload must include a 'message' field") +} diff --git a/backend/internal/services/notification_service_test.go b/backend/internal/services/notification_service_test.go index d79f7b50..1c81dfc1 100644 --- a/backend/internal/services/notification_service_test.go +++ b/backend/internal/services/notification_service_test.go @@ -516,14 +516,16 @@ func TestNotificationService_TestProvider_Errors(t *testing.T) { assert.Error(t, err) }) - t.Run("slack type not supported", func(t *testing.T) { + t.Run("slack with missing webhook URL", func(t *testing.T) { provider := models.NotificationProvider{ - Type: "slack", - URL: "https://hooks.slack.com/services/INVALID/WEBHOOK/URL", + Type: "slack", + URL: "#alerts", + Token: "", + Template: "minimal", } err := svc.TestProvider(provider) assert.Error(t, err) - assert.Contains(t, err.Error(), "unsupported provider type") + assert.Contains(t, err.Error(), "slack webhook URL is not configured") }) t.Run("webhook success", func(t *testing.T) { @@ -1451,17 +1453,14 @@ func TestSendJSONPayload_ServiceSpecificValidation(t *testing.T) { }) t.Run("slack_requires_text_or_blocks", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusOK) - })) - defer server.Close() + subSvc := NewNotificationService(db, nil, WithSlackURLValidator(func(string) error { return nil })) - // Slack without text or blocks should fail provider := models.NotificationProvider{ Type: "slack", - URL: server.URL, + URL: "#test", + Token: "https://hooks.slack.com/services/T00/B00/xxx", Template: "custom", - Config: `{"message": {{toJSON .Message}}}`, // Missing text/blocks + Config: `{"username": "Charon"}`, } data := 
map[string]any{ "Title": "Test", @@ -1470,7 +1469,7 @@ func TestSendJSONPayload_ServiceSpecificValidation(t *testing.T) { "EventType": "test", } - err := svc.sendJSONPayload(context.Background(), provider, data) + err := subSvc.sendJSONPayload(context.Background(), provider, data) require.Error(t, err) assert.Contains(t, err.Error(), "slack payload requires 'text' or 'blocks' field") }) @@ -1480,10 +1479,12 @@ func TestSendJSONPayload_ServiceSpecificValidation(t *testing.T) { w.WriteHeader(http.StatusOK) })) defer server.Close() + subSvc := NewNotificationService(db, nil, WithSlackURLValidator(func(string) error { return nil })) provider := models.NotificationProvider{ Type: "slack", - URL: server.URL, + URL: "#test", + Token: server.URL, Template: "custom", Config: `{"text": {{toJSON .Message}}}`, } @@ -1494,7 +1495,7 @@ func TestSendJSONPayload_ServiceSpecificValidation(t *testing.T) { "EventType": "test", } - err := svc.sendJSONPayload(context.Background(), provider, data) + err := subSvc.sendJSONPayload(context.Background(), provider, data) require.NoError(t, err) }) @@ -1503,10 +1504,12 @@ func TestSendJSONPayload_ServiceSpecificValidation(t *testing.T) { w.WriteHeader(http.StatusOK) })) defer server.Close() + subSvc := NewNotificationService(db, nil, WithSlackURLValidator(func(string) error { return nil })) provider := models.NotificationProvider{ Type: "slack", - URL: server.URL, + URL: "#test", + Token: server.URL, Template: "custom", Config: `{"blocks": [{"type": "section", "text": {"type": "mrkdwn", "text": {{toJSON .Message}}}}]}`, } @@ -1517,7 +1520,7 @@ func TestSendJSONPayload_ServiceSpecificValidation(t *testing.T) { "EventType": "test", } - err := svc.sendJSONPayload(context.Background(), provider, data) + err := subSvc.sendJSONPayload(context.Background(), provider, data) require.NoError(t, err) }) @@ -1826,8 +1829,7 @@ func TestTestProvider_NotifyOnlyRejectsUnsupportedProvider(t *testing.T) { providerType string url string }{ - {"slack", "slack", 
"https://hooks.slack.com/services/T/B/X"}, - {"pushover", "pushover", "pushover://token@user"}, + {"sms", "sms", "sms://token@user"}, } for _, tt := range tests { @@ -2154,9 +2156,9 @@ func TestNotificationService_EnsureNotifyOnlyProviderMigration(t *testing.T) { Enabled: true, }, { - Name: "Pushover Provider (deprecated)", - Type: "pushover", - URL: "pushover://token@user", + Name: "Legacy SMS Provider (deprecated)", + Type: "legacy_sms", + URL: "sms://token@user", Enabled: true, }, { @@ -2165,6 +2167,13 @@ func TestNotificationService_EnsureNotifyOnlyProviderMigration(t *testing.T) { URL: "https://discord.com/api/webhooks/123/abc/gotify", Enabled: true, }, + { + Name: "Pushover Provider", + Type: "pushover", + Token: "pushover-api-token", + URL: "pushover-user-key", + Enabled: true, + }, } for i := range providers { @@ -2185,7 +2194,7 @@ func TestNotificationService_EnsureNotifyOnlyProviderMigration(t *testing.T) { assert.True(t, discord.Enabled, "discord provider should remain enabled") // Verify non-Discord providers are marked as deprecated and disabled - nonDiscordTypes := []string{"webhook", "telegram", "pushover", "gotify"} + nonDiscordTypes := []string{"webhook", "telegram", "legacy_sms", "gotify", "pushover"} for _, providerType := range nonDiscordTypes { var provider models.NotificationProvider require.NoError(t, db.Where("type = ?", providerType).First(&provider).Error) @@ -3169,3 +3178,731 @@ func TestIsDispatchEnabled_TelegramDisabledByFlag(t *testing.T) { db.Create(&models.Setting{Key: "feature.notifications.service.telegram.enabled", Value: "false"}) assert.False(t, svc.isDispatchEnabled("telegram")) } + +// --- Slack Notification Provider Tests --- + +func TestSlackWebhookURLValidation(t *testing.T) { + tests := []struct { + name string + url string + wantErr bool + }{ + {"valid_url", "https://hooks.slack.com/services/T00000000/B00000000/abcdefghijklmnop", false}, + {"valid_url_with_dashes", 
"https://hooks.slack.com/services/T0-A_z/B0-A_z/abc-def_123", false}, + {"http_scheme", "http://hooks.slack.com/services/T00000000/B00000000/abcdefghijklmnop", true}, + {"wrong_host", "https://evil.com/services/T00000000/B00000000/abcdefghijklmnop", true}, + {"ip_address", "https://192.168.1.1/services/T00000000/B00000000/abcdefghijklmnop", true}, + {"missing_T_prefix", "https://hooks.slack.com/services/X00000000/B00000000/abcdefghijklmnop", true}, + {"missing_B_prefix", "https://hooks.slack.com/services/T00000000/X00000000/abcdefghijklmnop", true}, + {"query_params", "https://hooks.slack.com/services/T00000000/B00000000/abcdefghijklmnop?token=leak", true}, + {"empty_string", "", true}, + {"just_host", "https://hooks.slack.com", true}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := validateSlackWebhookURL(tt.url) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestSlackWebhookURLValidation_RejectsHTTP(t *testing.T) { + err := validateSlackWebhookURL("http://hooks.slack.com/services/T00000/B00000/token123") + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid Slack webhook URL") +} + +func TestSlackWebhookURLValidation_RejectsIPAddress(t *testing.T) { + err := validateSlackWebhookURL("https://192.168.1.1/services/T00000/B00000/token123") + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid Slack webhook URL") +} + +func TestSlackWebhookURLValidation_RejectsWrongHost(t *testing.T) { + err := validateSlackWebhookURL("https://evil.com/services/T00000/B00000/token123") + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid Slack webhook URL") +} + +func TestSlackWebhookURLValidation_RejectsQueryParams(t *testing.T) { + err := validateSlackWebhookURL("https://hooks.slack.com/services/T00000/B00000/token123?token=leak") + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid Slack webhook URL") +} + +func 
TestNotificationService_CreateProvider_Slack(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := &models.NotificationProvider{ + Name: "Slack Alerts", + Type: "slack", + URL: "#alerts", + Token: "https://hooks.slack.com/services/T00000/B00000/xxxx", + } + err := svc.CreateProvider(provider) + require.NoError(t, err) + + var saved models.NotificationProvider + require.NoError(t, db.Where("id = ?", provider.ID).First(&saved).Error) + assert.Equal(t, "https://hooks.slack.com/services/T00000/B00000/xxxx", saved.Token) + assert.Equal(t, "#alerts", saved.URL) + assert.Equal(t, "slack", saved.Type) +} + +func TestNotificationService_CreateProvider_Slack_ClearsTokenField(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := &models.NotificationProvider{ + Name: "Webhook Test", + Type: "webhook", + URL: "https://example.com/hook", + Token: "should-be-cleared", + } + err := svc.CreateProvider(provider) + require.NoError(t, err) + + var saved models.NotificationProvider + require.NoError(t, db.Where("id = ?", provider.ID).First(&saved).Error) + assert.Empty(t, saved.Token) +} + +func TestNotificationService_UpdateProvider_Slack_PreservesToken(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + existing := models.NotificationProvider{ + ID: "prov-slack-token", + Type: "slack", + Name: "Slack Alerts", + URL: "#alerts", + Token: "https://hooks.slack.com/services/T00000/B00000/xxxx", + } + require.NoError(t, db.Create(&existing).Error) + + update := models.NotificationProvider{ + ID: "prov-slack-token", + Type: "slack", + Name: "Slack Alerts Updated", + URL: "#general", + Token: "", + } + err := svc.UpdateProvider(&update) + require.NoError(t, err) + assert.Equal(t, "https://hooks.slack.com/services/T00000/B00000/xxxx", update.Token) +} + +func TestNotificationService_TestProvider_Slack(t *testing.T) { + db := 
setupNotificationTestDB(t) + + var capturedBody []byte + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + capturedBody, _ = io.ReadAll(r.Body) + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte("ok")) + })) + defer server.Close() + + svc := NewNotificationService(db, nil, WithSlackURLValidator(func(string) error { return nil })) + + provider := models.NotificationProvider{ + Type: "slack", + URL: "#test", + Token: server.URL, + Template: "minimal", + } + + err := svc.TestProvider(provider) + require.NoError(t, err) + + var payload map[string]any + require.NoError(t, json.Unmarshal(capturedBody, &payload)) + assert.NotEmpty(t, payload["text"]) +} + +func TestNotificationService_SendExternal_Slack(t *testing.T) { + db := setupNotificationTestDB(t) + _ = db.AutoMigrate(&models.Setting{}) + + received := make(chan []byte, 1) + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + received <- body + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte("ok")) + })) + defer server.Close() + + svc := NewNotificationService(db, nil, WithSlackURLValidator(func(string) error { return nil })) + + provider := models.NotificationProvider{ + Name: "Slack E2E", + Type: "slack", + URL: "#alerts", + Token: server.URL, + Enabled: true, + NotifyProxyHosts: true, + Template: "minimal", + } + require.NoError(t, svc.CreateProvider(&provider)) + + svc.SendExternal(context.Background(), "proxy_host", "Title", "Message", nil) + + select { + case body := <-received: + var payload map[string]any + require.NoError(t, json.Unmarshal(body, &payload)) + assert.NotEmpty(t, payload["text"]) + case <-time.After(2 * time.Second): + t.Fatal("Timed out waiting for slack webhook") + } +} + +func TestNotificationService_Slack_PayloadNormalizesMessageToText(t *testing.T) { + db := setupNotificationTestDB(t) + + var capturedBody []byte + server := 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + capturedBody, _ = io.ReadAll(r.Body) + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte("ok")) + })) + defer server.Close() + + svc := NewNotificationService(db, nil, WithSlackURLValidator(func(string) error { return nil })) + + provider := models.NotificationProvider{ + Type: "slack", + URL: "#test", + Token: server.URL, + Template: "custom", + Config: `{"message": {{toJSON .Message}}}`, + } + data := map[string]any{ + "Title": "Test", + "Message": "Normalize me", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + err := svc.sendJSONPayload(context.Background(), provider, data) + require.NoError(t, err) + + var payload map[string]any + require.NoError(t, json.Unmarshal(capturedBody, &payload)) + assert.Equal(t, "Normalize me", payload["text"]) +} + +func TestNotificationService_Slack_PayloadRequiresTextOrBlocks(t *testing.T) { + db := setupNotificationTestDB(t) + + svc := NewNotificationService(db, nil, WithSlackURLValidator(func(string) error { return nil })) + + provider := models.NotificationProvider{ + Type: "slack", + URL: "#test", + Token: "https://hooks.slack.com/services/T00/B00/xxx", + Template: "custom", + Config: `{"title": {{toJSON .Title}}}`, + } + data := map[string]any{ + "Title": "Test", + "Message": "Test Message", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + err := svc.sendJSONPayload(context.Background(), provider, data) + require.Error(t, err) + assert.Contains(t, err.Error(), "slack payload requires 'text' or 'blocks' field") +} + +func TestFlagSlackServiceEnabled_ConstantValue(t *testing.T) { + assert.Equal(t, "feature.notifications.service.slack.enabled", notifications.FlagSlackServiceEnabled) +} + +func TestNotificationService_Slack_IsDispatchEnabled(t *testing.T) { + db := setupNotificationTestDB(t) + _ = db.AutoMigrate(&models.Setting{}) + svc := NewNotificationService(db, nil) + + assert.True(t, 
svc.isDispatchEnabled("slack")) + + db.Create(&models.Setting{Key: "feature.notifications.service.slack.enabled", Value: "false"}) + assert.False(t, svc.isDispatchEnabled("slack")) +} + +func TestNotificationService_Slack_TokenNotExposedInList(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := &models.NotificationProvider{ + Name: "Slack Secret", + Type: "slack", + URL: "#secret", + Token: "https://hooks.slack.com/services/T00000/B00000/secrettoken", + } + require.NoError(t, svc.CreateProvider(provider)) + + providers, err := svc.ListProviders() + require.NoError(t, err) + require.Len(t, providers, 1) + + providers[0].HasToken = providers[0].Token != "" + providers[0].Token = "" + assert.True(t, providers[0].HasToken) + assert.Empty(t, providers[0].Token) +} + +func TestSendJSONPayload_Slack_EmptyWebhookURLReturnsError(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := models.NotificationProvider{ + Type: "slack", + URL: "#alerts", + Token: "", + Template: "minimal", + } + data := map[string]any{ + "Title": "Test", + "Message": "Should fail before dispatch", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + err := svc.sendJSONPayload(context.Background(), provider, data) + require.Error(t, err) + assert.Contains(t, err.Error(), "slack webhook URL is not configured") +} + +func TestSendJSONPayload_Slack_WhitespaceOnlyWebhookURLReturnsError(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := models.NotificationProvider{ + Type: "slack", + URL: "#alerts", + Token: " ", + Template: "minimal", + } + data := map[string]any{ + "Title": "Test", + "Message": "Should fail before dispatch", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + err := svc.sendJSONPayload(context.Background(), provider, data) + require.Error(t, err) + assert.Contains(t, err.Error(), 
"slack webhook URL is not configured") +} + +func TestSendJSONPayload_Slack_InvalidWebhookURLReturnsValidationError(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := models.NotificationProvider{ + Type: "slack", + URL: "#alerts", + Token: "https://evil.com/not-a-slack-webhook", + Template: "minimal", + } + data := map[string]any{ + "Title": "Test", + "Message": "Should fail URL validation", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + err := svc.sendJSONPayload(context.Background(), provider, data) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid Slack webhook URL") +} + +func TestCreateProvider_Slack_EmptyTokenRejected(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := &models.NotificationProvider{ + Name: "Slack Missing Token", + Type: "slack", + URL: "#alerts", + Token: "", + } + err := svc.CreateProvider(provider) + require.Error(t, err) + assert.Contains(t, err.Error(), "slack webhook URL is required") +} + +func TestCreateProvider_Slack_WhitespaceOnlyTokenRejected(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := &models.NotificationProvider{ + Name: "Slack Whitespace Token", + Type: "slack", + URL: "#alerts", + Token: " ", + } + err := svc.CreateProvider(provider) + require.Error(t, err) + assert.Contains(t, err.Error(), "slack webhook URL is required") +} + +func TestCreateProvider_Slack_InvalidTokenRejected(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := &models.NotificationProvider{ + Name: "Slack Bad Token", + Type: "slack", + URL: "#alerts", + Token: "https://evil.com/not-a-slack-webhook", + } + err := svc.CreateProvider(provider) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid Slack webhook URL") +} + +func TestUpdateProvider_Slack_InvalidNewTokenRejected(t 
*testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + existing := models.NotificationProvider{ + ID: "prov-slack-update-invalid", + Type: "slack", + Name: "Slack Alerts", + URL: "#alerts", + Token: "https://hooks.slack.com/services/T00000/B00000/xxxx", + } + require.NoError(t, db.Create(&existing).Error) + + update := models.NotificationProvider{ + ID: "prov-slack-update-invalid", + Type: "slack", + Name: "Slack Alerts", + URL: "#alerts", + Token: "https://evil.com/not-a-slack-webhook", + } + err := svc.UpdateProvider(&update) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid Slack webhook URL") +} + +func TestUpdateProvider_Slack_UnchangedTokenSkipsValidation(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + existing := models.NotificationProvider{ + ID: "prov-slack-update-unchanged", + Type: "slack", + Name: "Slack Alerts", + URL: "#alerts", + Token: "https://hooks.slack.com/services/T00000/B00000/xxxx", + } + require.NoError(t, db.Create(&existing).Error) + + // Submitting empty token causes fallback to existing — should not re-validate + update := models.NotificationProvider{ + ID: "prov-slack-update-unchanged", + Type: "slack", + Name: "Slack Alerts Renamed", + URL: "#general", + Token: "", + } + err := svc.UpdateProvider(&update) + require.NoError(t, err) +} + +// --- Pushover Notification Provider Tests --- + +func TestPushoverDispatch_Success(t *testing.T) { + db := setupNotificationTestDB(t) + + var capturedBody []byte + var capturedURL string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + capturedURL = r.URL.Path + capturedBody, _ = io.ReadAll(r.Body) + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{}`)) + })) + defer server.Close() + + svc := NewNotificationService(db, nil) + svc.pushoverAPIBaseURL = server.URL + + provider := models.NotificationProvider{ + Type: "pushover", + Token: 
"app-token-abc", + URL: "user-key-xyz", + Template: "minimal", + } + data := map[string]any{ + "Title": "Test", + "Message": "Hello Pushover", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + err := svc.sendJSONPayload(context.Background(), provider, data) + require.NoError(t, err) + assert.Equal(t, "/1/messages.json", capturedURL) + + var payload map[string]any + require.NoError(t, json.Unmarshal(capturedBody, &payload)) + assert.Equal(t, "app-token-abc", payload["token"]) + assert.Equal(t, "user-key-xyz", payload["user"]) + assert.NotEmpty(t, payload["message"]) +} + +func TestPushoverDispatch_MissingToken(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := models.NotificationProvider{ + Type: "pushover", + Token: "", + URL: "user-key-xyz", + Template: "minimal", + } + data := map[string]any{ + "Title": "Test", + "Message": "Hello", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + err := svc.sendJSONPayload(context.Background(), provider, data) + require.Error(t, err) + assert.Contains(t, err.Error(), "pushover API token is not configured") +} + +func TestPushoverDispatch_MissingUserKey(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := models.NotificationProvider{ + Type: "pushover", + Token: "app-token-abc", + URL: "", + Template: "minimal", + } + data := map[string]any{ + "Title": "Test", + "Message": "Hello", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + err := svc.sendJSONPayload(context.Background(), provider, data) + require.Error(t, err) + assert.Contains(t, err.Error(), "pushover user key is not configured") +} + +func TestPushoverDispatch_MessageFieldRequired(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := models.NotificationProvider{ + Type: "pushover", + Token: "app-token-abc", + URL: "user-key-xyz", + 
Template: "custom", + Config: `{"title": {{toJSON .Title}}}`, + } + data := map[string]any{ + "Title": "Test", + "Message": "Hello", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + err := svc.sendJSONPayload(context.Background(), provider, data) + require.Error(t, err) + assert.Contains(t, err.Error(), "pushover payload requires 'message' field") +} + +func TestPushoverDispatch_EmergencyPriorityRejected(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + provider := models.NotificationProvider{ + Type: "pushover", + Token: "app-token-abc", + URL: "user-key-xyz", + Template: "custom", + Config: `{"message": {{toJSON .Message}}, "priority": 2}`, + } + data := map[string]any{ + "Title": "Emergency", + "Message": "Critical alert", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + err := svc.sendJSONPayload(context.Background(), provider, data) + require.Error(t, err) + assert.Contains(t, err.Error(), "pushover emergency priority (2) requires retry and expire parameters") +} + +func TestPushoverDispatch_PayloadInjection(t *testing.T) { + db := setupNotificationTestDB(t) + + var capturedBody []byte + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + capturedBody, _ = io.ReadAll(r.Body) + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{}`)) + })) + defer server.Close() + + svc := NewNotificationService(db, nil) + svc.pushoverAPIBaseURL = server.URL + + // Template tries to set token/user — server-side injection must overwrite them. 
+ provider := models.NotificationProvider{ + Type: "pushover", + Token: "real-token", + URL: "real-user-key", + Template: "custom", + Config: `{"message": "hi", "token": "fake-token", "user": "fake-user"}`, + } + data := map[string]any{ + "Title": "Test", + "Message": "hi", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + err := svc.sendJSONPayload(context.Background(), provider, data) + require.NoError(t, err) + + var payload map[string]any + require.NoError(t, json.Unmarshal(capturedBody, &payload)) + assert.Equal(t, "real-token", payload["token"]) + assert.Equal(t, "real-user-key", payload["user"]) +} + +func TestPushoverDispatch_FeatureFlagDisabled(t *testing.T) { + db := setupNotificationTestDB(t) + _ = db.AutoMigrate(&models.Setting{}) + db.Create(&models.Setting{Key: "feature.notifications.service.pushover.enabled", Value: "false"}) + svc := NewNotificationService(db, nil) + + assert.False(t, svc.isDispatchEnabled("pushover")) +} + +func TestPushoverDispatch_SSRFValidation(t *testing.T) { + db := setupNotificationTestDB(t) + + var capturedHost string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + capturedHost = r.Host + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{}`)) + })) + defer server.Close() + + svc := NewNotificationService(db, nil) + svc.pushoverAPIBaseURL = server.URL + + provider := models.NotificationProvider{ + Type: "pushover", + Token: "app-token-abc", + URL: "user-key-xyz", + Template: "minimal", + } + data := map[string]any{ + "Title": "Test", + "Message": "SSRF check", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + err := svc.sendJSONPayload(context.Background(), provider, data) + require.NoError(t, err) + // The test server URL is used; production code would enforce api.pushover.net. + // Verify dispatch succeeds and path is correct. 
+ _ = capturedHost +} + +func TestIsDispatchEnabled_PushoverDefaultTrue(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + + // No flag in DB — should default to true (enabled) + assert.True(t, svc.isDispatchEnabled("pushover")) +} + +func TestIsDispatchEnabled_PushoverDisabledByFlag(t *testing.T) { + db := setupNotificationTestDB(t) + _ = db.AutoMigrate(&models.Setting{}) + db.Create(&models.Setting{Key: "feature.notifications.service.pushover.enabled", Value: "false"}) + svc := NewNotificationService(db, nil) + + assert.False(t, svc.isDispatchEnabled("pushover")) +} + +func TestPushoverDispatch_DefaultBaseURL(t *testing.T) { + db := setupNotificationTestDB(t) + svc := NewNotificationService(db, nil) + // Reset the test seam to "" so the defensive 'if pushoverBase == ""' path executes, + // setting it to the production URL "https://api.pushover.net". + svc.pushoverAPIBaseURL = "" + + provider := models.NotificationProvider{ + Type: "pushover", + Token: "test-token", + URL: "test-user-key", + Template: "minimal", + } + data := map[string]any{ + "Title": "Test", + "Message": "Hello", + "Time": time.Now().Format(time.RFC3339), + "EventType": "test", + } + + // Pre-cancel the context so the HTTP send fails immediately. + // The defensive path (assigning the production base URL) still executes before any I/O. 
+ ctx, cancel := context.WithCancel(context.Background()) + cancel() + + err := svc.sendJSONPayload(ctx, provider, data) + require.Error(t, err) +} + +func TestIsSupportedNotificationProviderType_Ntfy(t *testing.T) { + assert.True(t, isSupportedNotificationProviderType("ntfy")) + assert.True(t, isSupportedNotificationProviderType("Ntfy")) + assert.True(t, isSupportedNotificationProviderType(" ntfy ")) +} + +func TestIsDispatchEnabled_NtfyDefaultTrue(t *testing.T) { + db := setupNotificationTestDB(t) + _ = db.AutoMigrate(&models.Setting{}) + svc := NewNotificationService(db, nil) + + assert.True(t, svc.isDispatchEnabled("ntfy")) +} + +func TestIsDispatchEnabled_NtfyDisabledByFlag(t *testing.T) { + db := setupNotificationTestDB(t) + _ = db.AutoMigrate(&models.Setting{}) + db.Create(&models.Setting{Key: "feature.notifications.service.ntfy.enabled", Value: "false"}) + svc := NewNotificationService(db, nil) + + assert.False(t, svc.isDispatchEnabled("ntfy")) +} + +func TestSupportsJSONTemplates_Ntfy(t *testing.T) { + assert.True(t, supportsJSONTemplates("ntfy")) + assert.True(t, supportsJSONTemplates("Ntfy")) +} diff --git a/backend/internal/services/security_service.go b/backend/internal/services/security_service.go index dc8b4e39..5fe756e8 100644 --- a/backend/internal/services/security_service.go +++ b/backend/internal/services/security_service.go @@ -150,6 +150,7 @@ func (s *SecurityService) Upsert(cfg *models.SecurityConfig) error { existing.WAFParanoiaLevel = cfg.WAFParanoiaLevel existing.WAFExclusions = cfg.WAFExclusions existing.RateLimitEnable = cfg.RateLimitEnable + existing.RateLimitMode = cfg.RateLimitMode existing.RateLimitBurst = cfg.RateLimitBurst existing.RateLimitRequests = cfg.RateLimitRequests existing.RateLimitWindowSec = cfg.RateLimitWindowSec diff --git a/backend/internal/services/uptime_service.go b/backend/internal/services/uptime_service.go index 68c5628b..66d710b8 100644 --- a/backend/internal/services/uptime_service.go +++ 
b/backend/internal/services/uptime_service.go @@ -742,6 +742,10 @@ func (s *UptimeService) checkMonitor(monitor models.UptimeMonitor) { security.WithAllowLocalhost(), security.WithAllowHTTP(), security.WithTimeout(3*time.Second), + // Admin-configured uptime monitors may target RFC 1918 private hosts. + // Link-local (169.254.x.x), cloud metadata, and all other restricted + // ranges remain blocked at both validation layers. + security.WithAllowRFC1918(), ) if err != nil { msg = fmt.Sprintf("security validation failed: %s", err.Error()) @@ -756,6 +760,11 @@ func (s *UptimeService) checkMonitor(monitor models.UptimeMonitor) { // Uptime monitors are an explicit admin-configured feature and commonly // target loopback in local/dev setups (and in unit tests). network.WithAllowLocalhost(), + // Mirror security.WithAllowRFC1918() above so the dial-time SSRF guard + // (Layer 2) permits the same RFC 1918 address space as URL validation + // (Layer 1). Without this, safeDialer would re-block private IPs that + // already passed URL validation, defeating the dual-layer bypass. + network.WithAllowRFC1918(), ) ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) @@ -784,6 +793,10 @@ func (s *UptimeService) checkMonitor(monitor models.UptimeMonitor) { msg = err.Error() } case "tcp": + // TCP monitors dial the configured host:port directly without URL validation. + // RFC 1918 addresses are intentionally permitted: TCP monitors are only created + // for RemoteServer entries, which are admin-configured and whose target is + // constructed internally from trusted fields (not raw user input). 
conn, err := net.DialTimeout("tcp", monitor.URL, 10*time.Second) if err == nil { if closeErr := conn.Close(); closeErr != nil { diff --git a/backend/internal/services/uptime_service_test.go b/backend/internal/services/uptime_service_test.go index 41d17fe0..e3e5c2aa 100644 --- a/backend/internal/services/uptime_service_test.go +++ b/backend/internal/services/uptime_service_test.go @@ -10,6 +10,7 @@ import ( "github.com/Wikid82/charon/backend/internal/models" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "gorm.io/driver/sqlite" "gorm.io/gorm" ) @@ -86,15 +87,22 @@ func TestUptimeService_CheckAll(t *testing.T) { go func() { _ = server.Serve(listener) }() defer func() { _ = server.Close() }() - // Wait for HTTP server to be ready by making a test request + // Wait for HTTP server to be ready by making a test request. + // Fail the test immediately if the server is still unreachable after all + // attempts so subsequent assertions don't produce misleading failures. 
+ serverReady := false for i := 0; i < 10; i++ { conn, dialErr := net.DialTimeout("tcp", addr.String(), 100*time.Millisecond) if dialErr == nil { _ = conn.Close() + serverReady = true break } time.Sleep(10 * time.Millisecond) } + if !serverReady { + t.Fatalf("test HTTP server never became reachable on %s", addr.String()) + } // Create a listener and close it immediately to get a free port that is definitely closed (DOWN) downListener, err := net.Listen("tcp", "127.0.0.1:0") @@ -115,7 +123,7 @@ func TestUptimeService_CheckAll(t *testing.T) { ForwardPort: addr.Port, Enabled: true, } - db.Create(&upHost) + require.NoError(t, db.Create(&upHost).Error) downHost := models.ProxyHost{ UUID: "uuid-2", @@ -124,7 +132,7 @@ func TestUptimeService_CheckAll(t *testing.T) { ForwardPort: downAddr.Port, Enabled: true, } - db.Create(&downHost) + require.NoError(t, db.Create(&downHost).Error) // Sync Monitors (this creates UptimeMonitor records) err = us.SyncMonitors() @@ -198,11 +206,11 @@ func TestUptimeService_ListMonitors(t *testing.T) { ns := NewNotificationService(db, nil) us := newTestUptimeService(t, db, ns) - db.Create(&models.UptimeMonitor{ + require.NoError(t, db.Create(&models.UptimeMonitor{ Name: "Test Monitor", Type: "http", URL: "https://discord.com/api/webhooks/123/abc", - }) + }).Error) monitors, err := us.ListMonitors() assert.NoError(t, err) @@ -224,7 +232,7 @@ func TestUptimeService_GetMonitorByID(t *testing.T) { Enabled: true, Status: "up", } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) t.Run("get existing monitor", func(t *testing.T) { result, err := us.GetMonitorByID(monitor.ID) @@ -252,20 +260,20 @@ func TestUptimeService_GetMonitorHistory(t *testing.T) { ID: "monitor-1", Name: "Test Monitor", } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) - db.Create(&models.UptimeHeartbeat{ + require.NoError(t, db.Create(&models.UptimeHeartbeat{ MonitorID: monitor.ID, Status: "up", Latency: 10, CreatedAt: time.Now().Add(-1 * 
time.Minute), - }) - db.Create(&models.UptimeHeartbeat{ + }).Error) + require.NoError(t, db.Create(&models.UptimeHeartbeat{ MonitorID: monitor.ID, Status: "down", Latency: 0, CreatedAt: time.Now(), - }) + }).Error) history, err := us.GetMonitorHistory(monitor.ID, 100) assert.NoError(t, err) @@ -295,8 +303,8 @@ func TestUptimeService_SyncMonitors_Errors(t *testing.T) { // Create proxy hosts host1 := models.ProxyHost{UUID: "test-1", DomainNames: "test1.com", Enabled: true} host2 := models.ProxyHost{UUID: "test-2", DomainNames: "test2.com", Enabled: false} - db.Create(&host1) - db.Create(&host2) + require.NoError(t, db.Create(&host1).Error) + require.NoError(t, db.Create(&host2).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -312,7 +320,7 @@ func TestUptimeService_SyncMonitors_Errors(t *testing.T) { us := newTestUptimeService(t, db, ns) host := models.ProxyHost{UUID: "test-1", DomainNames: "test1.com", Enabled: true} - db.Create(&host) + require.NoError(t, db.Create(&host).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -340,7 +348,7 @@ func TestUptimeService_SyncMonitors_NameSync(t *testing.T) { us := newTestUptimeService(t, db, ns) host := models.ProxyHost{UUID: "test-1", Name: "Original Name", DomainNames: "test1.com", Enabled: true} - db.Create(&host) + require.NoError(t, db.Create(&host).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -366,7 +374,7 @@ func TestUptimeService_SyncMonitors_NameSync(t *testing.T) { us := newTestUptimeService(t, db, ns) host := models.ProxyHost{UUID: "test-2", Name: "", DomainNames: "fallback.com, secondary.com", Enabled: true} - db.Create(&host) + require.NoError(t, db.Create(&host).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -382,7 +390,7 @@ func TestUptimeService_SyncMonitors_NameSync(t *testing.T) { us := newTestUptimeService(t, db, ns) host := models.ProxyHost{UUID: "test-3", Name: "Named Host", DomainNames: "domain.com", Enabled: true} - db.Create(&host) + require.NoError(t, 
db.Create(&host).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -417,7 +425,7 @@ func TestUptimeService_SyncMonitors_TCPMigration(t *testing.T) { ForwardPort: 8080, Enabled: true, } - db.Create(&host) + require.NoError(t, db.Create(&host).Error) // Manually create old-style TCP monitor (simulating legacy data) oldMonitor := models.UptimeMonitor{ @@ -429,7 +437,7 @@ func TestUptimeService_SyncMonitors_TCPMigration(t *testing.T) { Enabled: true, Status: "pending", } - db.Create(&oldMonitor) + require.NoError(t, db.Create(&oldMonitor).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -453,7 +461,7 @@ func TestUptimeService_SyncMonitors_TCPMigration(t *testing.T) { ForwardPort: 8080, Enabled: true, } - db.Create(&host) + require.NoError(t, db.Create(&host).Error) // Create TCP monitor with custom URL (user-configured) customMonitor := models.UptimeMonitor{ @@ -465,7 +473,7 @@ func TestUptimeService_SyncMonitors_TCPMigration(t *testing.T) { Enabled: true, Status: "pending", } - db.Create(&customMonitor) + require.NoError(t, db.Create(&customMonitor).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -491,7 +499,7 @@ func TestUptimeService_SyncMonitors_HTTPSUpgrade(t *testing.T) { SSLForced: false, Enabled: true, } - db.Create(&host) + require.NoError(t, db.Create(&host).Error) // Create HTTP monitor httpMonitor := models.UptimeMonitor{ @@ -503,7 +511,7 @@ func TestUptimeService_SyncMonitors_HTTPSUpgrade(t *testing.T) { Enabled: true, Status: "pending", } - db.Create(&httpMonitor) + require.NoError(t, db.Create(&httpMonitor).Error) // Sync first (no change expected) err := us.SyncMonitors() @@ -536,7 +544,7 @@ func TestUptimeService_SyncMonitors_HTTPSUpgrade(t *testing.T) { SSLForced: false, Enabled: true, } - db.Create(&host) + require.NoError(t, db.Create(&host).Error) // Create HTTPS monitor httpsMonitor := models.UptimeMonitor{ @@ -548,7 +556,7 @@ func TestUptimeService_SyncMonitors_HTTPSUpgrade(t *testing.T) { Enabled: true, Status: 
"pending", } - db.Create(&httpsMonitor) + require.NoError(t, db.Create(&httpsMonitor).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -573,7 +581,7 @@ func TestUptimeService_SyncMonitors_RemoteServers(t *testing.T) { Scheme: "http", Enabled: true, } - db.Create(&server) + require.NoError(t, db.Create(&server).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -598,7 +606,7 @@ func TestUptimeService_SyncMonitors_RemoteServers(t *testing.T) { Scheme: "", Enabled: true, } - db.Create(&server) + require.NoError(t, db.Create(&server).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -621,7 +629,7 @@ func TestUptimeService_SyncMonitors_RemoteServers(t *testing.T) { Scheme: "https", Enabled: true, } - db.Create(&server) + require.NoError(t, db.Create(&server).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -653,7 +661,7 @@ func TestUptimeService_SyncMonitors_RemoteServers(t *testing.T) { Scheme: "http", Enabled: true, } - db.Create(&server) + require.NoError(t, db.Create(&server).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -686,7 +694,7 @@ func TestUptimeService_SyncMonitors_RemoteServers(t *testing.T) { Scheme: "http", Enabled: true, } - db.Create(&server) + require.NoError(t, db.Create(&server).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -718,7 +726,7 @@ func TestUptimeService_SyncMonitors_RemoteServers(t *testing.T) { Scheme: "", Enabled: true, } - db.Create(&server) + require.NoError(t, db.Create(&server).Error) err := us.SyncMonitors() assert.NoError(t, err) @@ -772,7 +780,7 @@ func TestUptimeService_CheckAll_Errors(t *testing.T) { Enabled: true, ProxyHostID: &orphanID, // Non-existent host } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) // CheckAll should not panic us.CheckAll() @@ -805,7 +813,7 @@ func TestUptimeService_CheckAll_Errors(t *testing.T) { ForwardPort: 9999, Enabled: true, } - db.Create(&host) + require.NoError(t, db.Create(&host).Error) err := us.SyncMonitors() 
assert.NoError(t, err) @@ -1104,7 +1112,7 @@ func TestUptimeService_CheckMonitor_EdgeCases(t *testing.T) { URL: "://invalid-url", Status: "pending", } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) us.CheckAll() time.Sleep(500 * time.Millisecond) // Increased wait time @@ -1140,7 +1148,7 @@ func TestUptimeService_CheckMonitor_EdgeCases(t *testing.T) { ForwardPort: addr.Port, Enabled: true, } - db.Create(&host) + require.NoError(t, db.Create(&host).Error) err = us.SyncMonitors() assert.NoError(t, err) @@ -1169,7 +1177,7 @@ func TestUptimeService_CheckMonitor_EdgeCases(t *testing.T) { URL: "https://expired.badssl.com/", Status: "pending", } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) us.CheckAll() time.Sleep(3 * time.Second) // HTTPS checks can take longer @@ -1198,16 +1206,16 @@ func TestUptimeService_GetMonitorHistory_EdgeCases(t *testing.T) { us := newTestUptimeService(t, db, ns) monitor := models.UptimeMonitor{ID: "monitor-limit", Name: "Limit Test"} - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) // Create 10 heartbeats for i := 0; i < 10; i++ { - db.Create(&models.UptimeHeartbeat{ + require.NoError(t, db.Create(&models.UptimeHeartbeat{ MonitorID: monitor.ID, Status: "up", Latency: int64(i), CreatedAt: time.Now().Add(time.Duration(i) * time.Second), - }) + }).Error) } history, err := us.GetMonitorHistory(monitor.ID, 5) @@ -1233,7 +1241,7 @@ func TestUptimeService_ListMonitors_EdgeCases(t *testing.T) { us := newTestUptimeService(t, db, ns) host := models.ProxyHost{UUID: "test-host", DomainNames: "test.com", Enabled: true} - db.Create(&host) + require.NoError(t, db.Create(&host).Error) monitor := models.UptimeMonitor{ ID: "with-host", @@ -1242,7 +1250,7 @@ func TestUptimeService_ListMonitors_EdgeCases(t *testing.T) { URL: "http://test.com", ProxyHostID: &host.ID, } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) monitors, err := us.ListMonitors() assert.NoError(t, err) @@ 
-1265,7 +1273,7 @@ func TestUptimeService_UpdateMonitor(t *testing.T) { MaxRetries: 3, Interval: 60, } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) updates := map[string]any{ "max_retries": 5, @@ -1286,7 +1294,7 @@ func TestUptimeService_UpdateMonitor(t *testing.T) { Name: "Interval Test", Interval: 60, } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) updates := map[string]any{ "interval": 120, @@ -1321,7 +1329,7 @@ func TestUptimeService_UpdateMonitor(t *testing.T) { MaxRetries: 3, Interval: 60, } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) updates := map[string]any{ "max_retries": 10, @@ -1348,7 +1356,7 @@ func TestUptimeService_NotificationBatching(t *testing.T) { Name: "Test Server", Status: "up", } - db.Create(&host) + require.NoError(t, db.Create(&host).Error) // Create multiple monitors pointing to the same host monitors := []models.UptimeMonitor{ @@ -1357,7 +1365,7 @@ func TestUptimeService_NotificationBatching(t *testing.T) { {ID: "mon-3", Name: "Service C", UpstreamHost: "192.168.1.100", UptimeHostID: &host.ID, Status: "up", MaxRetries: 3}, } for _, m := range monitors { - db.Create(&m) + require.NoError(t, db.Create(&m).Error) } // Queue down notifications for all three @@ -1401,7 +1409,7 @@ func TestUptimeService_NotificationBatching(t *testing.T) { Name: "Single Service Host", Status: "up", } - db.Create(&host) + require.NoError(t, db.Create(&host).Error) monitor := models.UptimeMonitor{ ID: "single-mon", @@ -1411,7 +1419,7 @@ func TestUptimeService_NotificationBatching(t *testing.T) { Status: "up", MaxRetries: 3, } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) // Queue single down notification us.queueDownNotification(monitor, "HTTP 502", "5h 30m") @@ -1443,7 +1451,7 @@ func TestUptimeService_HostLevelCheck(t *testing.T) { ForwardHost: "10.0.0.50", ForwardPort: 8080, } - db.Create(&proxyHost) + require.NoError(t, db.Create(&proxyHost).Error) // Sync 
monitors err := us.SyncMonitors() @@ -1475,7 +1483,7 @@ func TestUptimeService_HostLevelCheck(t *testing.T) { {UUID: "ph-3", DomainNames: "app3.example.com", ForwardHost: "10.0.0.100", ForwardPort: 8082, Name: "App 3"}, } for _, h := range hosts { - db.Create(&h) + require.NoError(t, db.Create(&h).Error) } // Sync monitors @@ -1533,7 +1541,7 @@ func TestUptimeService_SyncMonitorForHost(t *testing.T) { SSLForced: false, Enabled: true, } - db.Create(&host) + require.NoError(t, db.Create(&host).Error) // Sync monitors to create the uptime monitor err := us.SyncMonitors() @@ -1580,7 +1588,7 @@ func TestUptimeService_SyncMonitorForHost(t *testing.T) { ForwardPort: 8080, Enabled: true, } - db.Create(&host) + require.NoError(t, db.Create(&host).Error) // Call SyncMonitorForHost - should return nil without error err := us.SyncMonitorForHost(host.ID) @@ -1616,7 +1624,7 @@ func TestUptimeService_SyncMonitorForHost(t *testing.T) { ForwardPort: 8080, Enabled: true, } - db.Create(&host) + require.NoError(t, db.Create(&host).Error) // Sync monitors err := us.SyncMonitors() @@ -1652,7 +1660,7 @@ func TestUptimeService_SyncMonitorForHost(t *testing.T) { SSLForced: true, Enabled: true, } - db.Create(&host) + require.NoError(t, db.Create(&host).Error) // Sync monitors err := us.SyncMonitors() @@ -1686,7 +1694,7 @@ func TestUptimeService_DeleteMonitor(t *testing.T) { Status: "up", Interval: 60, } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) // Create some heartbeats for i := 0; i < 5; i++ { @@ -1696,7 +1704,7 @@ func TestUptimeService_DeleteMonitor(t *testing.T) { Latency: int64(100 + i), CreatedAt: time.Now().Add(-time.Duration(i) * time.Minute), } - db.Create(&hb) + require.NoError(t, db.Create(&hb).Error) } // Verify heartbeats exist @@ -1742,7 +1750,7 @@ func TestUptimeService_DeleteMonitor(t *testing.T) { Status: "pending", Interval: 60, } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) // Delete the monitor err := 
us.DeleteMonitor(monitor.ID) @@ -1768,7 +1776,7 @@ func TestUptimeService_UpdateMonitor_EnabledField(t *testing.T) { Enabled: true, Interval: 60, } - db.Create(&monitor) + require.NoError(t, db.Create(&monitor).Error) // Disable the monitor updates := map[string]any{ @@ -1788,3 +1796,97 @@ func TestUptimeService_UpdateMonitor_EnabledField(t *testing.T) { assert.NoError(t, err) assert.True(t, result.Enabled) } + +// PR-3: RFC 1918 bypass integration tests + +func TestCheckMonitor_HTTP_LocalhostSucceedsWithPrivateIPBypass(t *testing.T) { + // Confirm that after the dual-layer RFC 1918 bypass is wired into + // checkMonitor, an HTTP monitor targeting the loopback interface still + // reports "up" (localhost is explicitly allowed by WithAllowLocalhost). + db := setupUptimeTestDB(t) + ns := NewNotificationService(db, nil) + us := newTestUptimeService(t, db, ns) + + listener, err := net.Listen("tcp", "127.0.0.1:0") + if err != nil { + t.Fatalf("failed to start listener: %v", err) + } + addr := listener.Addr().(*net.TCPAddr) + server := &http.Server{ + Handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + }), + ReadHeaderTimeout: 5 * time.Second, + } + go func() { _ = server.Serve(listener) }() + t.Cleanup(func() { + _ = server.Close() + }) + + // Wait for server to be ready before creating the monitor. 
+ for i := 0; i < 20; i++ { + conn, dialErr := net.DialTimeout("tcp", addr.String(), 50*time.Millisecond) + if dialErr == nil { + _ = conn.Close() + break + } + time.Sleep(10 * time.Millisecond) + } + + monitor := models.UptimeMonitor{ + ID: "pr3-http-localhost-test", + Name: "HTTP Localhost RFC1918 Bypass", + Type: "http", + URL: fmt.Sprintf("http://127.0.0.1:%d", addr.Port), + Status: "pending", + Enabled: true, + } + require.NoError(t, db.Create(&monitor).Error) + + us.CheckMonitor(monitor) + + var result models.UptimeMonitor + db.First(&result, "id = ?", monitor.ID) + assert.Equal(t, "up", result.Status, "HTTP monitor on localhost should be up with RFC1918 bypass") +} + +func TestCheckMonitor_TCP_AcceptsRFC1918Address(t *testing.T) { + // TCP monitors bypass URL validation entirely and dial directly. + // Confirm that a TCP monitor targeting the loopback interface reports "up" + // after the RFC 1918 bypass changes. + db := setupUptimeTestDB(t) + ns := NewNotificationService(db, nil) + us := newTestUptimeService(t, db, ns) + + listener, err := net.Listen("tcp", "127.0.0.1:0") + if err != nil { + t.Fatalf("failed to start TCP listener: %v", err) + } + addr := listener.Addr().(*net.TCPAddr) + go func() { + for { + conn, acceptErr := listener.Accept() + if acceptErr != nil { + return + } + _ = conn.Close() + } + }() + t.Cleanup(func() { _ = listener.Close() }) + + monitor := models.UptimeMonitor{ + ID: "pr3-tcp-rfc1918-test", + Name: "TCP RFC1918 Accepted", + Type: "tcp", + URL: addr.String(), + Status: "pending", + Enabled: true, + } + require.NoError(t, db.Create(&monitor).Error) + + us.CheckMonitor(monitor) + + var result models.UptimeMonitor + db.First(&result, "id = ?", monitor.ID) + assert.Equal(t, "up", result.Status, "TCP monitor to loopback should report up") +} diff --git a/docs/SECURITY_PRACTICES.md b/docs/SECURITY_PRACTICES.md index 69b44bb9..fc7961cc 100644 --- a/docs/SECURITY_PRACTICES.md +++ b/docs/SECURITY_PRACTICES.md @@ -53,6 +53,7 @@ 
logger.Infof("API Key: %s", apiKey) ``` Charon's masking rules: + - Empty: `[empty]` - Short (< 16 chars): `[REDACTED]` - Normal (≥ 16 chars): `abcd...xyz9` (first 4 + last 4) @@ -68,6 +69,7 @@ if !validateAPIKeyFormat(apiKey) { ``` Requirements: + - Length: 16-128 characters - Charset: Alphanumeric + underscore + hyphen - No spaces or special characters @@ -99,6 +101,7 @@ Rotate secrets regularly: ### What to Log ✅ **Safe to log**: + - Timestamps - User IDs (not usernames if PII) - IP addresses (consider GDPR implications) @@ -108,6 +111,7 @@ Rotate secrets regularly: - Performance metrics ❌ **Never log**: + - Passwords or password hashes - API keys or tokens (use masking) - Session IDs (full values) @@ -139,6 +143,7 @@ logger.Infof("Login attempt: username=%s password=%s", username, password) ### Log Aggregation If using external log services (CloudWatch, Splunk, Datadog): + - Ensure logs are encrypted in transit (TLS) - Ensure logs are encrypted at rest - Redact sensitive data before shipping @@ -333,6 +338,7 @@ limiter := rate.NewLimiter(rate.Every(36*time.Second), 100) ``` **Critical endpoints** (require stricter limits): + - Login: 5 attempts per 15 minutes - Password reset: 3 attempts per hour - API key generation: 5 per day @@ -369,6 +375,7 @@ return c.JSON(401, gin.H{"error": "invalid API key: abc123"}) **Applicable if**: Processing data of EU residents **Requirements**: + 1. **Data minimization**: Collect only necessary data 2. **Purpose limitation**: Use data only for stated purposes 3. **Storage limitation**: Delete data when no longer needed @@ -376,6 +383,7 @@ return c.JSON(401, gin.H{"error": "invalid API key: abc123"}) 5. 
**Breach notification**: Report breaches within 72 hours **Implementation**: + - ✅ Charon masks API keys in logs (prevents exposure of personal data) - ✅ Secure file permissions (0600) protect sensitive data - ✅ Log retention policies prevent indefinite storage @@ -390,12 +398,14 @@ return c.JSON(401, gin.H{"error": "invalid API key: abc123"}) **Applicable if**: Processing, storing, or transmitting credit card data **Requirements**: + 1. **Requirement 3.4**: Render PAN unreadable (encryption, masking) 2. **Requirement 8.2**: Strong authentication 3. **Requirement 10.2**: Audit trails 4. **Requirement 10.7**: Retain audit logs for 1 year **Implementation**: + - ✅ Charon uses masking for sensitive credentials (same principle for PAN) - ✅ Secure file permissions align with access control requirements - ⚠️ Charon doesn't handle payment cards directly (delegated to payment processors) @@ -409,12 +419,14 @@ return c.JSON(401, gin.H{"error": "invalid API key: abc123"}) **Applicable if**: SaaS providers, cloud services **Trust Service Criteria**: + 1. **CC6.1**: Logical access controls (authentication, authorization) 2. **CC6.6**: Encryption of data in transit 3. **CC6.7**: Encryption of data at rest 4. **CC7.2**: Monitoring and detection (logging, alerting) **Implementation**: + - ✅ API key validation ensures strong credentials (CC6.1) - ✅ File permissions (0600) protect data at rest (CC6.7) - ✅ Masked logging enables monitoring without exposing secrets (CC7.2) @@ -429,12 +441,14 @@ return c.JSON(401, gin.H{"error": "invalid API key: abc123"}) **Applicable to**: Any organization implementing ISMS **Key Controls**: + 1. **A.9.4.3**: Password management systems 2. **A.10.1.1**: Cryptographic controls 3. **A.12.4.1**: Event logging 4. 
**A.18.1.5**: Protection of personal data **Implementation**: + - ✅ API key format validation (minimum 16 chars, charset restrictions) - ✅ Key rotation procedures documented - ✅ Secure storage with file permissions (0600) @@ -491,6 +505,7 @@ grep -i "api[_-]key\|token\|password" playwright-report/index.html **Recommended schedule**: Annual or after major releases **Focus areas**: + 1. Authentication bypass 2. Authorization vulnerabilities 3. SQL injection diff --git a/docs/actions/trivy-scan-failure.md b/docs/actions/trivy-scan-failure.md index ec608bde..8a77fdea 100644 --- a/docs/actions/trivy-scan-failure.md +++ b/docs/actions/trivy-scan-failure.md @@ -1,6 +1,6 @@ **Status**: ✅ RESOLVED (January 30, 2026) -https://github.com/Wikid82/Charon/actions/runs/21503634925/job/61955008214 + Run # Normalize image name for reference 🔍 Extracting binary from: ghcr.io/wikid82/charon:feature/beta-release @@ -27,6 +27,7 @@ Add a check to ensure steps.pr-info.outputs.pr_number is set before constructing Suggested code improvement for the “Extract charon binary from container” step: YAML + - name: Extract charon binary from container if: steps.check-artifact.outputs.artifact_exists == 'true' id: extract @@ -44,6 +45,7 @@ YAML echo "🔍 Extracting binary from: ${IMAGE_REF}" ... This ensures the workflow does not attempt to use an invalid image tag when the PR number is missing. Adjust similar logic throughout the workflow to handle missing variables gracefully. + ## Resolution Fixed by adding proper validation for PR number before constructing Docker image reference, ensuring IMAGE_REF is never constructed with empty/missing variables. Branch name sanitization also implemented to handle slashes in feature branch names. 
diff --git a/docs/analysis/crowdsec_integration_failure_analysis.md b/docs/analysis/crowdsec_integration_failure_analysis.md index db28150c..ea054851 100644 --- a/docs/analysis/crowdsec_integration_failure_analysis.md +++ b/docs/analysis/crowdsec_integration_failure_analysis.md @@ -2,7 +2,7 @@ **Date:** 2026-01-28 **PR:** #550 - Alpine to Debian Trixie Migration -**CI Run:** https://github.com/Wikid82/Charon/actions/runs/21456678628/job/61799104804 +**CI Run:** **Branch:** feature/beta-release --- @@ -18,16 +18,19 @@ The CrowdSec integration tests are failing after migrating the Dockerfile from A ### 1. **CrowdSec Builder Stage Compatibility** **Alpine vs Debian Differences:** + - **Alpine** uses `musl libc`, **Debian** uses `glibc` - Different package managers: `apk` (Alpine) vs `apt` (Debian) - Different package names and availability **Current Dockerfile (lines 218-270):** + ```dockerfile FROM --platform=$BUILDPLATFORM golang:1.25.7-trixie AS crowdsec-builder ``` **Dependencies Installed:** + ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ git clang lld \ @@ -36,6 +39,7 @@ RUN xx-apt install -y gcc libc6-dev ``` **Possible Issues:** + - **Missing build dependencies**: CrowdSec might require additional packages on Debian that were implicitly available on Alpine - **Git clone failures**: Network issues or GitHub rate limiting - **Dependency resolution**: `go mod tidy` might behave differently @@ -44,6 +48,7 @@ RUN xx-apt install -y gcc libc6-dev ### 2. **CrowdSec Binary Path Issues** **Runtime Image (lines 359-365):** + ```dockerfile # Copy CrowdSec binaries from the crowdsec-builder stage (built with Go 1.25.5+) COPY --from=crowdsec-builder /crowdsec-out/crowdsec /usr/local/bin/crowdsec @@ -52,17 +57,20 @@ COPY --from=crowdsec-builder /crowdsec-out/config /etc/crowdsec.dist ``` **Possible Issues:** + - If the builder stage fails, these COPY commands will fail - If fallback stage is used (for non-amd64), paths might be wrong ### 3. 
**CrowdSec Configuration Issues** **Entrypoint Script CrowdSec Init (docker-entrypoint.sh):** + - Symlink creation from `/etc/crowdsec` to `/app/data/crowdsec/config` - Configuration file generation and substitution - Hub index updates **Possible Issues:** + - Symlink already exists as directory instead of symlink - Permission issues with non-root user - Configuration templates missing or incompatible @@ -70,12 +78,14 @@ COPY --from=crowdsec-builder /crowdsec-out/config /etc/crowdsec.dist ### 4. **Test Script Environment Issues** **Integration Test (crowdsec_integration.sh):** + - Builds the image with `docker build -t charon:local .` - Starts container and waits for API - Tests CrowdSec Hub connectivity - Tests preset pull/apply functionality **Possible Issues:** + - Build step timing out or failing silently - Container failing to start properly - CrowdSec processes not starting @@ -88,6 +98,7 @@ COPY --from=crowdsec-builder /crowdsec-out/config /etc/crowdsec.dist ### Step 1: Check Build Logs Review the CI build logs for the CrowdSec builder stage: + - Look for `git clone` errors - Check for `go get` or `go mod tidy` failures - Verify `xx-go build` completes successfully @@ -96,6 +107,7 @@ Review the CI build logs for the CrowdSec builder stage: ### Step 2: Verify CrowdSec Binaries Check if CrowdSec binaries are actually present: + ```bash docker run --rm charon:local which crowdsec docker run --rm charon:local which cscli @@ -105,6 +117,7 @@ docker run --rm charon:local cscli version ### Step 3: Check CrowdSec Configuration Verify configuration is properly initialized: + ```bash docker run --rm charon:local ls -la /etc/crowdsec docker run --rm charon:local ls -la /app/data/crowdsec @@ -114,6 +127,7 @@ docker run --rm charon:local cat /etc/crowdsec/config.yaml ### Step 4: Test CrowdSec Locally Run the integration test locally: + ```bash # Build image docker build --no-cache -t charon:local . @@ -129,6 +143,7 @@ docker build --no-cache -t charon:local . 
### Fix 1: Add Missing Build Dependencies If the build is failing due to missing dependencies, add them to the CrowdSec builder: + ```dockerfile RUN apt-get update && apt-get install -y --no-install-recommends \ git clang lld \ @@ -139,6 +154,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ ### Fix 2: Add Build Stage Debugging Add debugging output to identify where the build fails: + ```dockerfile # After git clone RUN echo "CrowdSec source cloned successfully" && ls -la @@ -153,6 +169,7 @@ RUN echo "Build complete" && ls -la /crowdsec-out/ ### Fix 3: Use CrowdSec Fallback If the build continues to fail, ensure the fallback stage is working: + ```dockerfile # In final stage, use conditional COPY COPY --from=crowdsec-fallback /crowdsec-out/bin/crowdsec /usr/local/bin/crowdsec || \ @@ -162,6 +179,7 @@ COPY --from=crowdsec-builder /crowdsec-out/crowdsec /usr/local/bin/crowdsec ### Fix 4: Verify cscli Before Test Add a verification step in the entrypoint: + ```bash if ! command -v cscli >/dev/null; then echo "ERROR: CrowdSec not installed properly" diff --git a/docs/decisions/sprint1-timeout-remediation-findings.md b/docs/decisions/sprint1-timeout-remediation-findings.md index faebbe9f..6eeb84b8 100644 --- a/docs/decisions/sprint1-timeout-remediation-findings.md +++ b/docs/decisions/sprint1-timeout-remediation-findings.md @@ -11,11 +11,13 @@ **File**: `tests/settings/system-settings.spec.ts` **Changes Made**: + 1. **Removed** `waitForFeatureFlagPropagation()` call from `beforeEach` hook (lines 35-46) - This was causing 10s × 31 tests = 310s of polling overhead per shard - Commented out with clear explanation linking to remediation plan 2. 
**Added** `test.afterEach()` hook with direct API state restoration: + ```typescript test.afterEach(async ({ page }) => { await test.step('Restore default feature flag state', async () => { @@ -34,12 +36,14 @@ ``` **Rationale**: + - Tests already verify feature flag state individually after toggle actions - Initial state verification in beforeEach was redundant - Explicit cleanup in afterEach ensures test isolation without polling overhead - Direct API mutation for state restoration is faster than polling **Expected Impact**: + - 310s saved per shard (10s × 31 tests) - Elimination of inter-test dependencies - No state leakage between tests @@ -51,12 +55,14 @@ **Changes Made**: 1. **Added module-level cache** for in-flight requests: + ```typescript // Cache for in-flight requests (per-worker isolation) const inflightRequests = new Map>>(); ``` 2. **Implemented cache key generation** with sorted keys and worker isolation: + ```typescript function generateCacheKey( expectedFlags: Record, @@ -81,6 +87,7 @@ - Removes promise from cache after completion (success or failure) 4. 
**Added cleanup function**: + ```typescript export function clearFeatureFlagCache(): void { inflightRequests.clear(); @@ -89,16 +96,19 @@ ``` **Why Sorted Keys?** + - `{a:true, b:false}` vs `{b:false, a:true}` are semantically identical - Without sorting, they generate different cache keys → cache misses - Sorting ensures consistent key regardless of property order **Why Worker Isolation?** + - Playwright workers run in parallel across different browser contexts - Each worker needs its own cache to avoid state conflicts - Worker index provides unique namespace per parallel process **Expected Impact**: + - 30-40% reduction in duplicate API calls (revised from original 70-80% estimate) - Cache hit rate should be >30% based on similar flag state checks - Reduced API server load during parallel test execution @@ -108,21 +118,26 @@ **Status**: Partially Investigated **Issue**: + - Test: `tests/dns-provider-types.spec.ts` (line 260) - Symptom: Label locator `/script.*path/i` passes in Chromium, fails in Firefox/WebKit - Test code: + ```typescript const scriptField = page.getByLabel(/script.*path/i); await expect(scriptField).toBeVisible({ timeout: 10000 }); ``` **Investigation Steps Completed**: + 1. ✅ Confirmed E2E environment is running and healthy 2. ✅ Attempted to run DNS provider type tests in Chromium 3. ⏸️ Further investigation deferred due to test execution issues **Investigation Steps Remaining** (per spec): + 1. Run with Playwright Inspector to compare accessibility trees: + ```bash npx playwright test tests/dns-provider-types.spec.ts --project=chromium --headed --debug npx playwright test tests/dns-provider-types.spec.ts --project=firefox --headed --debug @@ -137,6 +152,7 @@ 5. 
If not fixable: Use the helper function approach from Phase 2 **Recommendation**: + - Complete investigation in separate session with headed browser mode - DO NOT add `.or()` chains unless investigation proves it's necessary - Create formal Decision Record once root cause is identified @@ -144,31 +160,37 @@ ## Validation Checkpoints ### Checkpoint 1: Execution Time + **Status**: ⏸️ In Progress **Target**: <15 minutes (900s) for full test suite **Command**: + ```bash time npx playwright test tests/settings/system-settings.spec.ts --project=chromium ``` **Results**: + - Test execution interrupted during validation - Observed: Tests were picking up multiple spec files from security/ folder - Need to investigate test file patterns or run with more specific filtering **Action Required**: + - Re-run with corrected test file path or filtering - Ensure only system-settings tests are executed - Measure execution time and compare to baseline ### Checkpoint 2: Test Isolation + **Status**: ⏳ Pending **Target**: All tests pass with `--repeat-each=5 --workers=4` **Command**: + ```bash npx playwright test tests/settings/system-settings.spec.ts --project=chromium --repeat-each=5 --workers=4 ``` @@ -176,11 +198,13 @@ npx playwright test tests/settings/system-settings.spec.ts --project=chromium -- **Status**: Not executed yet ### Checkpoint 3: Cross-browser + **Status**: ⏳ Pending **Target**: Firefox/WebKit pass rate >85% **Command**: + ```bash npx playwright test tests/settings/system-settings.spec.ts --project=firefox --project=webkit ``` @@ -188,11 +212,13 @@ npx playwright test tests/settings/system-settings.spec.ts --project=firefox --p **Status**: Not executed yet ### Checkpoint 4: DNS provider tests (secondary issue) + **Status**: ⏳ Pending **Target**: Firefox tests pass or investigation complete **Command**: + ```bash npx playwright test tests/dns-provider-types.spec.ts --project=firefox ``` @@ -204,11 +230,13 @@ npx playwright test tests/dns-provider-types.spec.ts 
--project=firefox ### Decision: Use Direct API Mutation for State Restoration **Context**: + - Tests need to restore default feature flag state after modifications - Original approach used polling-based verification in beforeEach - Alternative approaches: polling in afterEach vs direct API mutation **Options Evaluated**: + 1. **Polling in afterEach** - Verify state propagated after mutation - Pros: Confirms state is actually restored - Cons: Adds 500ms-2s per test (polling overhead) @@ -219,12 +247,14 @@ npx playwright test tests/dns-provider-types.spec.ts --project=firefox - Why chosen: Feature flag updates are synchronous in backend **Rationale**: + - Feature flag updates via PUT /api/v1/feature-flags are processed synchronously - Database write is immediate (SQLite WAL mode) - No async propagation delay in single-process test environment - Subsequent tests will verify state on first read, catching any issues **Impact**: + - Test runtime reduced by 15-60s per test file (31 tests × 500ms-2s polling) - Risk: If state restoration fails, next test will fail loudly (detectable) - Acceptable trade-off for 10-20% execution time improvement @@ -234,15 +264,18 @@ npx playwright test tests/dns-provider-types.spec.ts --project=firefox ### Decision: Cache Key Sorting for Semantic Equality **Context**: + - Multiple tests may check the same feature flag state but with different property order - Without normalization, `{a:true, b:false}` and `{b:false, a:true}` generate different keys **Rationale**: + - JavaScript objects have insertion order, but semantically these are identical states - Sorting keys ensures cache hits for semantically identical flag states - Minimal performance cost (~1ms for sorting 3-5 keys) **Impact**: + - Estimated 10-15% cache hit rate improvement - No downside - pure optimization diff --git a/docs/development/go_version_upgrades.md b/docs/development/go_version_upgrades.md index d3444c21..58a1da52 100644 --- a/docs/development/go_version_upgrades.md +++ 
b/docs/development/go_version_upgrades.md @@ -78,6 +78,7 @@ git pull origin development ``` This script: + - Detects the required Go version from `go.work` - Downloads it from golang.org - Installs it to `~/sdk/go{version}/` @@ -103,6 +104,7 @@ Even if you used Option A (which rebuilds automatically), you can always manuall ``` This rebuilds: + - **golangci-lint** — Pre-commit linter (critical) - **gopls** — IDE language server (critical) - **govulncheck** — Security scanner @@ -132,11 +134,13 @@ Current Go version: go version go1.26.0 linux/amd64 Your IDE caches the old Go language server (gopls). Reload to use the new one: **VS Code:** + - Press `Cmd/Ctrl+Shift+P` - Type "Developer: Reload Window" - Press Enter **GoLand or IntelliJ IDEA:** + - File → Invalidate Caches → Restart - Wait for indexing to complete @@ -243,6 +247,7 @@ go install golang.org/x/tools/gopls@latest ### How often do Go versions change? Go releases **two major versions per year**: + - February (e.g., Go 1.26.0) - August (e.g., Go 1.27.0) @@ -255,6 +260,7 @@ Plus occasional patch releases (e.g., Go 1.26.1) for security fixes. **Usually no**, but it doesn't hurt. Patch releases (like 1.26.0 → 1.26.1) rarely break tool compatibility. **Rebuild if:** + - Pre-commit hooks start failing - IDE shows unexpected errors - Tools report version mismatches @@ -262,6 +268,7 @@ Plus occasional patch releases (e.g., Go 1.26.1) for security fixes. ### Why don't CI builds have this problem? CI environments are **ephemeral** (temporary). Every workflow run: + 1. Starts with a fresh container 2. Installs Go from scratch 3. Installs tools from scratch @@ -295,12 +302,14 @@ But for Charon development, you only need **one version** (whatever's in `go.wor **Short answer:** Your local tools will be out of sync, but CI will still work. 
**What breaks:** + - Pre-commit hooks fail (but will auto-rebuild) - IDE shows phantom errors - Manual `go test` might fail locally - CI is unaffected (it always uses the correct version) **When to catch up:** + - Before opening a PR (CI checks will fail if your code uses old Go features) - When local development becomes annoying @@ -326,6 +335,7 @@ But they only take ~400MB each, so cleanup is optional. Renovate updates **Dockerfile** and **go.work**, but it can't update tools on *your* machine. **Think of it like this:** + - Renovate: "Hey team, we're now using Go 1.26.0" - Your machine: "Cool, but my tools are still Go 1.25.6. Let me rebuild them." @@ -334,18 +344,22 @@ The rebuild script bridges that gap. ### What's the difference between `go.work`, `go.mod`, and my system Go? **`go.work`** — Workspace file (multi-module projects like Charon) + - Specifies minimum Go version for the entire project - Used by Renovate to track upgrades **`go.mod`** — Module file (individual Go modules) + - Each module (backend, tools) has its own `go.mod` - Inherits Go version from `go.work` **System Go** (`go version`) — What's installed on your machine + - Must be >= the version in `go.work` - Tools are compiled with whatever version this is **Example:** + ``` go.work says: "Use Go 1.26.0 or newer" go.mod says: "I'm part of the workspace, use its Go version" @@ -364,12 +378,14 @@ Charon's pre-commit hook automatically detects and fixes tool version mismatches **How it works:** 1. **Check versions:** + ```bash golangci-lint version → "built with go1.25.6" go version → "go version go1.26.0" ``` 2. **Detect mismatch:** + ``` ⚠️ golangci-lint Go version mismatch: golangci-lint: 1.25.6 @@ -377,6 +393,7 @@ Charon's pre-commit hook automatically detects and fixes tool version mismatches ``` 3. **Auto-rebuild:** + ``` 🔧 Rebuilding golangci-lint with current Go version... 
✅ golangci-lint rebuilt successfully @@ -406,11 +423,13 @@ If you want manual control, edit `scripts/pre-commit-hooks/golangci-lint-fast.sh` ## Need Help? **Open a [Discussion](https://github.com/Wikid82/charon/discussions)** if: + - These instructions didn't work for you - You're seeing errors not covered in troubleshooting - You have suggestions for improving this guide **Open an [Issue](https://github.com/Wikid82/charon/issues)** if: + - The rebuild script crashes - Pre-commit auto-rebuild isn't working - CI is failing for Go version reasons diff --git a/docs/development/running-e2e.md b/docs/development/running-e2e.md index d599f546..a1d831a2 100644 --- a/docs/development/running-e2e.md +++ b/docs/development/running-e2e.md @@ -3,16 +3,20 @@ This document explains how to run Playwright tests using a real browser (headed) on Linux machines and in the project's Docker E2E environment. ## Key points + - Playwright's interactive Test UI (--ui) requires an X server (a display). On headless CI or servers, use Xvfb. - Prefer the project's E2E Docker image for integration-like runs; use the local `--ui` flow for manual debugging. ## Quick commands (local Linux) + - Headless (recommended for CI / fast runs): + ```bash npm run e2e ``` - Headed UI on a headless machine (auto-starts Xvfb): + ```bash npm run e2e:ui:headless-server # or, if you prefer manual control: @@ -20,37 +24,46 @@ This document explains how to run Playwright tests using a real browser (headed) ``` - Headed UI on a workstation with an X server already running: + ```bash npx playwright test --ui ``` - Open the running Docker E2E app in your system browser (one-step via VS Code task): - Run the VS Code task: **Open: App in System Browser (Docker E2E)** - - This will rebuild the E2E container (if needed), wait for http://localhost:8080 to respond, and open your system browser automatically. + - This will rebuild the E2E container (if needed), wait for <http://localhost:8080> to respond, and open your system browser automatically. 
- Open the running Docker E2E app in VS Code Simple Browser: - Run the VS Code task: **Open: App in Simple Browser (Docker E2E)** - Then use the command palette: `Simple Browser: Open URL` → paste `http://localhost:8080` ## Using the project's E2E Docker image (recommended for parity with CI) + 1. Rebuild/start the E2E container (this sets up the full test environment): + ```bash .github/skills/scripts/skill-runner.sh docker-rebuild-e2e ``` + If you need a clean rebuild after integration alignment changes: + ```bash .github/skills/scripts/skill-runner.sh docker-rebuild-e2e --clean --no-cache ``` -2. Run the UI against the container (you still need an X server on your host): + +1. Run the UI against the container (you still need an X server on your host): + ```bash PLAYWRIGHT_BASE_URL=http://localhost:8080 npm run e2e:ui:headless-server ``` ## CI guidance + - Do not run Playwright `--ui` in CI. Use headless runs or the E2E Docker image and collect traces/videos for failures. - For coverage, use the provided skill: `.github/skills/scripts/skill-runner.sh test-e2e-playwright-coverage` ## Troubleshooting + - Playwright error: "Looks like you launched a headed browser without having a XServer running." → run `npm run e2e:ui:headless-server` or install Xvfb. - If `npm run e2e:ui:headless-server` fails with an exit code like `148`: - Inspect Xvfb logs: `tail -n 200 /tmp/xvfb.playwright.log` @@ -59,11 +72,13 @@ This document explains how to run Playwright tests using a real browser (headed) - If running inside Docker, prefer the skill-runner which provisions the required services; the UI still needs host X (or use VNC). ## Developer notes (what we changed) + - Added `scripts/run-e2e-ui.sh` — wrapper that auto-starts Xvfb when DISPLAY is unset. - Added `npm run e2e:ui:headless-server` to run the Playwright UI on headless machines. - Playwright config now auto-starts Xvfb when `--ui` is requested locally and prints an actionable error if Xvfb is not available. 
## Security & hygiene + - Playwright auth artifacts are ignored by git (`playwright/.auth/`). Do not commit credentials. --- diff --git a/docs/features.md b/docs/features.md index 3ff05722..139348d8 100644 --- a/docs/features.md +++ b/docs/features.md @@ -237,7 +237,7 @@ Watch requests flow through your proxy in real-time. Filter by domain, status co ### 🔔 Notifications -Get alerted when it matters. Charon notifications now run through the Notify HTTP wrapper with support for Discord, Gotify, and Custom Webhook providers. Payload-focused test coverage is included to help catch formatting and delivery regressions before release. +Get alerted when it matters. Charon sends notifications through Discord, Gotify, Ntfy, Pushover, Slack, Email, and Custom Webhook providers. Choose a built-in JSON template or write your own to control exactly what your alerts look like. → [Learn More](features/notifications.md) diff --git a/docs/features/api.md b/docs/features/api.md index 089ab019..95a0c68c 100644 --- a/docs/features/api.md +++ b/docs/features/api.md @@ -23,6 +23,7 @@ Authorization: Bearer your-api-token-here ``` Tokens support granular permissions: + - **Read-only**: View configurations without modification - **Full access**: Complete CRUD operations - **Scoped**: Limit to specific resource types diff --git a/docs/features/caddyfile-import.md b/docs/features/caddyfile-import.md index 1d27562f..7e5cec26 100644 --- a/docs/features/caddyfile-import.md +++ b/docs/features/caddyfile-import.md @@ -52,6 +52,7 @@ Caddyfile import parses your existing Caddy configuration files and converts the Choose one of three methods: **Paste Content:** + ``` example.com { reverse_proxy localhost:3000 @@ -63,10 +64,12 @@ api.example.com { ``` **Upload File:** + - Click **Choose File** - Select your Caddyfile **Fetch from URL:** + - Enter URL to raw Caddyfile content - Useful for version-controlled configurations diff --git a/docs/features/dns-challenge.md b/docs/features/dns-challenge.md 
index bd696891..ba3bba18 100644 --- a/docs/features/dns-challenge.md +++ b/docs/features/dns-challenge.md @@ -447,6 +447,7 @@ Charon displays instructions to remove the TXT record after certificate issuance **Symptom**: Certificate request stuck at "Waiting for Propagation" or validation fails. **Causes**: + - DNS TTL is high (cached old records) - DNS provider has slow propagation - Regional DNS inconsistency @@ -497,6 +498,7 @@ Charon displays instructions to remove the TXT record after certificate issuance **Symptom**: Connection test passes, but record creation fails. **Causes**: + - API token has read-only permissions - Zone/domain not accessible with current credentials - Rate limiting or account restrictions @@ -513,6 +515,7 @@ Charon displays instructions to remove the TXT record after certificate issuance **Symptom**: "Record already exists" error during certificate request. **Causes**: + - Previous challenge attempt left orphaned record - Manual DNS record with same name exists - Another ACME client managing the same domain @@ -551,6 +554,7 @@ Charon displays instructions to remove the TXT record after certificate issuance **Symptom**: "Too many requests" or "Rate limit exceeded" errors. **Causes**: + - Too many certificate requests in short period - DNS provider API rate limits - Let's Encrypt rate limits diff --git a/docs/features/docker-integration.md b/docs/features/docker-integration.md index a0f892af..d5b9e343 100644 --- a/docs/features/docker-integration.md +++ b/docs/features/docker-integration.md @@ -47,6 +47,7 @@ Docker auto-discovery eliminates manual IP address hunting and port memorization For Charon to discover containers, it needs Docker API access. 
**Docker Compose:** + ```yaml services: charon: @@ -56,6 +57,7 @@ services: ``` **Docker Run:** + ```bash docker run -v /var/run/docker.sock:/var/run/docker.sock:ro charon ``` diff --git a/docs/features/notifications.md b/docs/features/notifications.md index 6dc421dd..9166d18b 100644 --- a/docs/features/notifications.md +++ b/docs/features/notifications.md @@ -16,7 +16,10 @@ Notifications can be triggered by various events: | Service | JSON Templates | Native API | Rich Formatting | |---------|----------------|------------|-----------------| | **Discord** | ✅ Yes | ✅ Webhooks | ✅ Embeds | +| **Slack** | ✅ Yes | ✅ Webhooks | ✅ Native Formatting | | **Gotify** | ✅ Yes | ✅ HTTP API | ✅ Priority + Extras | +| **Pushover** | ✅ Yes | ✅ HTTP API | ✅ Priority + Sound | +| **Ntfy** | ✅ Yes | ✅ HTTP API | ✅ Priority + Tags | | **Custom Webhook** | ✅ Yes | ✅ HTTP API | ✅ Template-Controlled | | **Email** | ❌ No | ✅ SMTP | ✅ HTML Branded Templates | @@ -36,8 +39,6 @@ Email notifications send HTML-branded alerts directly to one or more email addre Email notifications use built-in HTML templates with Charon branding — no JSON template editing is required. -> **Feature Flag:** Email notifications must be enabled via `feature.notifications.service.email.enabled` in **Settings** → **Feature Flags** before the Email provider option appears. - ### Why JSON Templates? JSON templates give you complete control over notification formatting, allowing you to: @@ -60,7 +61,7 @@ JSON templates give you complete control over notification formatting, allowing ### JSON Template Support -For JSON-based services (Discord, Gotify, and Custom Webhook), you can choose from three template options. Email uses its own built-in HTML templates and does not use JSON templates. +For JSON-based services (Discord, Slack, Gotify, and Custom Webhook), you can choose from three template options. Email uses its own built-in HTML templates and does not use JSON templates. #### 1. 
Minimal Template (Default) @@ -174,11 +175,141 @@ Discord supports rich embeds with colors, fields, and timestamps. - `16776960` - Yellow (warning) - `3066993` - Green (success) +### Slack Webhooks + +Slack notifications send messages to a channel using an Incoming Webhook URL. + +**Setup:** + +1. In Slack, go to **[Your Apps](https://api.slack.com/apps)** → **Create New App** → **From scratch** +2. Under **Features**, select **Incoming Webhooks** and toggle it **on** +3. Click **"Add New Webhook to Workspace"** and choose the channel to post to +4. Copy the Webhook URL (it looks like `https://hooks.slack.com/services/T.../B.../...`) +5. In Charon, go to **Settings** → **Notifications** and click **"Add Provider"** +6. Select **Slack** as the service type +7. Paste your Webhook URL into the **Webhook URL** field +8. Optionally enter a channel display name (e.g., `#alerts`) for easy identification +9. Configure notification triggers and save + +> **Security:** Your Webhook URL is stored securely and is never exposed in API responses. The settings page only shows a `has_token: true` indicator, so your URL stays private even if someone gains read-only access to the API. + +#### Basic Message + +```json +{ + "text": "{{.Title}}: {{.Message}}" +} +``` + +#### Formatted Message with Context + +```json +{ + "text": "*{{.Title}}*\n{{.Message}}\n\n• *Event:* {{.EventType}}\n• *Host:* {{.HostName}}\n• *Severity:* {{.Severity}}\n• *Time:* {{.Timestamp}}" +} +``` + +**Slack formatting tips:** + +- Use `*bold*` for emphasis +- Use `\n` for line breaks +- Use `•` for bullet points +- Slack automatically linkifies URLs + +### Pushover + +Pushover delivers push notifications directly to your iOS, Android, or desktop devices. + +**Setup:** + +1. Create an account at [pushover.net](https://pushover.net) and install the Pushover app on your device +2. From your Pushover dashboard, copy your **User Key** +3. Create a new **Application/API Token** for Charon +4. 
In Charon, go to **Settings** → **Notifications** and click **"Add Provider"** +5. Select **Pushover** as the service type +6. Enter your **Application API Token** in the token field +7. Enter your **User Key** in the User Key field +8. Configure notification triggers and save + +> **Security:** Your Application API Token is stored securely and is never exposed in API responses. + +#### Basic Message + +```json +{ + "title": "{{.Title}}", + "message": "{{.Message}}" +} +``` + +#### Message with Priority + +```json +{ + "title": "{{.Title}}", + "message": "{{.Message}}", + "priority": 1 +} +``` + +**Pushover priority levels:** + +- `-2` - Lowest (no sound or vibration) +- `-1` - Low (quiet) +- `0` - Normal (default) +- `1` - High (bypass quiet hours) + +> **Note:** Emergency priority (`2`) is not supported and will be rejected with a clear error. + +### Ntfy + +Ntfy delivers push notifications to your phone or desktop using a simple HTTP-based publish/subscribe model. Works with the free hosted service at [ntfy.sh](https://ntfy.sh) or your own self-hosted instance. + +**Setup:** + +1. Pick a topic name (or use an existing one) on [ntfy.sh](https://ntfy.sh) or your self-hosted server +2. In Charon, go to **Settings** → **Notifications** and click **"Add Provider"** +3. Select **Ntfy** as the service type +4. Enter your Topic URL (e.g., `https://ntfy.sh/charon-alerts` or `https://ntfy.example.com/charon-alerts`) +5. (Optional) Add an access token if your topic requires authentication +6. Configure notification triggers and save + +> **Security:** Your access token is stored securely and is never exposed in API responses. 
+ +#### Basic Message + +```json +{ + "topic": "charon-alerts", + "title": "{{.Title}}", + "message": "{{.Message}}" +} +``` + +#### Message with Priority and Tags + +```json +{ + "topic": "charon-alerts", + "title": "{{.Title}}", + "message": "{{.Message}}", + "priority": 4, + "tags": ["rotating_light"] +} +``` + +**Ntfy priority levels:** + +- `1` - Min +- `2` - Low +- `3` - Default +- `4` - High +- `5` - Max (urgent) + ## Planned Provider Expansion -Additional providers (for example Slack and Telegram) are planned for later -staged releases. This page will be expanded as each provider is validated and -released. +Additional providers (for example Telegram) are planned for later staged +releases. This page will be expanded as each provider is validated and released. ## Template Variables @@ -341,6 +472,7 @@ Use separate Discord providers for different event types: Be mindful of service limits: - **Discord**: 5 requests per 2 seconds per webhook +- **Slack**: 1 request per second per webhook - **Email**: Subject to your SMTP server's sending limits ### 6. 
Keep Templates Maintainable diff --git a/docs/features/plugin-security.md b/docs/features/plugin-security.md index 067e1907..a3b7b723 100644 --- a/docs/features/plugin-security.md +++ b/docs/features/plugin-security.md @@ -35,18 +35,21 @@ CHARON_PLUGIN_SIGNATURES='{"pluginname": "sha256:..."}' ### Examples **Permissive mode (default)**: + ```bash # Unset — all plugins load without verification unset CHARON_PLUGIN_SIGNATURES ``` **Strict block-all**: + ```bash # Empty object — no external plugins will load export CHARON_PLUGIN_SIGNATURES='{}' ``` **Allowlist specific plugins**: + ```bash # Only powerdns and custom-provider plugins are allowed export CHARON_PLUGIN_SIGNATURES='{"powerdns": "sha256:a1b2c3d4...", "custom-provider": "sha256:e5f6g7h8..."}' @@ -63,6 +66,7 @@ sha256sum myplugin.so | awk '{print "sha256:" $1}' ``` **Example output**: + ``` sha256:a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6q7r8s9t0u1v2w3x4y5z6a7b8c9d0e1f2 ``` @@ -96,6 +100,7 @@ services: ``` This prevents runtime modification of plugin files, mitigating: + - Time-of-check to time-of-use (TOCTOU) attacks - Malicious plugin replacement after signature verification @@ -113,6 +118,7 @@ services: ``` Or in Dockerfile: + ```dockerfile FROM charon:latest USER charon @@ -128,6 +134,7 @@ Plugin directories must **not** be world-writable. 
Charon enforces this at start | `0777` (world-writable) | ❌ Rejected — plugin loading disabled | **Set secure permissions**: + ```bash chmod 755 /path/to/plugins chmod 644 /path/to/plugins/*.so # Or 755 for executable @@ -192,22 +199,26 @@ After updating plugins, always update your `CHARON_PLUGIN_SIGNATURES` with the n ### Checking if a Plugin Loaded **Check startup logs**: + ```bash docker compose logs charon | grep -i plugin ``` **Expected success output**: + ``` INFO Loaded DNS provider plugin type=powerdns name="PowerDNS" version="1.0.0" INFO Loaded 1 external DNS provider plugins (0 failed) ``` **If using allowlist**: + ``` INFO Plugin signature allowlist enabled with 2 entries ``` **Via API**: + ```bash curl http://localhost:8080/api/admin/plugins \ -H "Authorization: Bearer YOUR-TOKEN" @@ -220,6 +231,7 @@ curl http://localhost:8080/api/admin/plugins \ **Cause**: The plugin filename (without `.so`) is not in `CHARON_PLUGIN_SIGNATURES`. **Solution**: Add the plugin to your allowlist: + ```bash # Get the signature sha256sum powerdns.so | awk '{print "sha256:" $1}' @@ -233,6 +245,7 @@ export CHARON_PLUGIN_SIGNATURES='{"powerdns": "sha256:YOUR_HASH_HERE"}' **Cause**: The plugin file's SHA-256 hash doesn't match the allowlist. **Solution**: + 1. Verify you have the correct plugin file 2. Re-compute the signature: `sha256sum plugin.so` 3. Update `CHARON_PLUGIN_SIGNATURES` with the correct hash @@ -242,6 +255,7 @@ export CHARON_PLUGIN_SIGNATURES='{"powerdns": "sha256:YOUR_HASH_HERE"}' **Cause**: The plugin directory is world-writable (mode `0777` or similar). **Solution**: + ```bash chmod 755 /path/to/plugins chmod 644 /path/to/plugins/*.so @@ -252,11 +266,13 @@ chmod 644 /path/to/plugins/*.so **Cause**: Malformed JSON in the environment variable. **Solution**: Validate your JSON: + ```bash echo '{"powerdns": "sha256:abc123"}' | jq . 
``` Common issues: + - Missing quotes around keys or values - Trailing commas - Single quotes instead of double quotes @@ -266,6 +282,7 @@ Common issues: **Cause**: File permissions too restrictive or ownership mismatch. **Solution**: + ```bash # Check current permissions ls -la /path/to/plugins/ @@ -278,27 +295,32 @@ chown charon:charon /path/to/plugins/*.so ### Debugging Checklist 1. **Is the plugin directory configured?** + ```bash echo $CHARON_PLUGINS_DIR ``` 2. **Does the plugin file exist?** + ```bash ls -la $CHARON_PLUGINS_DIR/*.so ``` 3. **Are directory permissions secure?** + ```bash stat -c "%a %n" $CHARON_PLUGINS_DIR # Should be 755 or stricter ``` 4. **Is the signature correct?** + ```bash sha256sum $CHARON_PLUGINS_DIR/myplugin.so ``` 5. **Is the JSON valid?** + ```bash echo "$CHARON_PLUGIN_SIGNATURES" | jq . ``` diff --git a/docs/features/proxy-headers.md b/docs/features/proxy-headers.md index a6c514cc..d77730fe 100644 --- a/docs/features/proxy-headers.md +++ b/docs/features/proxy-headers.md @@ -69,22 +69,26 @@ X-Forwarded-Host preserves the original domain: Your backend must trust proxy headers from Charon. Common configurations: **Node.js/Express:** + ```javascript app.set('trust proxy', true); ``` **Django:** + ```python SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') USE_X_FORWARDED_HOST = True ``` **Rails:** + ```ruby config.action_dispatch.trusted_proxies = [IPAddr.new('10.0.0.0/8')] ``` **PHP/Laravel:** + ```php // In TrustProxies middleware protected $proxies = '*'; diff --git a/docs/features/ssl-certificates.md b/docs/features/ssl-certificates.md index 45b7515a..1b3cc5e8 100644 --- a/docs/features/ssl-certificates.md +++ b/docs/features/ssl-certificates.md @@ -62,6 +62,21 @@ When you delete a proxy host, Charon automatically: This prevents certificate accumulation and keeps your system tidy. +## Manual Certificate Deletion + +Over time, expired or unused certificates can pile up in the Certificates list. 
You can remove them manually: + +| Certificate Type | When You Can Delete It | +|------------------|----------------------| +| **Expired Let's Encrypt** | When it's not attached to any proxy host | +| **Custom (uploaded)** | When it's not attached to any proxy host | +| **Staging** | When it's not attached to any proxy host | +| **Valid Let's Encrypt** | Managed automatically — no delete button shown | + +If a certificate is still attached to a proxy host, the delete button is disabled and a tooltip explains which host is using it. Remove the certificate from the proxy host first, then come back to delete it. + +A confirmation dialog appears before anything is removed. Charon creates a backup before deleting, so you have a safety net. + ## Troubleshooting | Issue | Solution | diff --git a/docs/getting-started.md b/docs/getting-started.md index f4ac3076..88e69be2 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -21,6 +21,24 @@ Imagine you have several apps running on your computer. Maybe a blog, a file sto ## Step 1: Install Charon +### Required Secrets (Generate Before Installing) + +Two secrets must be set before starting Charon. Omitting them will cause **sessions to reset on every container restart**, locking users out. + +Generate both values now and keep them somewhere safe: + +```bash +# JWT secret — signs and validates login sessions +openssl rand -hex 32 + +# Encryption key — protects stored credentials at rest +openssl rand -base64 32 +``` + +> **Why this matters:** If `CHARON_JWT_SECRET` is not set, Charon generates a random key on each boot. Any active login session becomes invalid the moment the container restarts, producing a "Session validation failed" error. 
+ +--- + ### Option A: Docker Compose (Easiest) Create a file called `docker-compose.yml`: @@ -43,6 +61,8 @@ services: - /var/run/docker.sock:/var/run/docker.sock:ro environment: - CHARON_ENV=production + - CHARON_JWT_SECRET= + - CHARON_ENCRYPTION_KEY= ``` Then run: @@ -64,6 +84,8 @@ docker run -d \ -v ./charon-data:/app/data \ -v /var/run/docker.sock:/var/run/docker.sock:ro \ -e CHARON_ENV=production \ + -e CHARON_JWT_SECRET= \ + -e CHARON_ENCRYPTION_KEY= \ wikid82/charon:latest ``` @@ -78,6 +100,8 @@ docker run -d \ -v ./charon-data:/app/data \ -v /var/run/docker.sock:/var/run/docker.sock:ro \ -e CHARON_ENV=production \ + -e CHARON_JWT_SECRET= \ + -e CHARON_ENCRYPTION_KEY= \ ghcr.io/wikid82/charon:latest ``` @@ -205,16 +229,19 @@ The emergency token is a security feature that allows bypassing all security mod Choose your platform: **Linux/macOS (recommended):** + ```bash openssl rand -hex 32 ``` **Windows PowerShell:** + ```powershell [Convert]::ToBase64String([System.Security.Cryptography.RandomNumberGenerator]::GetBytes(32)) ``` **Node.js (all platforms):** + ```bash node -e "console.log(require('crypto').randomBytes(32).toString('hex'))" ``` @@ -228,11 +255,13 @@ CHARON_EMERGENCY_TOKEN= ``` **Example:** + ```bash CHARON_EMERGENCY_TOKEN=7b3b8a36a6fad839f1b3122131ed4b1f05453118a91b53346482415796e740e2 ``` **Verify:** + ```bash # Token should be exactly 64 characters echo -n "$(grep CHARON_EMERGENCY_TOKEN .env | cut -d= -f2)" | wc -c @@ -263,20 +292,23 @@ For continuous integration, store the token in GitHub Secrets: ### Security Best Practices ✅ **DO:** + - Generate tokens using cryptographically secure methods - Store in `.env` (gitignored) or secrets management - Rotate quarterly or after security events - Use minimum 64 characters ❌ **DON'T:** + - Commit tokens to repository (even in examples) - Share tokens via email or chat - Use weak or predictable values - Reuse tokens across environments --- -2. 
**Settings table** for `security.crowdsec.enabled = "true"` -3. **Starts CrowdSec** if either condition is true + +1. **Settings table** for `security.crowdsec.enabled = "true"` +2. **Starts CrowdSec** if either condition is true **How it works:** @@ -558,7 +590,7 @@ Click "Watch" → "Custom" → Select "Security advisories" on the [Charon repos **2. Notifications and Automatic Updates with Dockhand** - - Dockhand is a free service that monitors Docker images for updates and can send notifications or trigger auto-updates. https://github.com/Finsys/dockhand +- Dockhand is a free service that monitors Docker images for updates and can send notifications or trigger auto-updates. <https://github.com/Finsys/dockhand> **Best Practices:** diff --git a/docs/github-setup.md b/docs/github-setup.md index 9f211530..09265e0c 100644 --- a/docs/github-setup.md +++ b/docs/github-setup.md @@ -68,6 +68,7 @@ E2E tests require an emergency token to be configured in GitHub Secrets. This to ### Why This Is Needed The emergency token is used by E2E tests to: + - Disable security modules (ACL, WAF, CrowdSec) after testing them - Prevent cascading test failures due to leftover security state - Ensure tests can always access the API regardless of security configuration @@ -77,16 +78,19 @@ The emergency token is used by E2E tests to: 1. 
**Generate emergency token:** **Linux/macOS:** + ```bash openssl rand -hex 32 ``` **Windows PowerShell:** + ```powershell [Convert]::ToBase64String([System.Security.Cryptography.RandomNumberGenerator]::GetBytes(32)) ``` **Node.js (all platforms):** + ```bash node -e "console.log(require('crypto').randomBytes(32).toString('hex'))" ``` @@ -141,11 +145,13 @@ If the secret is missing or invalid, the workflow will fail with a clear error m ### Security Best Practices ✅ **DO:** + - Use cryptographically secure generation methods - Rotate quarterly or after security events - Store separately for local dev (`.env`) and CI/CD (GitHub Secrets) ❌ **DON'T:** + - Share tokens via email or chat - Commit tokens to repository (even in example files) - Reuse tokens across different environments @@ -154,11 +160,13 @@ If the secret is missing or invalid, the workflow will fail with a clear error m ### Troubleshooting **Error: "CHARON_EMERGENCY_TOKEN not set"** + - Check secret name is exactly `CHARON_EMERGENCY_TOKEN` (case-sensitive) - Verify secret is repository-level, not environment-level - Re-run workflow after adding secret **Error: "Token too short"** + - Hex method must generate exactly 64 characters - Verify you copied the entire token value - Regenerate if needed diff --git a/docs/guides/crowdsec-setup.md b/docs/guides/crowdsec-setup.md index c93b1b84..c6c889e8 100644 --- a/docs/guides/crowdsec-setup.md +++ b/docs/guides/crowdsec-setup.md @@ -88,6 +88,7 @@ In CrowdSec terms: > **✅ Good News: Charon Handles This For You!** > > When you enable CrowdSec for the first time, Charon automatically: +> > 1. Starts the CrowdSec engine > 2. Registers a bouncer and generates a valid API key > 3. Saves the key so it survives container restarts @@ -317,11 +318,13 @@ Replace `YOUR_ENROLLMENT_KEY` with the key from your Console. **Solution:** 1. Check if you're manually setting an API key: + ```bash grep -i "crowdsec_api_key" docker-compose.yml ``` 2. 
If you find one, **remove it**: + ```yaml # REMOVE this line: - CHARON_SECURITY_CROWDSEC_API_KEY=anything @@ -330,6 +333,7 @@ Replace `YOUR_ENROLLMENT_KEY` with the key from your Console. 3. Follow the [Manual Bouncer Registration](#manual-bouncer-registration) steps above 4. Restart the container: + ```bash docker restart charon ``` @@ -347,6 +351,7 @@ Replace `YOUR_ENROLLMENT_KEY` with the key from your Console. 1. Wait 60 seconds after container start 2. Check if CrowdSec is running: + ```bash docker exec charon cscli lapi status ``` @@ -354,6 +359,7 @@ Replace `YOUR_ENROLLMENT_KEY` with the key from your Console. 3. If you see "connection refused," try toggling CrowdSec OFF then ON in the GUI 4. Check the logs: + ```bash docker logs charon | grep -i crowdsec ``` @@ -431,6 +437,7 @@ If you already run CrowdSec separately (not inside Charon), you can connect to i **Steps:** 1. Register a bouncer on your external CrowdSec: + ```bash cscli bouncers add charon-bouncer ``` @@ -438,6 +445,7 @@ If you already run CrowdSec separately (not inside Charon), you can connect to i 2. Save the API key that's generated (you won't see it again!) 3. In your docker-compose.yml: + ```yaml environment: - CHARON_SECURITY_CROWDSEC_API_URL=http://your-crowdsec-server:8080 @@ -445,6 +453,7 @@ If you already run CrowdSec separately (not inside Charon), you can connect to i ``` 4. Restart Charon: + ```bash docker restart charon ``` diff --git a/docs/issues/certificate-delete-manual-test.md b/docs/issues/certificate-delete-manual-test.md new file mode 100644 index 00000000..0188a0df --- /dev/null +++ b/docs/issues/certificate-delete-manual-test.md @@ -0,0 +1,68 @@ +--- +title: "Manual Testing: Certificate Deletion UX Enhancement" +labels: + - testing + - feature + - frontend +priority: medium +assignees: [] +--- + +# Manual Testing: Certificate Deletion UX Enhancement + +## Description + +Manual test plan for expanded certificate deletion. 
Focuses on edge cases and race conditions that automated E2E tests cannot fully cover. + +## Pre-requisites + +- A running Charon instance with certificates in various states: + - At least one expired Let's Encrypt certificate **not** attached to a proxy host + - At least one custom (uploaded) certificate **not** attached to a proxy host + - At least one certificate **attached** to a proxy host (in use) + - At least one valid (non-expired) Let's Encrypt production certificate not in use +- Access to the Charon Certificates page + +## Test Cases + +### Happy Path + +- [ ] **Delete expired LE cert not in use**: Click the delete button on an expired Let's Encrypt certificate that is not attached to any proxy host. Confirm in the dialog. Certificate disappears from the list and a success toast appears. +- [ ] **Delete custom cert not in use**: Click the delete button on an uploaded custom certificate not attached to any host. Confirm. Certificate is removed with a success toast. +- [ ] **Delete staging cert not in use**: Click the delete button on a staging certificate not attached to any host. Confirm. Certificate is removed with a success toast. + +### Delete Prevention + +- [ ] **In-use cert shows disabled button**: Find a certificate attached to a proxy host. Verify the delete button is visible but disabled. +- [ ] **In-use cert tooltip**: Hover over the disabled delete button. A tooltip should explain that the certificate is in use and cannot be deleted. +- [ ] **Valid LE cert hides delete button**: Find a valid (non-expired) Let's Encrypt production certificate not attached to any host. Verify no delete button is shown — Charon manages these automatically. + +### Confirmation Dialog + +- [ ] **Cancel does not delete**: Click the delete button on a deletable certificate. In the confirmation dialog, click Cancel. The certificate should remain in the list. +- [ ] **Escape key closes dialog**: Open the confirmation dialog. Press Escape. 
The dialog closes and the certificate remains. +- [ ] **Click overlay closes dialog**: Open the confirmation dialog. Click outside the dialog (on the overlay). The dialog closes and the certificate remains. +- [ ] **Confirm deletes**: Open the confirmation dialog. Click the Delete/Confirm button. The certificate is removed and a success toast appears. + +### Keyboard Navigation + +- [ ] **Tab through dialog**: Open the confirmation dialog. Press Tab to move focus between the Cancel and Delete buttons. Focus order should be logical (Cancel → Delete or Delete → Cancel). +- [ ] **Enter activates focused button**: Tab to the Cancel button and press Enter — dialog closes, certificate remains. Repeat with the Delete button — certificate is removed. +- [ ] **Focus trap**: With the dialog open, Tab should cycle within the dialog and not escape to the page behind it. + +### Edge Cases & Race Conditions + +- [ ] **Rapid double-click on delete**: Quickly double-click the delete button. Only one confirmation dialog should appear. Only one delete request should be sent. +- [ ] **Cert becomes in-use between dialog open and confirm**: Open the delete dialog for a certificate. In another tab, attach that certificate to a proxy host. Return and confirm deletion. The server should return a 409 error and the UI should show an appropriate error message — the certificate should remain. +- [ ] **Delete when backup may fail (low disk space)**: If testable, simulate low disk space. Attempt a deletion. The server creates a backup before deleting — verify the error is surfaced to the user if the backup fails. +- [ ] **Network error during delete**: Open the delete dialog and disconnect from the network (or throttle to offline in DevTools). Confirm deletion. An error message should appear and the certificate should remain. + +### Visual & UX Consistency + +- [ ] **Dialog styling**: The confirmation dialog should match the application theme (dark/light mode). 
+- [ ] **Toast messages**: Success and error toasts should appear in the expected position and auto-dismiss. +- [ ] **List updates without full reload**: After a successful deletion, the certificate list should update without requiring a page refresh. + +## Related + +- [Automatic HTTPS Certificates](../features/ssl-certificates.md) diff --git a/docs/issues/ntfy-notification-provider-manual-testing.md b/docs/issues/ntfy-notification-provider-manual-testing.md new file mode 100644 index 00000000..51d38eb5 --- /dev/null +++ b/docs/issues/ntfy-notification-provider-manual-testing.md @@ -0,0 +1,98 @@ +--- +title: "Manual Testing: Ntfy Notification Provider" +labels: + - testing + - feature + - frontend + - backend +priority: medium +milestone: "v0.2.0-beta.2" +assignees: [] +--- + +# Manual Testing: Ntfy Notification Provider + +## Description + +Manual testing plan for the Ntfy notification provider feature. Covers UI/UX +validation, dispatch behavior, token security, and edge cases that E2E tests +cannot fully cover. 
+ +## Prerequisites + +- Ntfy instance accessible (cloud: ntfy.sh, or self-hosted) +- Test topic created (e.g., `https://ntfy.sh/charon-test-XXXX`) +- Ntfy mobile/desktop app installed for push verification +- Optional: password-protected topic with access token for auth testing + +## Test Cases + +### UI/UX Validation + +- [ ] Select "Ntfy" from provider type dropdown — token field and "Topic URL" label appear +- [ ] URL placeholder shows `https://ntfy.sh/my-topic` +- [ ] Token label shows "Access Token (optional)" +- [ ] Token field is a password field (dots, not cleartext) +- [ ] JSON template section (minimal/detailed/custom) appears for Ntfy +- [ ] Switching from Ntfy to Discord clears token field and hides it +- [ ] Switching from Discord to Ntfy shows token field again +- [ ] URL field is required — form rejects empty URL submission +- [ ] Keyboard navigation: tab through all Ntfy form fields without focus traps + +### CRUD Operations + +- [ ] Create Ntfy provider with URL only (no token) — succeeds +- [ ] Create Ntfy provider with URL + token — succeeds +- [ ] Edit Ntfy provider: change URL — preserves token (shows "Leave blank to keep") +- [ ] Edit Ntfy provider: clear and re-enter token — updates token +- [ ] Delete Ntfy provider — removed from list +- [ ] Create multiple Ntfy providers with different topics — all coexist + +### Dispatch Verification (Requires Real Ntfy Instance) + +- [ ] Send test notification to ntfy.sh cloud topic — push received on device +- [ ] Send test notification to self-hosted ntfy instance — push received +- [ ] Send test notification with minimal template — message body is correct +- [ ] Send test notification with detailed template — title and body formatted correctly +- [ ] Send test notification with custom JSON template — all fields arrive as specified +- [ ] Token-protected topic with valid token — notification delivered +- [ ] Token-protected topic with no token — notification rejected by ntfy (expected 401) +- [ ] 
Token-protected topic with invalid token — notification rejected by ntfy (expected 401) + +### Token Security + +- [ ] After creating provider with token: GET provider response has `has_token: true` but no raw token +- [ ] Browser DevTools Network tab: confirm token never appears in any API response body +- [ ] Edit provider: token field is empty (not pre-filled with existing token) +- [ ] Application logs: confirm no token values in backend logs during dispatch + +### Edge Cases + +- [ ] Invalid URL (not http/https) — form validation rejects +- [ ] Self-hosted ntfy URL with non-standard port (e.g., `http://192.168.1.50:8080/alerts`) — accepted and dispatches +- [ ] Very long topic name in URL — accepted +- [ ] Unicode characters in message template — dispatches correctly +- [ ] Feature flag disabled (`feature.notifications.service.ntfy.enabled = false`) — ntfy dispatch silently skipped +- [ ] Network timeout to unreachable ntfy server — error handled gracefully, no crash + +### Accessibility + +- [ ] Screen reader: form field labels announced correctly for Ntfy fields +- [ ] Screen reader: token help text associated via aria-describedby +- [ ] High contrast mode: Ntfy form fields visible and readable +- [ ] Voice access: "Click Topic URL" activates the correct field +- [ ] Keyboard only: complete full CRUD workflow without mouse + +## Acceptance Criteria + +- [ ] All UI/UX tests pass +- [ ] All CRUD operations work correctly +- [ ] At least one real dispatch to ntfy.sh confirmed +- [ ] Token never exposed in API responses or logs +- [ ] No accessibility regressions + +## Related + +- Spec: `docs/plans/current_spec.md` +- QA Report: `docs/reports/qa_report_ntfy_notifications.md` +- E2E Tests: `tests/settings/ntfy-notification-provider.spec.ts` diff --git a/docs/issues/pr5-tcp-monitor-ux-manual-test.md b/docs/issues/pr5-tcp-monitor-ux-manual-test.md new file mode 100644 index 00000000..4422dd63 --- /dev/null +++ b/docs/issues/pr5-tcp-monitor-ux-manual-test.md @@ 
-0,0 +1,161 @@ +--- +title: "Manual Test Plan - PR-5 TCP Monitor UX Fix" +labels: + - testing + - frontend + - bug +priority: high +--- + +# Manual Test Plan - PR-5 TCP Monitor UX Fix + +## Scope + +PR-5 only. + +This plan covers manual verification of the five UX fixes applied to the TCP monitor creation form: + +1. Corrected URL placeholder (removed misleading `tcp://` prefix) +2. Dynamic per-type placeholder (HTTP vs TCP) +3. Per-type helper text below the URL input +4. Client-side TCP scheme validation with inline error +5. Form field reorder: type selector now appears before URL input + +Out of scope: +- Backend monitor logic or storage +- Any other monitor type beyond HTTP and TCP +- Notification provider changes + +## Preconditions + +- [ ] Environment is running (Docker E2E or local dev). +- [ ] Tester can access the Monitors page and open the Create Monitor modal. +- [ ] Browser DevTools Network tab is available for TC-PR5-007. + +--- + +## Track A — Smoke Tests (Existing HTTP Behaviour) + +### TC-PR5-001 HTTP monitor creation still works + +- [ ] Open the Create Monitor modal. +- [ ] Select type **HTTP**. +- [ ] Enter a valid URL: `https://example.com`. +- [ ] Fill in remaining required fields and click **Create**. +- Expected result: monitor is created successfully; no errors shown. +- Status: [ ] Not run [ ] Pass [ ] Fail +- Notes: + +### TC-PR5-009 Existing HTTP monitors display correctly in the list + +- [ ] Navigate to the Monitors list. +- [ ] Confirm any pre-existing HTTP monitors are still shown with correct URLs. +- Expected result: no regressions in list display. +- Status: [ ] Not run [ ] Pass [ ] Fail +- Notes: + +### TC-PR5-010 Existing TCP monitors display correctly in the list + +- [ ] Navigate to the Monitors list. +- [ ] Confirm any pre-existing TCP monitors are still shown with correct host:port values. +- Expected result: no regressions in list display. 
+- Status: [ ] Not run [ ] Pass [ ] Fail +- Notes: + +--- + +## Track B — Core Fix (TCP Scheme Validation) + +### TC-PR5-002 TCP monitor with `tcp://` prefix shows inline error and blocks submission + +- [ ] Open the Create Monitor modal. +- [ ] Select type **TCP**. +- [ ] Enter URL: `tcp://192.168.1.1:8080`. +- [ ] Click **Create** (or attempt to submit). +- Expected result: an inline error appears on the URL field; the form is not submitted; no new monitor appears in the list. +- Status: [ ] Not run [ ] Pass [ ] Fail +- Notes: + +### TC-PR5-003 TCP monitor with valid `host:port` format succeeds + +- [ ] Open the Create Monitor modal. +- [ ] Select type **TCP**. +- [ ] Enter URL: `192.168.1.1:8080`. +- [ ] Fill in remaining required fields and click **Create**. +- Expected result: monitor is created successfully; no errors shown. +- Status: [ ] Not run [ ] Pass [ ] Fail +- Notes: + +--- + +## Track C — Dynamic Placeholder & Helper Text + +### TC-PR5-005 Form field order: Type selector appears above URL input + +- [ ] Open the Create Monitor modal. +- [ ] Inspect the visual layout of the form. +- Expected result: the monitor **Type** selector is positioned above the **URL** input field. +- Status: [ ] Not run [ ] Pass [ ] Fail +- Notes: + +### TC-PR5-006 Helper text updates when switching between HTTP and TCP + +- [ ] Open the Create Monitor modal. +- [ ] Select type **HTTP** and note the helper text shown beneath the URL input. +- [ ] Switch type to **TCP** and note the helper text again. +- Expected result: helper text differs between HTTP and TCP types, giving format guidance appropriate to each. +- Status: [ ] Not run [ ] Pass [ ] Fail +- Notes: + +### TC-PR5-006b Placeholder updates when switching between HTTP and TCP + +- [ ] Open the Create Monitor modal. +- [ ] Select type **HTTP** and note the URL input placeholder. +- [ ] Switch type to **TCP** and note the placeholder again. +- Expected result: HTTP placeholder shows a full URL (e.g. 
`https://example.com`); TCP placeholder shows `host:port` format (no scheme). +- Status: [ ] Not run [ ] Pass [ ] Fail +- Notes: + +--- + +## Track D — Interaction Edge Cases + +### TC-PR5-004 Switching type from TCP (with error) to HTTP clears the inline error + +- [ ] Open the Create Monitor modal. +- [ ] Select type **TCP** and enter `tcp://192.168.1.1:8080` to trigger the inline error. +- [ ] Switch type to **HTTP**. +- Expected result: the scheme-prefix inline error disappears immediately after the type change. +- Status: [ ] Not run [ ] Pass [ ] Fail +- Notes: + +### TC-PR5-007 Submit guard: no API call fires when scheme prefix error is present + +- [ ] Open browser DevTools and go to the **Network** tab. +- [ ] Open the Create Monitor modal. +- [ ] Select type **TCP** and enter `tcp://192.168.1.1:8080`. +- [ ] Click **Create**. +- Expected result: the inline error is shown and no outbound POST/PUT request to the monitors API endpoint appears in the Network tab. +- Status: [ ] Not run [ ] Pass [ ] Fail +- Notes: + +--- + +## Track E — Localisation (Optional) + +### TC-PR5-008 New translation keys appear correctly in non-English locales + +- [ ] Switch the application language to **German**, **French**, **Spanish**, or **Chinese**. +- [ ] Open the Create Monitor modal and select type **TCP**. +- [ ] Observe the URL placeholder, helper text, and inline error message (trigger it with `tcp://host:port`). +- Expected result: all three UI strings appear in the selected language without showing raw translation key strings (e.g. no `urlPlaceholder.tcp` visible to the user). 
+- Status: [ ] Not run [ ] Pass [ ] Fail +- Notes: + +--- + +## Sign-off + +| Tester | Date | Environment | Result | +|--------|------|-------------|--------| +| | | | | diff --git a/docs/issues/slack-manual-testing.md b/docs/issues/slack-manual-testing.md new file mode 100644 index 00000000..7a2fc373 --- /dev/null +++ b/docs/issues/slack-manual-testing.md @@ -0,0 +1,76 @@ +--- +title: "Manual Testing: Slack Notification Provider" +labels: + - testing + - feature + - frontend + - backend +priority: medium +milestone: "v0.2.0-beta.2" +assignees: [] +--- + +# Manual Testing: Slack Notification Provider + +## Description + +Manual test plan for the Slack notification provider feature. Covers scenarios that automated E2E tests cannot fully validate, such as real Slack workspace delivery, message formatting, and edge cases around webhook lifecycle. + +## Pre-requisites + +- A Slack workspace with at least one channel +- An Incoming Webhook URL created via Slack App configuration (https://api.slack.com/messaging/webhooks) +- Access to the Charon instance + +## Test Cases + +### Provider CRUD + +- [ ] **Create**: Add a Slack provider with a valid webhook URL and optional channel name (`#alerts`) +- [ ] **Edit**: Change the channel display name — verify webhook URL is preserved (not cleared) +- [ ] **Test**: Click "Send Test Notification" — verify message appears in Slack channel +- [ ] **Delete**: Remove the Slack provider — verify it no longer appears in the list +- [ ] **Re-create**: Add a new Slack provider after deletion — verify clean state + +### Security + +- [ ] Webhook URL is NOT visible in the provider list UI (only `has_token: true` indicator) +- [ ] Webhook URL is NOT returned in GET `/api/v1/notifications/providers` response body +- [ ] Editing an existing provider does NOT expose the webhook URL in any form field +- [ ] Browser DevTools Network tab shows no webhook URL in any API response + +### Message Delivery + +- [ ] Default template sends a readable 
notification to Slack +- [ ] Custom JSON template with `text` field renders correctly +- [ ] Custom JSON template with `blocks` renders Block Kit layout +- [ ] Notifications triggered by proxy host changes arrive in Slack +- [ ] Notifications triggered by certificate events arrive in Slack +- [ ] Notifications triggered by uptime events arrive in Slack (if enabled) + +### Error Handling + +- [ ] Invalid webhook URL (not matching `hooks.slack.com/services/` pattern) shows validation error +- [ ] Expired/revoked webhook URL returns `no_service` classification error +- [ ] Disabled feature flag (`feature.notifications.service.slack.enabled=false`) prevents Slack dispatch + +### Edge Cases + +- [ ] Creating provider with empty URL field succeeds (URL is optional channel display name) +- [ ] Very long channel name in URL field is handled gracefully +- [ ] Multiple Slack providers with different webhooks can coexist +- [ ] Switching provider type from Slack to Discord clears the token field appropriately +- [ ] Switching provider type from Discord to Slack shows the webhook URL input field + +### Cross-Browser + +- [ ] Provider CRUD works in Chrome/Chromium +- [ ] Provider CRUD works in Firefox +- [ ] Provider CRUD works in Safari/WebKit + +## Acceptance Criteria + +- [ ] All security test cases pass — webhook URL never exposed +- [ ] End-to-end message delivery confirmed in a real Slack workspace +- [ ] No console errors during any provider operations +- [ ] Feature flag correctly gates Slack functionality diff --git a/docs/issues/vite-8-beta-manual-testing.md b/docs/issues/vite-8-beta-manual-testing.md new file mode 100644 index 00000000..87bc6871 --- /dev/null +++ b/docs/issues/vite-8-beta-manual-testing.md @@ -0,0 +1,81 @@ +# Manual Testing: Vite 8.0.0-beta.18 Upgrade + +**Date:** 2026-03-12 +**Status:** Open +**Priority:** Medium +**Related Commit:** chore(frontend): upgrade to Vite 8 beta with Rolldown bundler + +--- + +## Context + +Vite 8 replaces Rollup with 
Rolldown (Rust-based bundler) and esbuild with Oxc for +JS transforms/minification. Lightning CSS replaces esbuild for CSS minification. These +are fundamental changes to the build pipeline that automated tests may not fully cover. + +## Manual Test Cases + +### 1. Production Build Output Verification + +- [ ] Deploy the Docker image to a staging environment +- [ ] Verify the application loads without console errors +- [ ] Verify all CSS renders correctly (Lightning CSS minification change) +- [ ] Check browser DevTools Network tab — confirm single JS bundle loads +- [ ] Verify sourcemaps work correctly in browser DevTools + +### 2. CJS Interop Regression Check + +Vite 8 changes how CommonJS default exports are handled. + +- [ ] Verify axios API calls succeed (login, proxy host CRUD, settings) +- [ ] Verify react-hot-toast notifications render on success/error actions +- [ ] Verify react-hook-form validation works on all forms +- [ ] Verify @tanstack/react-query data fetching and caching works + +### 3. Dynamic Import / Code Splitting + +The `codeSplitting: false` config replaces the old `inlineDynamicImports: true`. + +- [ ] Verify lazy-loaded routes load correctly +- [ ] Verify no "chunk load failed" errors during navigation +- [ ] Check that the React initialization issue (original reason for the workaround) does not resurface + +### 4. Development Server + +- [ ] Run `npm run dev` in frontend — verify HMR (Hot Module Replacement) works +- [ ] Make a CSS change — verify it hot-reloads without full page refresh +- [ ] Make a React component change — verify it hot-reloads preserving state +- [ ] Verify the dev server proxy to backend API still works + +### 5. Cross-Browser Verification + +Test in each browser to catch any Rolldown/Oxc output differences: + +- [ ] Chrome/Chromium — full functional test +- [ ] Firefox — full functional test +- [ ] Safari/WebKit — full functional test + +### 6. 
Docker Build Verification + +- [ ] Build Docker image on the target deployment architecture +- [ ] Verify the image starts and passes health checks +- [ ] Verify Rolldown native bindings resolve correctly (no missing .node errors) +- [ ] Test with `--platform=linux/amd64` explicitly + +### 7. Edge Cases + +- [ ] Test with browser cache cleared (ensure no stale Vite 7 chunks cached) +- [ ] Test login flow end-to-end +- [ ] Test certificate management flows +- [ ] Test DNS provider configuration +- [ ] Test access list creation and assignment + +## Known Issues to Monitor + +1. **Oxc Minifier assumptions** — if runtime errors occur after build but not in dev, the minifier is the likely cause. Disable with `build.minify: false` to diagnose. +2. **Lightning CSS bundle size** — may differ slightly from esbuild. Compare `dist/assets/` sizes. +3. **Beta software stability** — track Vite 8 releases for fixes to any issues found. + +## Pass Criteria + +All checkboxes above must be verified. Any failure should be filed as a separate issue with the `vite-8-beta` label. 
diff --git a/docs/maintenance/README.md b/docs/maintenance/README.md index 5ca7e03f..4a5e166b 100644 --- a/docs/maintenance/README.md +++ b/docs/maintenance/README.md @@ -9,6 +9,7 @@ This directory contains operational maintenance guides for keeping Charon runnin **When to use:** Docker build fails with GeoLite2-Country.mmdb checksum mismatch **Topics covered:** + - Automated weekly checksum verification workflow - Manual checksum update procedures (5 minutes) - Verification script for checking upstream changes @@ -16,6 +17,7 @@ This directory contains operational maintenance guides for keeping Charon runnin - Alternative sources if upstream mirrors are unavailable **Quick fix:** + ```bash # Download and update checksum automatically NEW_CHECKSUM=$(curl -fsSL "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" | sha256sum | cut -d' ' -f1) @@ -34,6 +36,7 @@ Found a maintenance issue not covered here? Please: 3. **Update this index** with a link to your guide **Format:** + ```markdown ### [Guide Title](filename.md) diff --git a/docs/maintenance/geolite2-checksum-update.md b/docs/maintenance/geolite2-checksum-update.md index d319f171..b6758e9b 100644 --- a/docs/maintenance/geolite2-checksum-update.md +++ b/docs/maintenance/geolite2-checksum-update.md @@ -15,6 +15,7 @@ Charon uses the [MaxMind GeoLite2-Country database](https://dev.maxmind.com/geoi Update the checksum when: 1. 
**Docker build fails** with the following error: + ``` sha256sum: /app/data/geoip/GeoLite2-Country.mmdb: FAILED sha256sum: WARNING: 1 computed checksum did NOT match @@ -29,6 +30,7 @@ Update the checksum when: ## Automated Workflow (Recommended) Charon includes a GitHub Actions workflow that automatically: + - Checks for upstream GeoLite2 database changes weekly - Calculates the new checksum - Creates a pull request with the update @@ -39,6 +41,7 @@ Charon includes a GitHub Actions workflow that automatically: **Schedule:** Mondays at 2 AM UTC (weekly) **Manual Trigger:** + ```bash gh workflow run update-geolite2.yml ``` @@ -75,16 +78,19 @@ sha256sum /tmp/geolite2-test.mmdb **File:** [`Dockerfile`](../../Dockerfile) (line ~352) **Find this line:** + ```dockerfile ARG GEOLITE2_COUNTRY_SHA256= ``` **Replace with the new checksum:** + ```dockerfile ARG GEOLITE2_COUNTRY_SHA256=436135ee98a521da715a6d483951f3dbbd62557637f2d50d1987fc048874bd5d ``` **Using sed (automated):** + ```bash NEW_CHECKSUM=$(curl -fsSL "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" | sha256sum | cut -d' ' -f1) @@ -119,6 +125,7 @@ docker run --rm charon:test-checksum /app/charon --version ``` **Expected output:** + ``` ✅ GeoLite2-Country.mmdb: OK ✅ Successfully tagged charon:test-checksum @@ -171,11 +178,13 @@ fi ``` **Make executable:** + ```bash chmod +x scripts/verify-geolite2-checksum.sh ``` **Run verification:** + ```bash ./scripts/verify-geolite2-checksum.sh ``` @@ -187,22 +196,26 @@ chmod +x scripts/verify-geolite2-checksum.sh ### Issue: Build Still Fails After Update **Symptoms:** + - Checksum verification fails - "FAILED" error persists **Solutions:** 1. **Clear Docker build cache:** + ```bash docker builder prune -af ``` 2. **Verify the checksum was committed:** + ```bash git show HEAD:Dockerfile | grep "GEOLITE2_COUNTRY_SHA256" ``` 3. 
**Re-download and verify upstream file:** + ```bash curl -fsSL "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" -o /tmp/test.mmdb sha256sum /tmp/test.mmdb @@ -212,28 +225,31 @@ chmod +x scripts/verify-geolite2-checksum.sh ### Issue: Upstream File Unavailable (404) **Symptoms:** + - `curl` returns 404 Not Found - Automated workflow fails with `download_failed` error **Investigation Steps:** 1. **Check upstream repository:** - - Visit: https://github.com/P3TERX/GeoLite.mmdb + - Visit: - Verify the file still exists at the raw URL - Check for repository status or announcements 2. **Check MaxMind status:** - - Visit: https://status.maxmind.com/ + - Visit: - Check for service outages or maintenance **Temporary Solutions:** 1. **Use cached Docker layer** (if available): + ```bash docker build --cache-from ghcr.io/wikid82/charon:latest -t charon:latest . ``` 2. **Use local copy** (temporary): + ```bash # Download from a working container docker run --rm ghcr.io/wikid82/charon:latest cat /app/data/geoip/GeoLite2-Country.mmdb > /tmp/GeoLite2-Country.mmdb @@ -249,12 +265,14 @@ chmod +x scripts/verify-geolite2-checksum.sh ### Issue: Checksum Mismatch on Re-download **Symptoms:** + - Checksum calculated locally differs from what's in the Dockerfile - Checksum changes between downloads **Investigation Steps:** 1. **Verify file integrity:** + ```bash # Download multiple times and compare for i in {1..3}; do @@ -267,12 +285,14 @@ chmod +x scripts/verify-geolite2-checksum.sh - Try from different network locations 3. 
**Verify no MITM proxy:** + ```bash # Download via HTTPS and verify certificate curl -v -fsSL "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" -o /tmp/test.mmdb 2>&1 | grep "CN=" ``` **If confirmed as supply chain attack:** + - **STOP** and do not proceed - Report to security team - See [Security Incident Response](../security-incident-response.md) @@ -280,6 +300,7 @@ chmod +x scripts/verify-geolite2-checksum.sh ### Issue: Multi-Platform Build Fails (arm64) **Symptoms:** + - `linux/amd64` build succeeds - `linux/arm64` build fails with checksum error @@ -290,12 +311,14 @@ chmod +x scripts/verify-geolite2-checksum.sh - Should be identical across all platforms 2. **Check buildx platform emulation:** + ```bash docker buildx ls docker buildx inspect ``` 3. **Test arm64 build explicitly:** + ```bash docker buildx build --platform linux/arm64 --load -t test-arm64 . ``` @@ -308,8 +331,8 @@ chmod +x scripts/verify-geolite2-checksum.sh - **Implementation Plan:** [`docs/plans/current_spec.md`](../plans/current_spec.md) - **QA Report:** [`docs/reports/qa_report.md`](../reports/qa_report.md) - **Dockerfile:** [`Dockerfile`](../../Dockerfile) (line ~352) -- **MaxMind GeoLite2:** https://dev.maxmind.com/geoip/geolite2-free-geolocation-data -- **P3TERX Mirror:** https://github.com/P3TERX/GeoLite.mmdb +- **MaxMind GeoLite2:** +- **P3TERX Mirror:** --- @@ -321,9 +344,10 @@ chmod +x scripts/verify-geolite2-checksum.sh **Solution:** Updated one line in `Dockerfile` (line 352) with the correct checksum and implemented an automated workflow to prevent future occurrences. 
-**Build Failure URL:** https://github.com/Wikid82/Charon/actions/runs/21584236523/job/62188372617 +**Build Failure URL:** **Related PRs:** + - Fix implementation: (link to PR) - Automated workflow addition: (link to PR) diff --git a/docs/patches/e2e_workflow_timeout_fix.patch.md b/docs/patches/e2e_workflow_timeout_fix.patch.md index 1998f991..dda0bc2a 100644 --- a/docs/patches/e2e_workflow_timeout_fix.patch.md +++ b/docs/patches/e2e_workflow_timeout_fix.patch.md @@ -6,8 +6,9 @@ index efbcccda..64fcc121 100644 if: | ((inputs.browser || 'all') == 'chromium' || (inputs.browser || 'all') == 'all') && ((inputs.test_category || 'all') == 'security' || (inputs.test_category || 'all') == 'all') -- timeout-minutes: 40 -+ timeout-minutes: 60 + +- timeout-minutes: 40 +- timeout-minutes: 60 env: CHARON_EMERGENCY_TOKEN: ${{ secrets.CHARON_EMERGENCY_TOKEN }} CHARON_EMERGENCY_SERVER_ENABLED: "true" @@ -15,42 +16,45 @@ index efbcccda..64fcc121 100644 npx playwright test \ --project=chromium \ -+ --output=playwright-output/security-chromium \ +- --output=playwright-output/security-chromium \ tests/security-enforcement/ \ tests/security/ \ tests/integration/multi-feature-workflows.spec.ts || STATUS=$? 
+ @@ -370,6 +371,25 @@ jobs: path: test-results/**/*.zip retention-days: 7 -+ - name: Collect diagnostics -+ if: always() -+ run: | -+ mkdir -p diagnostics -+ uptime > diagnostics/uptime.txt -+ free -m > diagnostics/free-m.txt -+ df -h > diagnostics/df-h.txt -+ ps aux > diagnostics/ps-aux.txt -+ docker ps -a > diagnostics/docker-ps.txt || true -+ docker logs --tail 500 charon-e2e > diagnostics/docker-charon-e2e.log 2>&1 || true -+ -+ - name: Upload diagnostics -+ if: always() -+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 -+ with: -+ name: e2e-diagnostics-chromium-security -+ path: diagnostics/ -+ retention-days: 7 -+ +- - name: Collect diagnostics +- if: always() +- run: | +- mkdir -p diagnostics +- uptime > diagnostics/uptime.txt +- free -m > diagnostics/free-m.txt +- df -h > diagnostics/df-h.txt +- ps aux > diagnostics/ps-aux.txt +- docker ps -a > diagnostics/docker-ps.txt || true +- docker logs --tail 500 charon-e2e > diagnostics/docker-charon-e2e.log 2>&1 || true +- +- - name: Upload diagnostics +- if: always() +- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 +- with: +- name: e2e-diagnostics-chromium-security +- path: diagnostics/ +- retention-days: 7 +- - name: Collect Docker logs on failure if: failure() run: | + @@ -394,7 +414,7 @@ jobs: if: | ((inputs.browser || 'all') == 'firefox' || (inputs.browser || 'all') == 'all') && ((inputs.test_category || 'all') == 'security' || (inputs.test_category || 'all') == 'all') -- timeout-minutes: 40 -+ timeout-minutes: 60 + +- timeout-minutes: 40 +- timeout-minutes: 60 env: CHARON_EMERGENCY_TOKEN: ${{ secrets.CHARON_EMERGENCY_TOKEN }} CHARON_EMERGENCY_SERVER_ENABLED: "true" @@ -58,42 +62,45 @@ index efbcccda..64fcc121 100644 npx playwright test \ --project=firefox \ -+ --output=playwright-output/security-firefox \ +- --output=playwright-output/security-firefox \ tests/security-enforcement/ \ tests/security/ \ 
tests/integration/multi-feature-workflows.spec.ts || STATUS=$? + @@ -559,6 +580,25 @@ jobs: path: test-results/**/*.zip retention-days: 7 -+ - name: Collect diagnostics -+ if: always() -+ run: | -+ mkdir -p diagnostics -+ uptime > diagnostics/uptime.txt -+ free -m > diagnostics/free-m.txt -+ df -h > diagnostics/df-h.txt -+ ps aux > diagnostics/ps-aux.txt -+ docker ps -a > diagnostics/docker-ps.txt || true -+ docker logs --tail 500 charon-e2e > diagnostics/docker-charon-e2e.log 2>&1 || true -+ -+ - name: Upload diagnostics -+ if: always() -+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 -+ with: -+ name: e2e-diagnostics-firefox-security -+ path: diagnostics/ -+ retention-days: 7 -+ +- - name: Collect diagnostics +- if: always() +- run: | +- mkdir -p diagnostics +- uptime > diagnostics/uptime.txt +- free -m > diagnostics/free-m.txt +- df -h > diagnostics/df-h.txt +- ps aux > diagnostics/ps-aux.txt +- docker ps -a > diagnostics/docker-ps.txt || true +- docker logs --tail 500 charon-e2e > diagnostics/docker-charon-e2e.log 2>&1 || true +- +- - name: Upload diagnostics +- if: always() +- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 +- with: +- name: e2e-diagnostics-firefox-security +- path: diagnostics/ +- retention-days: 7 +- - name: Collect Docker logs on failure if: failure() run: | + @@ -583,7 +623,7 @@ jobs: if: | ((inputs.browser || 'all') == 'webkit' || (inputs.browser || 'all') == 'all') && ((inputs.test_category || 'all') == 'security' || (inputs.test_category || 'all') == 'all') -- timeout-minutes: 40 -+ timeout-minutes: 60 + +- timeout-minutes: 40 +- timeout-minutes: 60 env: CHARON_EMERGENCY_TOKEN: ${{ secrets.CHARON_EMERGENCY_TOKEN }} CHARON_EMERGENCY_SERVER_ENABLED: "true" @@ -101,42 +108,45 @@ index efbcccda..64fcc121 100644 npx playwright test \ --project=webkit \ -+ --output=playwright-output/security-webkit \ +- --output=playwright-output/security-webkit \ tests/security-enforcement/ \ 
tests/security/ \ tests/integration/multi-feature-workflows.spec.ts || STATUS=$? + @@ -748,6 +789,25 @@ jobs: path: test-results/**/*.zip retention-days: 7 -+ - name: Collect diagnostics -+ if: always() -+ run: | -+ mkdir -p diagnostics -+ uptime > diagnostics/uptime.txt -+ free -m > diagnostics/free-m.txt -+ df -h > diagnostics/df-h.txt -+ ps aux > diagnostics/ps-aux.txt -+ docker ps -a > diagnostics/docker-ps.txt || true -+ docker logs --tail 500 charon-e2e > diagnostics/docker-charon-e2e.log 2>&1 || true -+ -+ - name: Upload diagnostics -+ if: always() -+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 -+ with: -+ name: e2e-diagnostics-webkit-security -+ path: diagnostics/ -+ retention-days: 7 -+ +- - name: Collect diagnostics +- if: always() +- run: | +- mkdir -p diagnostics +- uptime > diagnostics/uptime.txt +- free -m > diagnostics/free-m.txt +- df -h > diagnostics/df-h.txt +- ps aux > diagnostics/ps-aux.txt +- docker ps -a > diagnostics/docker-ps.txt || true +- docker logs --tail 500 charon-e2e > diagnostics/docker-charon-e2e.log 2>&1 || true +- +- - name: Upload diagnostics +- if: always() +- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 +- with: +- name: e2e-diagnostics-webkit-security +- path: diagnostics/ +- retention-days: 7 +- - name: Collect Docker logs on failure if: failure() run: | + @@ -779,7 +839,7 @@ jobs: if: | ((inputs.browser || 'all') == 'chromium' || (inputs.browser || 'all') == 'all') && ((inputs.test_category || 'all') == 'non-security' || (inputs.test_category || 'all') == 'all') -- timeout-minutes: 30 -+ timeout-minutes: 60 + +- timeout-minutes: 30 +- timeout-minutes: 60 env: CHARON_EMERGENCY_TOKEN: ${{ secrets.CHARON_EMERGENCY_TOKEN }} CHARON_EMERGENCY_SERVER_ENABLED: "true" @@ -144,57 +154,61 @@ index efbcccda..64fcc121 100644 npx playwright test \ --project=chromium \ --shard=${{ matrix.shard }}/${{ matrix.total-shards }} \ -+ 
--output=playwright-output/chromium-shard-${{ matrix.shard }} \ +- --output=playwright-output/chromium-shard-${{ matrix.shard }} \ tests/core \ tests/dns-provider-crud.spec.ts \ tests/dns-provider-types.spec.ts \ + @@ -915,6 +976,14 @@ jobs: path: playwright-report/ retention-days: 14 -+ - name: Upload Playwright output (Chromium shard ${{ matrix.shard }}) -+ if: always() -+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 -+ with: -+ name: playwright-output-chromium-shard-${{ matrix.shard }} -+ path: playwright-output/chromium-shard-${{ matrix.shard }}/ -+ retention-days: 7 -+ +- - name: Upload Playwright output (Chromium shard ${{ matrix.shard }}) +- if: always() +- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 +- with: +- name: playwright-output-chromium-shard-${{ matrix.shard }} +- path: playwright-output/chromium-shard-${{ matrix.shard }}/ +- retention-days: 7 +- - name: Upload Chromium coverage (if enabled) if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1') uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + @@ -931,6 +1000,25 @@ jobs: path: test-results/**/*.zip retention-days: 7 -+ - name: Collect diagnostics -+ if: always() -+ run: | -+ mkdir -p diagnostics -+ uptime > diagnostics/uptime.txt -+ free -m > diagnostics/free-m.txt -+ df -h > diagnostics/df-h.txt -+ ps aux > diagnostics/ps-aux.txt -+ docker ps -a > diagnostics/docker-ps.txt || true -+ docker logs --tail 500 charon-e2e > diagnostics/docker-charon-e2e.log 2>&1 || true -+ -+ - name: Upload diagnostics -+ if: always() -+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 -+ with: -+ name: e2e-diagnostics-chromium-shard-${{ matrix.shard }} -+ path: diagnostics/ -+ retention-days: 7 -+ +- - name: Collect diagnostics +- if: always() +- run: | +- mkdir -p diagnostics +- uptime > diagnostics/uptime.txt +- free -m > diagnostics/free-m.txt +- df 
-h > diagnostics/df-h.txt +- ps aux > diagnostics/ps-aux.txt +- docker ps -a > diagnostics/docker-ps.txt || true +- docker logs --tail 500 charon-e2e > diagnostics/docker-charon-e2e.log 2>&1 || true +- +- - name: Upload diagnostics +- if: always() +- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 +- with: +- name: e2e-diagnostics-chromium-shard-${{ matrix.shard }} +- path: diagnostics/ +- retention-days: 7 +- - name: Collect Docker logs on failure if: failure() run: | + @@ -955,7 +1043,7 @@ jobs: if: | ((inputs.browser || 'all') == 'firefox' || (inputs.browser || 'all') == 'all') && ((inputs.test_category || 'all') == 'non-security' || (inputs.test_category || 'all') == 'all') -- timeout-minutes: 30 -+ timeout-minutes: 60 + +- timeout-minutes: 30 +- timeout-minutes: 60 env: CHARON_EMERGENCY_TOKEN: ${{ secrets.CHARON_EMERGENCY_TOKEN }} CHARON_EMERGENCY_SERVER_ENABLED: "true" @@ -202,57 +216,61 @@ index efbcccda..64fcc121 100644 npx playwright test \ --project=firefox \ --shard=${{ matrix.shard }}/${{ matrix.total-shards }} \ -+ --output=playwright-output/firefox-shard-${{ matrix.shard }} \ +- --output=playwright-output/firefox-shard-${{ matrix.shard }} \ tests/core \ tests/dns-provider-crud.spec.ts \ tests/dns-provider-types.spec.ts \ + @@ -1099,6 +1188,14 @@ jobs: path: playwright-report/ retention-days: 14 -+ - name: Upload Playwright output (Firefox shard ${{ matrix.shard }}) -+ if: always() -+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 -+ with: -+ name: playwright-output-firefox-shard-${{ matrix.shard }} -+ path: playwright-output/firefox-shard-${{ matrix.shard }}/ -+ retention-days: 7 -+ +- - name: Upload Playwright output (Firefox shard ${{ matrix.shard }}) +- if: always() +- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 +- with: +- name: playwright-output-firefox-shard-${{ matrix.shard }} +- path: playwright-output/firefox-shard-${{ matrix.shard }}/ +- 
retention-days: 7 +- - name: Upload Firefox coverage (if enabled) if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1') uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + @@ -1115,6 +1212,25 @@ jobs: path: test-results/**/*.zip retention-days: 7 -+ - name: Collect diagnostics -+ if: always() -+ run: | -+ mkdir -p diagnostics -+ uptime > diagnostics/uptime.txt -+ free -m > diagnostics/free-m.txt -+ df -h > diagnostics/df-h.txt -+ ps aux > diagnostics/ps-aux.txt -+ docker ps -a > diagnostics/docker-ps.txt || true -+ docker logs --tail 500 charon-e2e > diagnostics/docker-charon-e2e.log 2>&1 || true -+ -+ - name: Upload diagnostics -+ if: always() -+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 -+ with: -+ name: e2e-diagnostics-firefox-shard-${{ matrix.shard }} -+ path: diagnostics/ -+ retention-days: 7 -+ +- - name: Collect diagnostics +- if: always() +- run: | +- mkdir -p diagnostics +- uptime > diagnostics/uptime.txt +- free -m > diagnostics/free-m.txt +- df -h > diagnostics/df-h.txt +- ps aux > diagnostics/ps-aux.txt +- docker ps -a > diagnostics/docker-ps.txt || true +- docker logs --tail 500 charon-e2e > diagnostics/docker-charon-e2e.log 2>&1 || true +- +- - name: Upload diagnostics +- if: always() +- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 +- with: +- name: e2e-diagnostics-firefox-shard-${{ matrix.shard }} +- path: diagnostics/ +- retention-days: 7 +- - name: Collect Docker logs on failure if: failure() run: | + @@ -1139,7 +1255,7 @@ jobs: if: | ((inputs.browser || 'all') == 'webkit' || (inputs.browser || 'all') == 'all') && ((inputs.test_category || 'all') == 'non-security' || (inputs.test_category || 'all') == 'all') -- timeout-minutes: 30 -+ timeout-minutes: 60 + +- timeout-minutes: 30 +- timeout-minutes: 60 env: CHARON_EMERGENCY_TOKEN: ${{ secrets.CHARON_EMERGENCY_TOKEN }} CHARON_EMERGENCY_SERVER_ENABLED: "true" @@ 
-260,48 +278,50 @@ index efbcccda..64fcc121 100644 npx playwright test \ --project=webkit \ --shard=${{ matrix.shard }}/${{ matrix.total-shards }} \ -+ --output=playwright-output/webkit-shard-${{ matrix.shard }} \ +- --output=playwright-output/webkit-shard-${{ matrix.shard }} \ tests/core \ tests/dns-provider-crud.spec.ts \ tests/dns-provider-types.spec.ts \ + @@ -1283,6 +1400,14 @@ jobs: path: playwright-report/ retention-days: 14 -+ - name: Upload Playwright output (WebKit shard ${{ matrix.shard }}) -+ if: always() -+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 -+ with: -+ name: playwright-output-webkit-shard-${{ matrix.shard }} -+ path: playwright-output/webkit-shard-${{ matrix.shard }}/ -+ retention-days: 7 -+ +- - name: Upload Playwright output (WebKit shard ${{ matrix.shard }}) +- if: always() +- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 +- with: +- name: playwright-output-webkit-shard-${{ matrix.shard }} +- path: playwright-output/webkit-shard-${{ matrix.shard }}/ +- retention-days: 7 +- - name: Upload WebKit coverage (if enabled) if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1') uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + @@ -1299,6 +1424,25 @@ jobs: path: test-results/**/*.zip retention-days: 7 -+ - name: Collect diagnostics -+ if: always() -+ run: | -+ mkdir -p diagnostics -+ uptime > diagnostics/uptime.txt -+ free -m > diagnostics/free-m.txt -+ df -h > diagnostics/df-h.txt -+ ps aux > diagnostics/ps-aux.txt -+ docker ps -a > diagnostics/docker-ps.txt || true -+ docker logs --tail 500 charon-e2e > diagnostics/docker-charon-e2e.log 2>&1 || true -+ -+ - name: Upload diagnostics -+ if: always() -+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 -+ with: -+ name: e2e-diagnostics-webkit-shard-${{ matrix.shard }} -+ path: diagnostics/ -+ retention-days: 7 -+ +- - name: Collect diagnostics +- if: 
always() +- run: | +- mkdir -p diagnostics +- uptime > diagnostics/uptime.txt +- free -m > diagnostics/free-m.txt +- df -h > diagnostics/df-h.txt +- ps aux > diagnostics/ps-aux.txt +- docker ps -a > diagnostics/docker-ps.txt || true +- docker logs --tail 500 charon-e2e > diagnostics/docker-charon-e2e.log 2>&1 || true +- +- - name: Upload diagnostics +- if: always() +- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 +- with: +- name: e2e-diagnostics-webkit-shard-${{ matrix.shard }} +- path: diagnostics/ +- retention-days: 7 +- - name: Collect Docker logs on failure if: failure() run: | diff --git a/docs/performance/feature-flags-endpoint.md b/docs/performance/feature-flags-endpoint.md index f63a31ff..c61ef10f 100644 --- a/docs/performance/feature-flags-endpoint.md +++ b/docs/performance/feature-flags-endpoint.md @@ -31,6 +31,7 @@ for _, s := range settings { ``` **Key Improvements:** + - **Single Query:** `WHERE key IN (?, ?, ?)` fetches all flags in one database round-trip - **O(1) Lookups:** Map-based access eliminates linear search overhead - **Error Handling:** Explicit error logging and HTTP 500 response on failure @@ -56,6 +57,7 @@ if err := h.DB.Transaction(func(tx *gorm.DB) error { ``` **Key Improvements:** + - **Atomic Updates:** All flag changes commit or rollback together - **Error Recovery:** Transaction rollback prevents partial state - **Improved Logging:** Explicit error messages for debugging @@ -65,10 +67,12 @@ if err := h.DB.Transaction(func(tx *gorm.DB) error { ### Before Optimization (Baseline - N+1 Pattern) **Architecture:** + - GetFlags(): 3 sequential `WHERE key = ?` queries (one per flag) - UpdateFlags(): Multiple separate transactions **Measured Latency (Expected):** + - **GET P50:** 300ms (CI environment) - **GET P95:** 500ms - **GET P99:** 600ms @@ -77,20 +81,24 @@ if err := h.DB.Transaction(func(tx *gorm.DB) error { - **PUT P99:** 600ms **Query Count:** + - GET: 3 queries (N+1 pattern, N=3 flags) - PUT: 1-3 
queries depending on flag count **CI Impact:** + - Test flakiness: ~30% failure rate due to timeouts - E2E test pass rate: ~70% ### After Optimization (Current - Batch Query + Transaction) **Architecture:** + - GetFlags(): 1 batch query `WHERE key IN (?, ?, ?)` - UpdateFlags(): 1 transaction wrapping all updates **Measured Latency (Target):** + - **GET P50:** 100ms (3x faster) - **GET P95:** 150ms (3.3x faster) - **GET P99:** 200ms (3x faster) @@ -99,10 +107,12 @@ if err := h.DB.Transaction(func(tx *gorm.DB) error { - **PUT P99:** 200ms (3x faster) **Query Count:** + - GET: 1 batch query (N+1 eliminated) - PUT: 1 transaction (atomic) **CI Impact (Expected):** + - Test flakiness: 0% (with retry logic + polling) - E2E test pass rate: 100% @@ -125,11 +135,13 @@ if err := h.DB.Transaction(func(tx *gorm.DB) error { **Status:** Complete **Changes:** + - Added `defer` timing to GetFlags() and UpdateFlags() - Log format: `[METRICS] GET/PUT /feature-flags: {duration}ms` - CI pipeline captures P50/P95/P99 metrics **Files Modified:** + - `backend/internal/api/handlers/feature_flags_handler.go` ### Phase 1: Backend Optimization - N+1 Query Fix @@ -139,16 +151,19 @@ if err := h.DB.Transaction(func(tx *gorm.DB) error { **Priority:** P0 - Critical CI Blocker **Changes:** + - **GetFlags():** Replaced N+1 loop with batch query `WHERE key IN (?)` - **UpdateFlags():** Wrapped updates in single transaction - **Tests:** Added batch query and transaction rollback tests - **Benchmarks:** Added BenchmarkGetFlags and BenchmarkUpdateFlags **Files Modified:** + - `backend/internal/api/handlers/feature_flags_handler.go` - `backend/internal/api/handlers/feature_flags_handler_test.go` **Expected Impact:** + - 3-6x latency reduction (600ms → 200ms P99) - Elimination of N+1 query anti-pattern - Atomic updates with rollback on error @@ -159,32 +174,38 @@ if err := h.DB.Transaction(func(tx *gorm.DB) error { ### Test Helpers Used **Polling Helper:** `waitForFeatureFlagPropagation()` + - Polls 
`/api/v1/feature-flags` until expected state confirmed - Default interval: 500ms - Default timeout: 30s (150x safety margin over 200ms P99) **Retry Helper:** `retryAction()` + - 3 max attempts with exponential backoff (2s, 4s, 8s) - Handles transient network/DB failures ### Timeout Strategy **Helper Defaults:** + - `clickAndWaitForResponse()`: 30s timeout - `waitForAPIResponse()`: 30s timeout - No explicit timeouts in test files (rely on helper defaults) **Typical Poll Count:** + - Local: 1-2 polls (50-200ms response + 500ms interval) - CI: 1-3 polls (50-200ms response + 500ms interval) ### Test Files **E2E Tests:** + - `tests/settings/system-settings.spec.ts` - Feature toggle tests - `tests/utils/wait-helpers.ts` - Polling and retry helpers **Backend Tests:** + - `backend/internal/api/handlers/feature_flags_handler_test.go` - `backend/internal/api/handlers/feature_flags_handler_coverage_test.go` @@ -205,11 +226,13 @@ go test ./internal/api/handlers/ -bench=Benchmark.*Flags -benchmem -run=^$ ### Benchmark Analysis **GetFlags Benchmark:** + - Measures single batch query performance - Tests with 3 flags in database - Includes JSON serialization overhead **UpdateFlags Benchmark:** + - Measures transaction wrapping performance - Tests atomic update of 3 flags - Includes JSON deserialization and validation @@ -219,14 +242,17 @@ go test ./internal/api/handlers/ -bench=Benchmark.*Flags -benchmem -run=^$ ### Why Batch Query Over Individual Queries? 
**Problem:** N+1 pattern causes linear latency scaling + - 3 flags = 3 queries × 200ms = 600ms total - 10 flags = 10 queries × 200ms = 2000ms total **Solution:** Single batch query with IN clause + - N flags = 1 query × 200ms = 200ms total - Constant time regardless of flag count **Trade-offs:** + - ✅ 3-6x latency reduction - ✅ Scales to more flags without performance degradation - ⚠️ Slightly more complex code (map-based lookup) @@ -234,14 +260,17 @@ go test ./internal/api/handlers/ -bench=Benchmark.*Flags -benchmem -run=^$ ### Why Transaction Wrapping? **Problem:** Multiple separate writes risk partial state + - Flag 1 succeeds, Flag 2 fails → inconsistent state - No rollback mechanism for failed updates **Solution:** Single transaction for all updates + - All succeed together or all rollback - ACID guarantees for multi-flag updates **Trade-offs:** + - ✅ Atomic updates with rollback on error - ✅ Prevents partial state corruption - ⚠️ Slightly longer locks (mitigated by fast SQLite) @@ -253,11 +282,13 @@ go test ./internal/api/handlers/ -bench=Benchmark.*Flags -benchmem -run=^$ **Status:** Not implemented (not needed after Phase 1 optimization) **Rationale:** + - Current latency (50-200ms) is acceptable for feature flags - Feature flags change infrequently (not a hot path) - Adding cache increases complexity without significant benefit **If Needed:** + - Use Redis or in-memory cache with TTL=60s - Invalidate on PUT operations - Expected improvement: 50-200ms → 10-50ms @@ -267,11 +298,13 @@ go test ./internal/api/handlers/ -bench=Benchmark.*Flags -benchmem -run=^$ **Status:** SQLite default indexes sufficient **Rationale:** + - `settings.key` column used in WHERE clauses - SQLite automatically indexes primary key - Query plan analysis shows index usage **If Needed:** + - Add explicit index: `CREATE INDEX idx_settings_key ON settings(key)` - Expected improvement: Minimal (already fast) @@ -280,11 +313,13 @@ go test ./internal/api/handlers/ -bench=Benchmark.*Flags 
-benchmem -run=^$ **Status:** GORM default pooling sufficient **Rationale:** + - GORM uses `database/sql` pool by default - Current concurrency limits adequate - No connection exhaustion observed **If Needed:** + - Tune `SetMaxOpenConns()` and `SetMaxIdleConns()` - Expected improvement: 10-20% under high load @@ -293,12 +328,14 @@ go test ./internal/api/handlers/ -bench=Benchmark.*Flags -benchmem -run=^$ ### Metrics to Track **Backend Metrics:** + - P50/P95/P99 latency for GET and PUT operations - Query count per request (should remain 1 for GET) - Transaction count per PUT (should remain 1) - Error rate (target: <0.1%) **E2E Metrics:** + - Test pass rate for feature toggle tests - Retry attempt frequency (target: <5%) - Polling iteration count (typical: 1-3) @@ -307,11 +344,13 @@ go test ./internal/api/handlers/ -bench=Benchmark.*Flags -benchmem -run=^$ ### Alerting Thresholds **Backend Alerts:** + - P99 > 500ms → Investigate regression (2.5x slower than optimized) - Error rate > 1% → Check database health - Query count > 1 for GET → N+1 pattern reintroduced **E2E Alerts:** + - Test pass rate < 95% → Check for new flakiness - Timeout errors > 0 → Investigate CI environment - Retry rate > 10% → Investigate transient failure source @@ -319,10 +358,12 @@ go test ./internal/api/handlers/ -bench=Benchmark.*Flags -benchmem -run=^$ ### Dashboard **CI Metrics:** + - Link: `.github/workflows/e2e-tests.yml` artifacts - Extracts `[METRICS]` logs for P50/P95/P99 analysis **Backend Logs:** + - Docker container logs with `[METRICS]` tag - Example: `[METRICS] GET /feature-flags: 120ms` @@ -331,15 +372,18 @@ go test ./internal/api/handlers/ -bench=Benchmark.*Flags -benchmem -run=^$ ### High Latency (P99 > 500ms) **Symptoms:** + - E2E tests timing out - Backend logs show latency spikes **Diagnosis:** + 1. Check query count: `grep "SELECT" backend/logs/query.log` 2. Verify batch query: Should see `WHERE key IN (...)` 3. Check transaction wrapping: Should see single `BEGIN ... 
COMMIT` **Remediation:** + - If N+1 pattern detected: Verify batch query implementation - If transaction missing: Verify transaction wrapping - If database locks: Check concurrent access patterns @@ -347,15 +391,18 @@ go test ./internal/api/handlers/ -bench=Benchmark.*Flags -benchmem -run=^$ ### Transaction Rollback Errors **Symptoms:** + - PUT requests return 500 errors - Backend logs show transaction failure **Diagnosis:** + 1. Check error message: `grep "Failed to update feature flags" backend/logs/app.log` 2. Verify database constraints: Unique key constraints, foreign keys 3. Check database connectivity: Connection pool exhaustion **Remediation:** + - If constraint violation: Fix invalid flag key or value - If connection issue: Tune connection pool settings - If deadlock: Analyze concurrent access patterns @@ -363,15 +410,18 @@ go test ./internal/api/handlers/ -bench=Benchmark.*Flags -benchmem -run=^$ ### E2E Test Flakiness **Symptoms:** + - Tests pass locally, fail in CI - Timeout errors in Playwright logs **Diagnosis:** + 1. Check backend latency: `grep "[METRICS]" ci-logs.txt` 2. Verify retry logic: Should see retry attempts in logs 3. Check polling behavior: Should see multiple GET requests **Remediation:** + - If backend slow: Investigate CI environment (disk I/O, CPU) - If no retries: Verify `retryAction()` wrapper in test - If no polling: Verify `waitForFeatureFlagPropagation()` usage diff --git a/docs/plans/archive/cve_remediation_spec.md b/docs/plans/archive/cve_remediation_spec.md new file mode 100644 index 00000000..e05b3fa4 --- /dev/null +++ b/docs/plans/archive/cve_remediation_spec.md @@ -0,0 +1,282 @@ +# CI Supply Chain CVE Remediation Plan + +**Status:** Active +**Created:** 2026-03-13 +**Branch:** `feature/beta-release` +**Context:** Three HIGH vulnerabilities (CVE-2025-69650, CVE-2025-69649, CVE-2026-3805) in the Docker runtime image are blocking the CI supply-chain scan. Two Grype ignore-rule entries are also expired and require maintenance. 
+ +--- + +## 1. Executive Summary + +| # | Action | Severity Reduction | Effort | +|---|--------|--------------------|--------| +| 1 | Remove `curl` from runtime image (replace with `wget`) | Eliminates 1 HIGH + ~7 MEDIUMs + 2 LOWs | ~30 min | +| 2 | Remove `binutils` + `libc-utils` from runtime image | Eliminates 2 HIGH + 3 MEDIUMs | ~5 min | +| 3 | Update expired Grype ignore rules | Prevents false scan failures at next run | ~10 min | + +**Bottom line:** All three HIGH CVEs are eliminated at root rather than suppressed. After Phase 1 and Phase 2, `fail-on-severity: high` passes cleanly. Phase 3 is maintenance-only. + +--- + +## 2. CVE Inventory + +### Blocking HIGH CVEs + +| CVE | Package | Version | CVSS | Fix State | Notes | +|-----|---------|---------|------|-----------|-------| +| CVE-2026-3805 | `curl` | 8.17.0-r1 | 7.5 | `unknown` | **New** — appeared in Grype DB 2026-03-13, published 2026-03-11. SMB protocol use-after-free. Charon uses HTTPS/HTTP only. | +| CVE-2025-69650 | `binutils` | 2.45.1-r0 | 7.5 | `` (none) | Double-free in `readelf`. Charon never invokes `readelf`. | +| CVE-2025-69649 | `binutils` | 2.45.1-r0 | 7.5 | `` (none) | Null-ptr deref in `readelf`. Charon never invokes `readelf`. | + +### Associated MEDIUM/LOW CVEs eliminated as side-effects + +| CVEs | Package | Count | Eliminated by | +|------|---------|-------|---------------| +| CVE-2025-14819, CVE-2025-15079, CVE-2025-14524, CVE-2025-13034, CVE-2025-14017 | `curl` | 5 × MEDIUM | Phase 1 | +| CVE-2025-69652, CVE-2025-69644, CVE-2025-69651 | `binutils` | 3 × MEDIUM | Phase 2 | + +### Expired Grype Ignore Rules + +| Entry | Expiry | Status | Action | +|-------|--------|--------|--------| +| `CVE-2026-22184` (zlib) | 2026-03-14 | Expires tomorrow; underlying CVE already fixed via `apk upgrade --no-cache zlib` | **Remove entirely** | +| `GHSA-69x3-g4r3-p962` (nebula) | 2026-03-05 | **Expired 8 days ago**; upstream fix still unavailable | **Extend to 2026-04-13** | + +--- + +## 3. 
Phase 1 — Remove `curl` from Runtime Image + +### Rationale + +`curl` is present solely for: +1. GeoLite2 DB download at build time (Dockerfile, runtime stage `RUN` block) +2. HEALTHCHECK probe (Dockerfile `HEALTHCHECK` directive) +3. Caddy admin API readiness poll (`.docker/docker-entrypoint.sh`) + +`busybox` (already installed on Alpine as a transitive dependency of `busybox-extras`, which is explicitly installed) provides `wget` with sufficient functionality for all three uses. + +### 3.1 `wget` Translation Reference + +| `curl` invocation | `wget` equivalent | Notes | +|-------------------|--------------------|-------| +| `curl -fSL -m 10 "URL" -o FILE 2>/dev/null` | `wget -qO FILE -T 10 "URL" 2>/dev/null` | `-q` = quiet; `-T` = timeout (seconds); exits nonzero on failure | +| `curl -fSL -m 30 --retry 3 "URL" -o FILE` | `wget -qO FILE -T 30 -t 4 "URL"` | `-t 4` = 4 total tries (1 initial + 3 retries); add `&& [ -s FILE ]` guard | +| `curl -f http://HOST/path \|\| exit 1` | `wget -q -O /dev/null http://HOST/path \|\| exit 1` | HEALTHCHECK; wget exits nonzero on HTTP error | +| `curl -sf http://HOST/path > /dev/null 2>&1` | `wget -qO /dev/null http://HOST/path 2>/dev/null` | Silent readiness probe | + +**busybox wget notes:** +- `-T N` is per-connection timeout in seconds (equivalent to `curl --max-time`). +- `-t N` is total number of tries, not retries; `-t 4` = 3 retries. +- On download failure, busybox wget may leave a zero-byte or partial file at the output path. The `[ -s FILE ]` guard (`-s` = non-empty) prevents a corrupted placeholder from passing the sha256 check. 
+ +### 3.2 Dockerfile Changes + +**File:** `Dockerfile` + +**Change A — Remove `curl`, `binutils`, `libc-utils` from `apk add` (runtime stage, line ~413):** + +Current: +```dockerfile +RUN apk add --no-cache \ + bash ca-certificates sqlite-libs sqlite tzdata curl gettext libcap libcap-utils \ + c-ares binutils libc-utils busybox-extras \ + && apk upgrade --no-cache zlib +``` + +New: +```dockerfile +RUN apk add --no-cache \ + bash ca-certificates sqlite-libs sqlite tzdata gettext libcap libcap-utils \ + c-ares busybox-extras \ + && apk upgrade --no-cache zlib +``` + +*(This single edit covers both Phase 1 and Phase 2 removals.)* + +**Change B — GeoLite2 download block, CI path (line ~437):** + +Current: +```dockerfile +if curl -fSL -m 10 "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" \ + -o /app/data/geoip/GeoLite2-Country.mmdb 2>/dev/null; then +``` + +New: +```dockerfile +if wget -qO /app/data/geoip/GeoLite2-Country.mmdb \ + -T 10 "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" 2>/dev/null; then +``` + +**Change C — GeoLite2 download block, non-CI path (line ~445):** + +Current: +```dockerfile +if curl -fSL -m 30 --retry 3 "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" \ + -o /app/data/geoip/GeoLite2-Country.mmdb; then + if echo "${GEOLITE2_COUNTRY_SHA256} /app/data/geoip/GeoLite2-Country.mmdb" | sha256sum -c -; then +``` + +New: +```dockerfile +if wget -qO /app/data/geoip/GeoLite2-Country.mmdb \ + -T 30 -t 4 "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb"; then + if [ -s /app/data/geoip/GeoLite2-Country.mmdb ] && \ + echo "${GEOLITE2_COUNTRY_SHA256} /app/data/geoip/GeoLite2-Country.mmdb" | sha256sum -c -; then +``` + +The `[ -s FILE ]` check is added before `sha256sum` to guard against wget leaving an empty file on partial failure. 
+ +**Change D — HEALTHCHECK directive (line ~581):** + +Current: +```dockerfile +HEALTHCHECK --interval=30s --timeout=3s --start-period=40s --retries=3 \ + CMD curl -f http://localhost:8080/api/v1/health || exit 1 +``` + +New: +```dockerfile +HEALTHCHECK --interval=30s --timeout=3s --start-period=40s --retries=3 \ + CMD wget -q -O /dev/null http://localhost:8080/api/v1/health || exit 1 +``` + +### 3.3 Entrypoint Changes + +**File:** `.docker/docker-entrypoint.sh` + +**Change E — Caddy readiness poll (line ~368):** + +Current: +```sh +if curl -sf http://127.0.0.1:2019/config/ > /dev/null 2>&1; then +``` + +New: +```sh +if wget -qO /dev/null http://127.0.0.1:2019/config/ 2>/dev/null; then +``` + +--- + +## 4. Phase 2 — Remove `binutils` and `libc-utils` from Runtime Image + +### Rationale + +`binutils` is installed solely for `objdump`, used in `.docker/docker-entrypoint.sh` to detect DWARF debug symbols when `CHARON_DEBUG=1`. The entrypoint already has a graceful fallback (lines ~401–404): + +```sh +else + # objdump not available, try to run Delve anyway with a warning + echo "Note: Cannot verify debug symbols (objdump not found). Attempting Delve..." + run_as_charon /usr/local/bin/dlv exec "$bin_path" ... +fi +``` + +When `objdump` is absent the container functions correctly for all standard and debug-mode runs. The check is advisory. + +`libc-utils` appears **only once** across the entire codebase (confirmed by grep across `*.sh`, `Dockerfile`, `*.yml`): as a sibling entry on the same `apk add` line as `binutils`. It is an Alpine meta-package that pulls in supplementary musl libc utilities (e.g. `getconf`, `getent`, `iconv` via `musl-utils`) and has no independent consumer in this image. It is safe to remove together with `binutils`. + +### 4.1 Dockerfile Change + +Already incorporated in Phase 1 Change A — the `apk add` line removes both `binutils` and `libc-utils` in a single edit. No additional changes are required. + +### 4.2 Why Not Suppress Instead? 
+ +Suppressing in Grype requires two new ignore entries with expiry maintenance every 30 days indefinitely (no upstream Alpine fix exists). Removing the packages eliminates the CVEs permanently. There is no functional regression given the working fallback. + +--- + +## 5. Phase 3 — Update Expired Grype Ignore Rules + +**File:** `.grype.yaml` + +### 5.1 Remove `CVE-2026-22184` (zlib) Block + +**Action:** Delete the entire `CVE-2026-22184` ignore entry. + +**Reason:** The Dockerfile runtime stage already contains `&& apk upgrade --no-cache zlib`, which upgrades zlib from 1.3.1-r2 to 1.3.2-r0, resolving CVE-2026-22184. Suppressing a resolved CVE creates false confidence and obscures scan accuracy. The entry's own removal criteria have been met: Alpine released `zlib 1.3.2-r0`. + +### 5.2 Extend `GHSA-69x3-g4r3-p962` (nebula) Expiry + +**Action:** Update the `expiry` field and review comment in the nebula block. + +Current: +```yaml + expiry: "2026-03-05" # Re-evaluate in 14 days (2026-02-19 + 14 days) +``` + +New: +```yaml + expiry: "2026-04-13" # Re-evaluated 2026-03-13: smallstep/certificates stable still v0.27.5, no nebula v1.10+ requirement. Extended 30 days. +``` + +Update the review comment line: +``` + # - Next review: 2026-04-13. + # - Reviewed 2026-03-13: smallstep stable still v0.27.5 (no nebula v1.10+ requirement). Extended 30 days. + # - Remove suppression immediately once upstream fixes. +``` + +**Reason:** As of 2026-03-13, `smallstep/certificates` has not released a stable version requiring nebula v1.10+. The constraint analysis from 2026-02-19 remains valid. Expiry extended 30 days to 2026-04-13. + +--- + +## 6. 
File Change Summary + +| File | Change | Scope | +|------|--------|-------| +| `Dockerfile` | Remove `curl`, `binutils`, `libc-utils` from `apk add` | Line ~413–415 | +| `Dockerfile` | Replace `curl` with `wget` in GeoLite2 CI download path | Line ~437–441 | +| `Dockerfile` | Replace `curl` with `wget` in GeoLite2 non-CI path; add `[ -s FILE ]` guard | Line ~445–452 | +| `Dockerfile` | Replace `curl` with `wget` in HEALTHCHECK | Line ~581 | +| `.docker/docker-entrypoint.sh` | Replace `curl` with `wget` in Caddy readiness poll | Line ~368 | +| `.grype.yaml` | Delete `CVE-2026-22184` (zlib) ignore block entirely | zlib block | +| `.grype.yaml` | Extend `GHSA-69x3-g4r3-p962` expiry to 2026-04-13; update review comment | nebula block | + +--- + +## 7. Commit Slicing Strategy + +**Single PR** — all changes are security-related and tightly coupled. Splitting curl removal from binutils removal would produce an intermediate commit with partially resolved HIGHs, offering no validation benefit and complicating rollback. + +Suggested commit message: +``` +fix(security): remove curl and binutils from runtime image + +Replace curl with busybox wget for GeoLite2 downloads, HEALTHCHECK, +and the Caddy readiness probe. Remove binutils and libc-utils from the +runtime image; the entrypoint objdump check has a documented fallback +for missing objdump. Eliminates CVE-2026-3805 (curl HIGH), CVE-2025-69650 +and CVE-2025-69649 (binutils HIGH), plus 8 associated MEDIUM findings. + +Remove the now-resolved CVE-2026-22184 (zlib) suppression from +.grype.yaml and extend GHSA-69x3-g4r3-p962 (nebula) expiry to +2026-04-13 pending upstream smallstep/certificates update. +``` + +--- + +## 8. 
Expected Scan Results After Fix + +| Metric | Before | After | Delta | +|--------|--------|-------|-------| +| HIGH count | 3 | **0** | −3 | +| MEDIUM count | ~13 | ~5 | −8 | +| LOW count | ~2 | ~0 | −2 | +| `fail-on-severity: high` | ❌ FAIL | ✅ PASS | — | +| CI supply-chain scan | ❌ BLOCKED | ✅ GREEN | — | + +Remaining MEDIUMs after fix (~5): +- `busybox` / `busybox-extras` / `ssl_client` — CVE-2025-60876 (CRLF injection in wget/ssl_client; no Alpine fix; Charon application code does not invoke `wget` directly at runtime) + +--- + +## 9. Validation Steps + +1. Rebuild Docker image: `docker build -t charon:test .` +2. Run Grype scan: `grype charon:test` — confirm zero HIGH findings +3. Confirm HEALTHCHECK probe passes: start container, check `docker inspect` for `healthy` status +4. Confirm Caddy readiness: inspect entrypoint logs for `"Caddy is ready!"` +5. Run E2E suite: `npx playwright test --project=firefox` +6. Push branch and confirm CI supply-chain workflow exits green diff --git a/docs/plans/archive/eslint-ts-vite-upgrade-spec.md b/docs/plans/archive/eslint-ts-vite-upgrade-spec.md new file mode 100644 index 00000000..b3e6d3a3 --- /dev/null +++ b/docs/plans/archive/eslint-ts-vite-upgrade-spec.md @@ -0,0 +1,1158 @@ +# Major Dependency Upgrade Plan — ESLint v10, TypeScript 6.0, Vite 8 + +**Date:** 2026-03-12 +**Author:** Planning Agent +**Status:** Ready for Review +**Confidence Score:** 82% (High for ESLint v10 + TS 6.0; Medium for Vite 8 — beta with Rolldown migration) + +--- + +## 1. 
Executive Summary + +This plan covers the upgrade of three major frontend toolchain dependencies in the Charon project: + +| Dependency | Current Version | Target Version | Status | Risk | +|---|---|---|---|---| +| **ESLint** | `^9.39.3 <10.0.0` | `^10.0.0` | Released | **Medium** — plugin compat gate | +| **TypeScript** | `^5.9.3` | `^6.0.0` | Beta (Feb 11) / RC (Mar 6) | **Medium** — 17+ deprecations | +| **Vite** | `^7.3.1` | `8.0.0-beta.18` | Beta (Dec 3, 2025) | **High** — beta, Rolldown replaces Rollup+esbuild | + +### Key Findings + +1. **ESLint v10** is released with a comprehensive migration guide. The primary blocker is a note in `lefthook.yml`: _"ESLint pinned at v9.x.x — do not upgrade until react-hooks plugin supports v10."_ The `eslint-plugin-react-hooks@7.0.1` must be verified for ESLint v10 compatibility before proceeding. + +2. **TypeScript 6.0** is real (Beta: Feb 11, 2026; RC: Mar 6, 2026). It is explicitly designed as a **bridge release** between TS 5.9 and the native Go-based TS 7.0. It introduces 17+ deprecations/breaking changes (new defaults for `strict`, `module`, `target`, `types`, `rootDir`; removal of `outFile`, legacy module systems; deprecated `baseUrl`, `moduleResolution: node`). Charon's current `tsconfig.json` is well-positioned — it already uses `moduleResolution: bundler`, `strict: true`, and `module: ESNext`. The **critical impact** is the `types` default changing to `[]`. + +3. **Vite 8 exists as `8.0.0-beta.18`** (announced Dec 3, 2025). The headline change is **Rolldown replaces both Rollup and esbuild**. JS transforms and minification now use Oxc; CSS minification uses Lightning CSS. The `build.rollupOptions` config key is deprecated in favor of `build.rolldownOptions`, and `output.manualChunks` (object form) is removed. Charon's `vite.config.ts` uses `rollupOptions` with `inlineDynamicImports: true` — both need migration. Ecosystem packages (`@vitejs/plugin-react`, `vitest`) require beta versions for Vite 8 compatibility. 
+ +### Recommended Execution Order + +``` +PR-1: TypeScript 6.0 upgrade (fewer external dependencies, most self-contained) +PR-2: ESLint v10 upgrade (blocked on plugin compat verification) +PR-3: Vite 8 upgrade (beta — stacked on PR-1 + PR-2 branch) +``` + +--- + +## 2. Current Dependency Inventory + +### Root `package.json` (`/projects/Charon/package.json`) + +| Package | Current Version | Category | +|---|---|---| +| `typescript` | `^5.9.3` | devDependency | +| `vite` | `^7.3.1` | devDependency | +| `@playwright/test` | `^1.58.2` | devDependency | +| `prettier` | `^3.8.1` | devDependency | +| `markdownlint-cli2` | `^0.21.0` | devDependency | + +### Frontend `package.json` (`/projects/Charon/frontend/package.json`) + +| Package | Current Version | Category | +|---|---|---| +| `typescript` | `^5.9.3` | devDependency | +| `vite` | `^7.3.1` | devDependency | +| `vitest` | `^4.0.18` | devDependency | +| `eslint` | `^9.39.3 <10.0.0` | devDependency | +| `@eslint/js` | `^9.39.3 <10.0.0` | devDependency | +| `@eslint/css` | `^1.0.0` | devDependency | +| `@eslint/json` | `^1.1.0` | devDependency | +| `@eslint/markdown` | `^7.5.1` | devDependency | +| `typescript-eslint` | `^8.57.0` | devDependency | +| `@typescript-eslint/eslint-plugin` | `^8.57.0` | devDependency | +| `@typescript-eslint/parser` | `^8.57.0` | devDependency | +| `@vitejs/plugin-react` | `^5.1.4` | devDependency | +| `@vitest/coverage-istanbul` | `^4.0.18` | devDependency | +| `@vitest/coverage-v8` | `^4.0.18` | devDependency | +| `@vitest/eslint-plugin` | `^1.6.10` | devDependency | +| `react` | `^19.2.4` | dependency | +| `react-dom` | `^19.2.4` | dependency | +| `react-router-dom` | `^7.13.1` | dependency | +| `@tanstack/react-query` | `^5.90.21` | dependency | + +### ESLint Plugin Inventory (18 plugins) + +| Plugin | Current Version | ESLint v10 Risk | +|---|---|---| +| `eslint-plugin-react-hooks` | `^7.0.1` | **HIGH** — explicit blocker in `lefthook.yml` | +| `eslint-plugin-react-compiler` | 
`^19.1.0-rc.2` | Medium — RC, check compat | +| `eslint-plugin-react-refresh` | `^0.5.2` | Low | +| `eslint-plugin-import-x` | `^4.16.1` | Low — modern fork | +| `eslint-plugin-jsx-a11y` | `^6.10.2` | Medium | +| `eslint-plugin-security` | `^4.0.0` | Low | +| `eslint-plugin-sonarjs` | `^4.0.2` | Low | +| `eslint-plugin-unicorn` | `^63.0.0` | Low — actively maintained | +| `eslint-plugin-promise` | `^7.2.1` | Low | +| `eslint-plugin-unused-imports` | `^4.4.1` | Low | +| `eslint-plugin-no-unsanitized` | `^4.1.5` | Medium | +| `eslint-plugin-testing-library` | `^7.16.0` | Low | +| `typescript-eslint` | `^8.57.0` | Low — tracks ESLint closely | +| `@vitest/eslint-plugin` | `^1.6.10` | Low | +| `@eslint/css` | `^1.0.0` | Low — official ESLint | +| `@eslint/json` | `^1.1.0` | Low — official ESLint | +| `@eslint/markdown` | `^7.5.1` | Low — official ESLint | + +### Config Files Affected + +| File | Impact Area | +|---|---| +| `frontend/tsconfig.json` | TS 6.0 — `types`, `lib`, defaults | +| `frontend/tsconfig.node.json` | TS 6.0 — minor | +| `frontend/tsconfig.build.json` | TS 6.0 — extends base | +| `frontend/eslint.config.js` | ESLint v10 — plugin compat | +| `eslint.config.js` (root) | ESLint v10 — imports frontend config | +| `frontend/package.json` | All — version bumps | +| `package.json` (root) | TS + Vite version bumps | +| `lefthook.yml` | ESLint v10 — remove pin note | +| `Dockerfile` | Node.js version (already compatible) | + +### Infrastructure + +- **Node.js:** `24.14.0-alpine` (Dockerfile) — meets all upgrade requirements +- **No `.npmrc` file exists** in the project +- **Go:** `1.26.1` (not affected by frontend upgrades) + +--- + +## 3. 
Breaking Changes Analysis + +### 3.1 ESLint v10 Breaking Changes + +**Source:** [ESLint v10 Migration Guide](https://eslint.org/docs/latest/use/migrate-to-10.0.0) + +| # | Breaking Change | Impact on Charon | Action Required | +|---|---|---|---| +| 1 | **Node.js ≥ v20.19, v22.13, or v24** required | None — already on Node 24.14.0 | None | +| 2 | **`eslint:recommended` updated** — 3 new rules: `no-unassigned-vars`, `no-useless-assignment`, `preserve-caught-error` | May flag new violations in codebase | Fix flagged code or disable rules | +| 3 | **New config file lookup** — searches from linted file, not cwd | Flat config already used; minor risk for monorepo patterns | Verify root config is found correctly | +| 4 | **Old `.eslintrc` format completely removed** | None — already using flat config | None | +| 5 | **JSX references now tracked** — fixes `no-unused-vars` for JSX components | Positive — fewer false positives | May surface new true positives | +| 6 | **`eslint-env` comments reported as errors** | Search codebase for `/* eslint-env */` | Remove if found | +| 7 | **Jiti ≥ v2.2.0 required** | Check transitive dep version | May need explicit install | +| 8 | **Removed deprecated `context` members** — `context.getScope()`, `context.getAncestors()`, etc. | Affects **plugins**, not our config directly | All 18 plugins must be compatible | +| 9 | **Removed deprecated `SourceCode` methods** | Same — plugin concern | Plugin compat verification | +| 10 | **Program AST node range spans entire source** | Unlikely to affect us | None | + +**Critical Plugin Gate:** The `eslint-plugin-react-hooks` compatibility with ESLint v10 must be verified. 
The `lefthook.yml` at line ~98 explicitly states: _"NOTE: ESLint pinned at v9.x.x — do not upgrade until react-hooks plugin supports v10."_ + +### 3.2 TypeScript 6.0 Breaking Changes + +**Source:** [TypeScript 6.0 Beta Announcement](https://devblogs.microsoft.com/typescript/announcing-typescript-6-0-beta/) and [6.0 Deprecation List](https://github.com/microsoft/TypeScript/issues/54500) + +#### Default Value Changes + +| Setting | Old Default | New Default | Charon Current | Action | +|---|---|---|---|---| +| `strict` | `false` | **`true`** | `true` (explicit) | None — already set | +| `module` | `commonjs` | **`esnext`** | `ESNext` (explicit) | None — already set | +| `target` | `es5` | **`es2025`** (floating) | `ES2022` (explicit) | None — already set | +| `types` | `["*"]` (all @types) | **`[]`** (none) | **Not set** | **ACTION: Add `"types": []`** | +| `rootDir` | inferred | **`.`** (tsconfig dir) | Not set | Verify — no emit, `noEmit: true` | +| `noUncheckedSideEffectImports` | `false` | **`true`** | Not set | Verify no side-effect import issues | +| `libReplacement` | `true` | **`false`** | Not set | None — improves perf | + +#### Deprecations (with `ignoreDeprecations: "6.0"` escape hatch) + +| Deprecation | Charon Uses? 
| Impact | +|---|---|---| +| `target: es5` | No (`ES2022`) | None | +| `--outFile` | No | None | +| `--downlevelIteration` | No | None | +| `--moduleResolution node/node10` | No (`bundler`) | None | +| `--moduleResolution classic` | No | None | +| `--baseUrl` | No | None | +| `module: amd/umd/systemjs` | No (`ESNext`) | None | +| `esModuleInterop: false` | Not explicitly set | None | +| `allowSyntheticDefaultImports: false` | Not set (`true` in tsconfig.node) | None | +| `alwaysStrict: false` | Not set (`strict: true` covers) | None | +| Legacy `module` keyword for namespaces | No | None | +| `asserts` keyword on imports | No | None | +| `no-default-lib` directives | No | None | + +#### New Features Available + +| Feature | Relevance | +|---|---| +| `import defer` syntax | Future use — deferred module evaluation | +| `--module node20` | Not needed — using bundler | +| `es2025` target/lib | Can update `target` from `ES2022` to `ES2025` | +| Temporal types | Available via `esnext` lib | +| `dom.iterable` included in `dom` | Can simplify `lib` array | +| `--stableTypeOrdering` | Useful for TS 7.0 migration prep | +| Expandable hovers | Editor UX improvement | +| `Map.getOrInsert` / `getOrInsertComputed` | Available via `esnext` lib | +| `RegExp.escape` | Available via `es2025` lib | +| `#/` subpath imports | Available for future module aliasing | + +#### lib.d.ts Changes — ArrayBuffer/Buffer Breaking Change + +TypeScript 5.9 introduced a behavioral change where `ArrayBuffer` is no longer a supertype of several `TypedArray` types. This may cause errors like: + +``` +error TS2345: Argument of type 'ArrayBufferLike' is not assignable to parameter of type 'BufferSource'. +error TS2322: Type 'Buffer' is not assignable to type 'Uint8Array'. +``` + +**Mitigation:** Ensure `@types/node` is at latest version. This is a 5.9 → 6.0 carryover that must be verified. 
+ +### 3.3 Vite 8 Breaking Changes + +**Source:** [Vite 8 Beta Announcement](https://vite.dev/blog/announcing-vite8-beta) and [Migration from v7 Guide](https://main.vite.dev/guide/migration) + +**Version:** `8.0.0-beta.18` (dist-tag: `beta`, announced Dec 3, 2025) + +#### Core Architecture Change: Rolldown Replaces Rollup + esbuild + +Vite 8's defining change is replacing **two bundlers** (esbuild for dev transforms, Rollup for production builds) with a single Rust-based toolchain: + +| Component | Vite 7 | Vite 8 | Impact on Charon | +|---|---|---|---| +| **Bundler** | Rollup | **Rolldown** (`1.0.0-rc.8`) | `rollupOptions` → `rolldownOptions` | +| **JS Transforms** | esbuild | **Oxc** (`@oxc-project/runtime@0.115.0`) | `esbuild` config key deprecated | +| **JS Minification** | esbuild | **Oxc Minifier** | Different minification assumptions | +| **CSS Minification** | esbuild | **Lightning CSS** (`^1.31.1`) | Slightly different output, bundle size may change | +| **Dep Optimization** | esbuild | **Rolldown** | `optimizeDeps.esbuildOptions` deprecated | + +#### Breaking Changes Impacting Charon + +| # | Breaking Change | Impact on Charon | Action Required | +|---|---|---|---| +| 1 | **Node.js `^20.19.0 \|\| >=22.12.0`** required | None — already on Node 24.14.0 | None | +| 2 | **`build.rollupOptions` deprecated** → `build.rolldownOptions` | **HIGH** — `vite.config.ts` uses `rollupOptions` | Rename config key | +| 3 | **`output.manualChunks` object form removed**, function form deprecated | **HIGH** — config sets `manualChunks: undefined` | Remove or migrate to `codeSplitting` | +| 4 | **`output.inlineDynamicImports`** — supported in Rolldown but **deprecated** in favor of `codeSplitting: false` ([rolldown docs](https://rolldown.rs/reference/OutputOptions.inlineDynamicImports)) | **HIGH** — config uses `inlineDynamicImports: true` as temporary workaround | Migrate to `codeSplitting: false`; `inlineDynamicImports` works as fallback | +| 5 | **Default browser targets 
updated** (Chrome 107→111, Firefox 104→114, Safari 16.0→16.4) | Low — Charon doesn't set explicit `build.target` | None — new defaults are fine | +| 6 | **esbuild no longer a direct dependency** | Low — Charon doesn't use esbuild config | None | +| 7 | **Oxc Minifier** replaces esbuild minifier | Low — different assumptions about source code | Test build output; verify no minification breakage | +| 8 | **Lightning CSS** for CSS minification | Low — may produce slightly different CSS output | Verify CSS output visually | +| 9 | **Consistent CommonJS interop** — `default` import behavior changes for CJS modules | Medium — could affect CJS dependencies (axios, etc.) | Test all runtime imports | +| 10 | **Module resolution format sniffing removed** — `browser`/`module` field heuristic gone | Low — modern packages use `exports` field | Verify no resolution regressions | +| 11 | **`@vitejs/plugin-react` 5.x does NOT support Vite 8** — requires `6.0.0-beta.0` | **HIGH** — must upgrade plugin-react | Upgrade to `@vitejs/plugin-react@6.0.0-beta.0` | +| 12 | **Plugin-react 6.0 uses `@rolldown/pluginutils`** instead of Rollup utils | Low — internal plugin change | None — handled by plugin upgrade | + +#### New Features Available + +| Feature | Relevance to Charon | +|---|---| +| Built-in tsconfig `paths` support (`resolve.tsconfigPaths: true`) | Could replace manual alias config if needed | +| `emitDecoratorMetadata` support | Not needed — Charon doesn't use decorators | +| Performance: 10–30× faster production builds | Direct benefit — faster Docker builds and CI | +| Full Bundle Mode (upcoming) | Future — 3× faster dev server startup | +| Module-level persistent cache (upcoming) | Future — faster rebuilds | + +#### Dockerfile Impact: Rollup Native Skip Flags + +The current Dockerfile sets: + +```dockerfile +ENV npm_config_rollup_skip_nodejs_native=1 \ + ROLLUP_SKIP_NODEJS_NATIVE=1 +``` + +These env vars are **Rollup-specific** for cross-platform builds. 
With Vite 8, Rollup is replaced by Rolldown, which uses its own native bindings (`@rolldown/binding-linux-x64-musl` for Alpine). These env vars become no-ops but do not cause harm. Rolldown's native bindings are installed per-platform by npm's `optionalDependencies` mechanism — the same mechanism that works for the `$BUILDPLATFORM` Docker flag. + +**Action:** Remove the Rollup skip flags from Dockerfile and verify cross-platform builds still work. Rolldown includes `@rolldown/binding-linux-x64-musl` which is exactly what Alpine requires. + +--- + +## 4. Compatibility Matrix + +### ESLint v10 Plugin Compatibility Verification Matrix + +Each plugin must be verified before the ESLint v10 upgrade. The agent performing PR-2 must run these checks: + +```bash +# For each plugin, check peer dependency support +npm info eslint-plugin-react-hooks peerDependencies +npm info eslint-plugin-react-compiler peerDependencies +npm info eslint-plugin-jsx-a11y peerDependencies +npm info eslint-plugin-import-x peerDependencies +npm info eslint-plugin-security peerDependencies +npm info eslint-plugin-sonarjs peerDependencies +npm info eslint-plugin-unicorn peerDependencies +npm info eslint-plugin-promise peerDependencies +npm info eslint-plugin-unused-imports peerDependencies +npm info eslint-plugin-no-unsanitized peerDependencies +npm info eslint-plugin-testing-library peerDependencies +npm info eslint-plugin-react-refresh peerDependencies +npm info @vitest/eslint-plugin peerDependencies +npm info typescript-eslint peerDependencies +npm info @eslint/css peerDependencies +npm info @eslint/json peerDependencies +npm info @eslint/markdown peerDependencies +``` + +**Decision Gate:** If `eslint-plugin-react-hooks` does NOT support ESLint v10 in its `peerDependencies`, the ESLint v10 upgrade is **BLOCKED**. Do not use `--legacy-peer-deps` or `--force` as a workaround. 
+ +### TypeScript 6.0 Ecosystem Compatibility + +| Tool | TS 6.0 Compat | Notes | +|---|---|---| +| `typescript-eslint@8.57.0` | Likely — tracks TS closely | Verify with `npm install` | +| `vite@7.3.1` | Yes — Vite uses esbuild/swc, not tsc directly | Type-check is separate | +| `vitest@4.0.18` | Yes — same reasoning | Type-check is separate | +| `@vitejs/plugin-react@5.1.4` | Yes | No TS compiler dependency | +| `react@19.2.4` / `@types/react` | Yes | Ensure `@types/react` latest | +| `@tanstack/react-query@5.90.21` | Likely — popular library | TanStack already preparing for TS 6 | +| `knip@5.86.0` | Verify | Uses TS programmatic API | + +### Node.js Compatibility + +| Tool | Min Node.js | Charon Node.js | Status | +|---|---|---|---| +| ESLint v10 | 20.19 / 22.13 / 24+ | 24.14.0 | Compatible | +| TypeScript 6.0 | TBD (likely same as 5.9) | 24.14.0 | Compatible | +| Vite 7 | 20.19 / 22.12+ | 24.14.0 | Compatible | +| Vite 8 | 20.19 / 22.12+ | 24.14.0 | Compatible | + +### Vite 8 Ecosystem Compatibility Matrix + +All Vite-related packages must be updated together. Stable releases do **not** support Vite 8. + +| Package | Current Version | Vite 8 Compatible? | Required Version | Override Needed? 
| +|---|---|---|---|---| +| `vite` | `^7.3.1` | — | `8.0.0-beta.18` | No — direct install | +| `@vitejs/plugin-react` | `^5.1.4` | **No** (5.x peer: `vite: ^4.2.0 \|\| ^5.0.0 \|\| ^6.0.0 \|\| ^7.0.0`) | `6.0.0-beta.0` (peer: `vite: ^8.0.0` — verified via `npm info`) | No — direct install | +| `vitest` | `^4.0.18` | **No** (deps: `^6.0.0 \|\| ^7.0.0`) | `4.1.0-beta.6` (deps: `^6.0.0 \|\| ^7.0.0 \|\| ^8.0.0-0`) | No — 4.1.0-beta.6 dep range includes Vite 8 | +| `@vitest/coverage-istanbul` | `^4.0.18` | **No** (peer: `vitest: 4.0.18`) | `4.1.0-beta.6` | No — matches vitest beta | +| `@vitest/coverage-v8` | `^4.0.18` | **No** (peer: `vitest: 4.0.18`) | `4.1.0-beta.6` | No — matches vitest beta | +| `@vitest/ui` | `^4.0.18` | **No** (peer: `vitest: 4.0.18`) | `4.1.0-beta.6` | No — matches vitest beta | +| `@vitest/eslint-plugin` | `^1.6.10` | Yes (peer: `vitest: *`) | Keep current | No | +| `@bgotink/playwright-coverage` | `^0.3.2` | Yes (no Vite peer dep) | Keep current | No | +| `@playwright/test` | `^1.58.2` | Yes (no Vite peer dep) | Keep current | No | + +**Key constraints:** + +- `vitest@4.0.18` has `vite` in its **dependencies** (not peer deps) pinned to `^6.0.0 || ^7.0.0` — this will refuse Vite 8 unless overridden +- `vitest@4.1.0-beta.6` extends this to `^6.0.0 || ^7.0.0 || ^8.0.0-0` — supports Vite 8 beta +- `@vitejs/plugin-react@6.0.0-beta.0` peers on `vite: ^8.0.0` (verified via `npm info`). New optional peer deps: `@rolldown/plugin-babel` and `babel-plugin-react-compiler` (both optional — not required) +- All `@vitest/*` packages at `4.1.0-beta.6` must be installed together (strict peer version matching: `vitest: 4.1.0-beta.6`) +- Since `vitest@4.1.0-beta.6` already includes `^8.0.0-0` in its `vite` dependency range, and all `@vitest/*` packages peer to exact `vitest: 4.1.0-beta.6`, **no npm overrides are needed** when all packages are installed in lockstep at their beta versions + +--- + +## 5. 
`.npmrc` Configuration + +**No `.npmrc` file currently exists in the project.** No changes needed for these upgrades. + +If plugin compatibility issues arise during ESLint v10 upgrade, **do NOT create an `.npmrc` with `legacy-peer-deps=true`**. Instead, wait for plugin updates or use granular `overrides` in `package.json`: + +```jsonc +// package.json — ONLY if a specific plugin ships a fix before updating peerDeps +{ + "overrides": { + "eslint-plugin-EXAMPLE": { + "eslint": "^10.0.0" + } + } +} +``` + +--- + +## 6. Dockerfile Changes + +**No Dockerfile changes required** for ESLint v10 or TypeScript 6.0. + +**Vite 8 requires Dockerfile changes** — the Rollup native skip flags become irrelevant: + +```diff + # Set environment to bypass native binary requirement for cross-arch builds +- ENV npm_config_rollup_skip_nodejs_native=1 \ +- ROLLUP_SKIP_NODEJS_NATIVE=1 ++ # Vite 8 uses Rolldown (Rust native bindings, auto-resolved per platform) ++ # No skip flags needed — Rolldown's optionalDependencies handle cross-platform +``` + +Current Dockerfile state (frontend-builder stage): + +```dockerfile +FROM --platform=$BUILDPLATFORM node:24.14.0-alpine AS frontend-builder +# ... +ENV npm_config_rollup_skip_nodejs_native=1 \ + ROLLUP_SKIP_NODEJS_NATIVE=1 +RUN npm ci +COPY frontend/ ./ +RUN npm run build +``` + +- Node.js 24.14.0 meets Vite 8's requirement (`^20.19.0 || >=22.12.0`) +- `npm ci` will install Rolldown's `@rolldown/binding-linux-x64-musl` automatically on Alpine +- `--platform=$BUILDPLATFORM` ensures native bindings match the build machine architecture +- The `VITE_APP_VERSION` env var and build output (`dist/`) remain unchanged +- No new environment variables or build args needed + +**Future (Vite 8):** If Vite 8 requires a higher Node.js, upgrade the base image at that time. + +--- + +## 7. 
Config File Changes + +### 7.1 TypeScript 6.0 — `frontend/tsconfig.json` + +```diff + { + "compilerOptions": { + "target": "ES2022", ++ // Consider upgrading to "ES2025" (TS 6.0 new target) + "useDefineForClassFields": true, +- "lib": ["ES2022", "DOM", "DOM.Iterable"], ++ "lib": ["ES2022", "DOM"], ++ // DOM.Iterable is now included in DOM as of TS 6.0 + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "isolatedModules": true, + "moduleDetection": "force", + "noEmit": true, + "jsx": "react-jsx", + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true, ++ ++ /* TS 6.0 — explicit types to override new default of [] */ ++ "types": [] + }, + "include": ["src"], + "references": [{ "path": "./tsconfig.node.json" }] + } +``` + +**Key changes:** + +1. **`"types": []`** — Explicitly set to `[]`. Charon uses `noEmit: true` and doesn't rely on global `@types` packages in the main tsconfig. All types come from explicit imports. +2. **`"lib"` simplification** — Remove `"DOM.Iterable"` since TS 6.0 includes it in `"DOM"` automatically. +3. **`"target"` consideration** — Can optionally upgrade from `ES2022` to `ES2025` to access `RegExp.escape` and other ES2025 types natively. Not required. + +### 7.2 TypeScript 6.0 — `frontend/tsconfig.node.json` + +```diff + { + "compilerOptions": { + "composite": true, + "skipLibCheck": true, + "module": "ESNext", + "moduleResolution": "bundler", + "allowSyntheticDefaultImports": true, +- "strict": true ++ "strict": true, ++ "types": [] + }, + "include": ["vite.config.ts"] + } +``` + +**Note:** `allowSyntheticDefaultImports` is fine — TS 6.0 deprecates setting it to `false`, not `true`. Setting it to `true` remains valid. 
+ +### 7.3 ESLint v10 — `frontend/package.json` Version Caps + +```diff + "devDependencies": { +- "eslint": "^9.39.3 <10.0.0", ++ "eslint": "^10.0.0", +- "@eslint/js": "^9.39.3 <10.0.0", ++ "@eslint/js": "^10.0.0", + // ... all other ESLint plugins may need version bumps + } +``` + +### 7.4 ESLint v10 — `frontend/eslint.config.js` + +Likely no structural changes needed since Charon already uses flat config. Potential changes: + +- Remove any `/* eslint-env */` comments found in source files +- Handle new `eslint:recommended` rules (`no-unassigned-vars`, `no-useless-assignment`, `preserve-caught-error`) +- Verify `tseslint.config()` wrapper compatibility + +### 7.5 ESLint v10 — `lefthook.yml` + +```diff ++ # NOTE: ESLint v10 is supported — plugin compatibility verified on [DATE] +- # NOTE: ESLint pinned at v9.x.x — do not upgrade until react-hooks plugin supports v10. +``` + +### 7.6 TypeScript 6.0 — `package.json` (Root + Frontend) + +```diff + "devDependencies": { +- "typescript": "^5.9.3", ++ "typescript": "^6.0.0", + } +``` + +--- + +## 8. Phase-by-Phase Implementation Plan + +### Phase 1: Pre-Upgrade Verification (Both PRs) + +**Owner:** Frontend_Dev agent (or whoever picks up the PR) + +1. **Snapshot current state:** + + ```bash + cd /projects/Charon && npm run lint 2>&1 | tee /tmp/eslint-v9-baseline.log + cd /projects/Charon/frontend && npx tsc --noEmit 2>&1 | tee /tmp/tsc-v5-baseline.log + ``` + +2. 
**Verify ESLint plugin compatibility (PR-2 gate):** + + ```bash + for plugin in eslint-plugin-react-hooks eslint-plugin-react-compiler \ + eslint-plugin-jsx-a11y eslint-plugin-import-x eslint-plugin-security \ + eslint-plugin-sonarjs eslint-plugin-unicorn eslint-plugin-promise \ + eslint-plugin-unused-imports eslint-plugin-no-unsanitized \ + eslint-plugin-testing-library eslint-plugin-react-refresh \ + @vitest/eslint-plugin typescript-eslint @eslint/css @eslint/json @eslint/markdown; do + echo "=== $plugin ===" && npm info "$plugin" peerDependencies 2>/dev/null + done + ``` + +3. **Search for `eslint-env` comments:** + + ```bash + grep -r "eslint-env" frontend/src/ --include="*.ts" --include="*.tsx" --include="*.js" + ``` + +### Phase 2: TypeScript 6.0 Upgrade (PR-1) + +**Scope:** TypeScript version bump + tsconfig adjustments + +1. Update `typescript` version in both `package.json` files: + - Root: `^5.9.3` → `^6.0.0` + - Frontend: `^5.9.3` → `^6.0.0` + +2. Apply tsconfig changes (Section 7.1 and 7.2 above): + - Add `"types": []` to `tsconfig.json` and `tsconfig.node.json` + - Remove `"DOM.Iterable"` from `lib` array (now included in `"DOM"`) + +3. Run `npm install` to update lock file + +4. Run type-check and fix any new errors: + + ```bash + cd frontend && npx tsc --noEmit + ``` + +5. Common expected issues: + - Missing types from `@types/*` packages (solved by `"types": []` since we don't use globals) + - `ArrayBuffer`/`Buffer` type narrowing (from TS 5.9 lib.d.ts changes) + - Type argument inference changes (may need explicit type annotations) + +6. Run full test suite: + + ```bash + cd frontend && npx vitest run + ``` + +7. Run Playwright E2E tests to verify build works: + + ```bash + # The Dockerfile builds with npm ci && npm run build + # Verify: cd frontend && npx vite build + ``` + +### Phase 3: ESLint v10 Upgrade (PR-2) + +**Prerequisite:** Phase 1 plugin verification passes. `eslint-plugin-react-hooks` must declare ESLint v10 support. + +1. 
Remove version cap and update ESLint packages: + + ```bash + cd frontend + npm install -D eslint@^10.0.0 @eslint/js@^10.0.0 + ``` + +2. Update any plugins that need version bumps for ESLint v10 compat + +3. Run ESLint and compare against baseline: + + ```bash + cd /projects/Charon && npm run lint 2>&1 | tee /tmp/eslint-v10-output.log + diff /tmp/eslint-v9-baseline.log /tmp/eslint-v10-output.log + ``` + +4. Address new violations from updated `eslint:recommended`: + - `no-unassigned-vars` — variables declared but never assigned + - `no-useless-assignment` — assignments that are immediately overwritten + - `preserve-caught-error` — re-thrown errors must preserve the originally caught error (pass it as the `cause` option of the new error) + +5. Remove any `/* eslint-env */` comments found in Phase 1 + +6. Update `lefthook.yml` — remove the ESLint v9 pin note + +7. Run full test suite to confirm no regressions + +### Phase 4: Integration Testing + +1. **Full lint + type-check:** + + ```bash + cd /projects/Charon && npm run lint && cd frontend && npx tsc --noEmit + ``` + +2. **Frontend build:** + + ```bash + cd frontend && npx vite build + ``` + +3. **Unit tests:** + + ```bash + cd frontend && npx vitest run + ``` + +4. **Playwright E2E tests (all browsers):** + + ```bash + npx playwright test --project=chromium + npx playwright test --project=firefox + npx playwright test --project=webkit + ``` + +5. **Docker build verification:** + + ```bash + docker build -t charon:upgrade-test . + ``` + +### Phase 5: Vite 8 Upgrade (PR-3 — stacked commit on same branch) + +**Prerequisites:** PR-1 (TypeScript 6.0) and PR-2 (ESLint v10) already committed on branch. + +**Scope:** Vite `^7.3.1` → `8.0.0-beta.18`, plugin-react `^5.1.4` → `6.0.0-beta.0`, vitest `^4.0.18` → `4.1.0-beta.6`, vite.config.ts migration, Dockerfile cleanup.
+ +#### Step 1: Install Vite 8 and ecosystem packages + +```bash +cd /projects/Charon/frontend + +# Core Vite upgrade +npm install -D vite@8.0.0-beta.18 + +# Plugin-react upgrade (6.x required for Vite 8) +npm install -D @vitejs/plugin-react@6.0.0-beta.0 + +# Vitest + coverage upgrades (4.1.0-beta.6 supports Vite 8) +npm install -D vitest@4.1.0-beta.6 \ + @vitest/coverage-istanbul@4.1.0-beta.6 \ + @vitest/coverage-v8@4.1.0-beta.6 \ + @vitest/ui@4.1.0-beta.6 +``` + +#### Step 2: Update root `package.json` (direct version bump only — no overrides) + +The root `package.json` only has `vite` as a direct devDependency (used by Playwright). It does **not** need overrides — just a version bump: + +```bash +cd /projects/Charon +npm install -D vite@8.0.0-beta.18 +``` + +#### Step 3: Verify peer dep resolution (overrides likely NOT needed) + +With all packages at their Vite 8-compatible versions, overrides should not be necessary: + +- `vitest@4.1.0-beta.6` depends on `vite: ^6.0.0 || ^7.0.0 || ^8.0.0-0` — already includes Vite 8 +- `@vitejs/plugin-react@6.0.0-beta.0` peers on `vite: ^8.0.0` — matches +- All `@vitest/*@4.1.0-beta.6` peer on `vitest: 4.1.0-beta.6` — matches when installed in lockstep + +Run `npm install` and check for peer dep warnings. **Only add overrides in `frontend/package.json`** (following the established pattern from TS 6.0 and ESLint v10 phases) if specific transitive packages fail to resolve: + +```jsonc +// frontend/package.json — ONLY if npm install reports unresolved peer deps +{ + "overrides": { + // ... existing TS and ESLint overrides ... + // Add scoped overrides ONLY for the specific package that fails, e.g.: + // "some-transitive-package": { "vite": "8.0.0-beta.18" } + } +} +``` + +**Do NOT add a top-level `"vite": "8.0.0-beta.18"` override** — this forces every transitive Vite consumer to resolve to the beta, which is overly broad. 
If a broad override is truly needed after testing, add it with a comment explaining which transitive package requires it. + +#### Step 4: Migrate `vite.config.ts` + +```diff + import react from '@vitejs/plugin-react' + import { defineConfig } from 'vite' + + export default defineConfig({ + plugins: [react()], + server: { + port: 5173, + proxy: { + '/api': { + target: 'http://localhost:8080', + changeOrigin: true + } + } + }, + build: { + outDir: 'dist', + sourcemap: true, +- // TEMPORARY: Disable code splitting to diagnose React initialization issue +- // If this works, the problem is module loading order in async chunks + chunkSizeWarningLimit: 2000, +- rollupOptions: { +- output: { +- // Disable code splitting - bundle everything into one file +- manualChunks: undefined, +- inlineDynamicImports: true +- } +- } ++ rolldownOptions: { ++ output: { ++ // Disable code splitting — single bundle for React init stability ++ // codeSplitting: false is the Rolldown-native approach ++ // (inlineDynamicImports is deprecated in Rolldown) ++ codeSplitting: false ++ } ++ } + } + }) +``` + +**Key changes:** +1. `rollupOptions` → `rolldownOptions` (Rollup config key deprecated) +2. `manualChunks: undefined` removed (object form no longer supported; was already a no-op since `undefined`) +3. `inlineDynamicImports: true` replaced with `codeSplitting: false` — the Rolldown-native equivalent. Rolldown supports `inlineDynamicImports` but marks it as [deprecated](https://rolldown.rs/reference/OutputOptions.inlineDynamicImports) in favor of `codeSplitting: false`. +4. 
The TEMPORARY comment is preserved in intent — this workaround may still be needed + +**Fallback if `codeSplitting: false` behaves differently than expected:** + +```ts +build: { + rolldownOptions: { + output: { + // Deprecated but still functional in Rolldown 1.0.0-rc.8 + inlineDynamicImports: true + } + } +} +``` + +#### Step 5: Update Dockerfile + +Remove the now-irrelevant Rollup native skip flags: + +```diff +- ENV npm_config_rollup_skip_nodejs_native=1 \ +- ROLLUP_SKIP_NODEJS_NATIVE=1 ++ # Vite 8: Rolldown native bindings auto-resolved per platform via optionalDependencies +``` + +#### Step 6: Run `npm install` to regenerate lock file + +```bash +cd /projects/Charon && npm install +cd /projects/Charon/frontend && npm install +``` + +#### Step 7: Verify builds and tests + +```bash +# 1. Frontend build (most critical — tests Rolldown bundling) +cd /projects/Charon/frontend && npx vite build + +# 2. Type-check (should be unaffected) +cd /projects/Charon/frontend && npx tsc --noEmit + +# 3. Lint (should be unaffected) +cd /projects/Charon && npm run lint + +# 4. Unit tests +cd /projects/Charon/frontend && npx vitest run + +# 5. Docker build (tests Rolldown on Alpine/musl) +docker build -t charon:vite8-test . + +# 6. Playwright E2E (tests the built app end-to-end) +cd /projects/Charon && npx playwright test --project=firefox + +# 7. CJS interop smoke test (verify axios, react-hot-toast, react-hook-form) +# Run the app and manually verify pages that use CJS dependencies render correctly +# See Step 9 for detailed CJS interop verification checklist +``` + +#### Step 8: Verify build output + +```bash +# Compare build output size and structure +ls -la frontend/dist/assets/ +# Should still produce index-*.js, index-*.css +# With codeSplitting: false, should be a single JS bundle +``` + +#### Step 9: Verify CJS interop (Vite 8 behavior change) + +Vite 8's consistent CJS interop may affect imports from CJS packages like `axios` and `react-hot-toast`. 
**Explicitly verify these packages work at runtime:** + +```bash +# After Docker build or vite build + preview: +# 1. Verify axios API calls work (CJS package with __esModule flag) +# - Navigate to any page that makes API calls (e.g., Dashboard) +# - Check browser console for "default is not a function" errors +# 2. Verify react-hot-toast renders (CJS package) +# - Trigger a toast notification (e.g., save settings) +# - Check browser console for import errors +# 3. Verify react-hook-form works (CJS interop) +# - Open any form page, submit a form +``` + +If any runtime errors appear (e.g., `default is not a function`), use the temporary escape hatch: + +```ts +// vite.config.ts — ONLY if CJS interop breaks +export default defineConfig({ + legacy: { + inconsistentCjsInterop: true + } +}) +``` + +#### Step 10: Update `ARCHITECTURE.md` + +Update the Frontend technology stack table and directory structure to reflect current versions: + +```diff + ### Frontend + | Component | Technology | Version | Purpose | +- | **Build Tool** | Vite | 6.1.9 | Fast bundler and dev server | ++ | **Build Tool** | Vite | 8.0.0-beta.18 | Fast bundler and dev server | +- | **CSS Framework** | Tailwind CSS | 3.x | Utility-first CSS | ++ | **CSS Framework** | Tailwind CSS | 4.2.1 | Utility-first CSS | +- | **Unit Testing** | Vitest | 2.x | Fast unit test runner | ++ | **Unit Testing** | Vitest | 4.1.0-beta.6 | Fast unit test runner | +- | **E2E Testing** | Playwright | 1.50.x | Browser automation | ++ | **E2E Testing** | Playwright | 1.58.2 | Browser automation | +``` + +Also fix the directory structure reference: + +```diff +- │ └── vite.config.js # Vite configuration ++ │ └── vite.config.ts # Vite configuration +``` + +--- + +## 9. Rollback Strategy + +### TypeScript 6.0 Rollback (PR-1) + +1. Revert `package.json` changes (both root and frontend): + + ```diff + - "typescript": "^6.0.0" + + "typescript": "^5.9.3" + ``` + +2. 
Revert `tsconfig.json` changes (remove `"types": []`, restore `"DOM.Iterable"`) +3. Run `npm install` to restore lock file +4. Verify: `cd frontend && npx tsc --noEmit && npx vitest run` + +**Risk:** Low — TypeScript version is a devDependency only. No runtime impact. `git revert` of the PR commit is sufficient. + +### ESLint v10 Rollback (PR-2) + +1. Revert `package.json` changes: + + ```diff + - "eslint": "^10.0.0" + + "eslint": "^9.39.3 <10.0.0" + - "@eslint/js": "^10.0.0" + + "@eslint/js": "^9.39.3 <10.0.0" + ``` + +2. Revert any plugin version bumps +3. Revert `lefthook.yml` comment change +4. Run `npm install` to restore lock file +5. Verify: `cd /projects/Charon && npm run lint` + +**Risk:** Low — ESLint is a devDependency only. Code changes (fixing new rule violations) are harmless to keep even if ESLint is rolled back. + +### Vite 8 Rollback (PR-3 commit) + +1. Revert `vite` version in both `package.json` files: + + ```diff + - "vite": "8.0.0-beta.18" + + "vite": "^7.3.1" + ``` + +2. Revert ecosystem packages in `frontend/package.json`: + + ```diff + - "@vitejs/plugin-react": "6.0.0-beta.0" + + "@vitejs/plugin-react": "^5.1.4" + - "vitest": "4.1.0-beta.6" + + "vitest": "^4.0.18" + - "@vitest/coverage-istanbul": "4.1.0-beta.6" + + "@vitest/coverage-istanbul": "^4.0.18" + - "@vitest/coverage-v8": "4.1.0-beta.6" + + "@vitest/coverage-v8": "^4.0.18" + - "@vitest/ui": "4.1.0-beta.6" + + "@vitest/ui": "^4.0.18" + ``` + +3. Revert `vite.config.ts`: `rolldownOptions` → `rollupOptions`, restore `manualChunks: undefined` + +4. Revert Dockerfile: restore `ROLLUP_SKIP_NODEJS_NATIVE=1` env vars + +5. Remove Vite 8 overrides from `frontend/package.json` + +6. Run `npm install` to restore lock file + +7. Verify: `cd frontend && npx vite build && npx vitest run` + +**Risk:** Medium — Vite 8 is a pre-release beta. More likely to need rollback than stable upgrades. 
Since this is a stacked commit on the same branch, `git revert HEAD` cleanly removes only the Vite 8 changes while preserving TS 6.0 and ESLint v10. + +--- + +## 10. Testing Strategy + +### Automated Test Coverage + +| Test Layer | Tool | What It Validates | +|---|---|---| +| Type checking | `tsc --noEmit` | TS 6.0 compatibility, tsconfig changes | +| Linting | `eslint` | ESLint v10 config + plugin compat | +| Unit tests | `vitest run` | No runtime regressions from TS changes | +| E2E tests | Playwright (Chromium, Firefox, WebKit) | Full app build + functionality | +| Docker build | `docker build` | Dockerfile still works with new deps | +| Pre-commit hooks | `lefthook` | All hooks pass with new versions | + +### Specific Test Scenarios for TS 6.0 + +1. **Build output verification:** + + ```bash + cd frontend && npx vite build + # Verify dist/ output is correct, no new warnings + ``` + +2. **Type-check with `--stableTypeOrdering`** (prep for TS 7.0): + + ```bash + cd frontend && npx tsc --noEmit --stableTypeOrdering + # Note any differences — these will be real in TS 7.0 + ``` + +3. **Verify no `@types` resolution issues:** + + ```bash + # With types: [], ensure no global type errors appear + cd frontend && npx tsc --noEmit 2>&1 | grep "Cannot find" + ``` + +### Specific Test Scenarios for ESLint v10 + +1. **Verify all 18 plugins load without errors:** + + ```bash + cd /projects/Charon && npx eslint --print-config frontend/src/App.tsx | head -20 + ``` + +2. **Count new violations vs baseline:** + + ```bash + npx eslint frontend/src/ --format json 2>/dev/null | jq '.[] | .errorCount' | paste -sd+ | bc + ``` + +3. **Verify config lookup works correctly in monorepo:** + + ```bash + # Lint a file from the root — should find root eslint.config.js + npx eslint frontend/src/App.tsx + ``` + +--- + +## 11. 
Commit Slicing Strategy + +### Decision: 3 Stacked Commits on Single Branch + +**Trigger reasons:** + +- Cross-domain changes (TS and ESLint are independent tools) +- Risk isolation (if one breaks, the other can still merge) +- Review size (each PR is focused and reviewable) +- Plugin compatibility gate (ESLint v10 may be blocked) + +### PR-1: TypeScript 6.0 Upgrade + +| Attribute | Detail | +|---|---| +| **Scope** | TypeScript ^5.9.3 → ^6.0.0, tsconfig changes, fix type errors | +| **Files** | `package.json` (root), `frontend/package.json`, `package-lock.json`, `frontend/tsconfig.json`, `frontend/tsconfig.node.json`, possibly source files with type fixes | +| **Dependencies** | None — can start immediately | +| **Validation Gate** | `tsc --noEmit` passes, `vitest run` passes, `vite build` succeeds, Docker build succeeds | +| **Estimated Complexity** | Medium — mostly defaults are already correct, `types: []` is the main change | +| **Rollback** | `git revert` + `npm install` | + +### PR-2: ESLint v10 Upgrade + +| Attribute | Detail | +|---|---| +| **Scope** | ESLint ^9.x → ^10.0.0, plugin updates, fix new violations, update lefthook | +| **Files** | `frontend/package.json`, `package-lock.json`, `frontend/eslint.config.js` (if needed), `lefthook.yml`, source files with new violations | +| **Dependencies** | **BLOCKED** until `eslint-plugin-react-hooks` declares ESLint v10 support | +| **Validation Gate** | `npm run lint` passes, all plugins load, no new unhandled violations | +| **Estimated Complexity** | Medium — depends on plugin ecosystem readiness | +| **Rollback** | `git revert` + `npm install` | + +### PR-3: Vite 8 Upgrade (stacked commit on same branch) + +| Attribute | Detail | +|---|---| +| **Scope** | Vite 7→8, plugin-react 5→6, vitest 4.0→4.1-beta, vite.config.ts migration, Dockerfile cleanup | +| **Files** | `package.json` (root), `frontend/package.json`, `package-lock.json`, `frontend/vite.config.ts`, `Dockerfile`, `ARCHITECTURE.md` | +| 
**Dependencies** | PR-1 (TS 6.0) and PR-2 (ESLint v10) already committed on branch | +| **Validation Gate** | `vite build` succeeds with Rolldown, `vitest run` passes, Docker build succeeds, Playwright E2E passes | +| **Estimated Complexity** | **High** — beta software, bundler engine swap (Rollup→Rolldown), multiple ecosystem packages at beta versions | +| **Rollback** | `git revert HEAD` — cleanly removes only the Vite 8 commit | + +#### npm Overrides for PR-3 + +**No overrides expected** when all packages are installed at their beta versions in lockstep: +- `vitest@4.1.0-beta.6` deps include `vite: ^8.0.0-0` — resolves Vite 8 without override +- `@vitest/*@4.1.0-beta.6` peer on `vitest: 4.1.0-beta.6` — satisfied by direct install + +If `npm install` fails, add **scoped** overrides in `frontend/package.json` only for the failing package. Do not add a broad `"vite": "8.0.0-beta.18"` override. + +### Contingency + +- If TS 6.0 stable is delayed past RC, pin to `typescript@6.0.0-rc` temporarily +- If ESLint v10 plugin compat is blocked for >30 days, consider temporarily dropping the blocker plugin or shipping local replacement rules as an inline custom plugin in the flat config (note: `--rulesdir` was removed in ESLint v9 and is not available) +- If a plugin is permanently abandoned, research replacement plugins +- If Vite 8 beta has blocking regressions, `git revert` the Vite 8 commit and wait for the next beta or stable release — TS 6.0 + ESLint v10 upgrades remain unaffected +- If `vitest@4.1.0-beta.6` fails tests, try pinning `vitest@4.0.18` with an `overrides` entry for its `vite` dependency (force it to accept `^8.0.0-0`) +- If Rolldown's `codeSplitting: false` behaves differently than expected, try the deprecated `inlineDynamicImports: true` as a fallback, or re-investigate the React initialization issue that motivated the workaround + +--- + +## 12. Known Issues & Gotchas + +### ESLint v10 + +1. **react-hooks plugin blocker** — `lefthook.yml` explicitly states the upgrade is blocked until `eslint-plugin-react-hooks` supports v10. This is the #1 risk. + +2.
**Config file lookup change** — ESLint v10 finds config files starting from the linted file and walking up. In Charon's monorepo setup (root `eslint.config.js` imports `frontend/eslint.config.js`), verify the root config is still discovered when linting `frontend/src/**`. + +3. **Jiti dependency** — ESLint v10 requires `jiti >= v2.2.0` for loading config files. This is typically a transitive dependency but may need explicit installation if conflicts arise. + +4. **Plugin API breakage** — Plugins that use deprecated `context.getScope()`, `context.getAncestors()`, `context.parserOptions`, or `context.parserPath` will break. All 18 plugins must be verified. + +### TypeScript 6.0 + +1. **`types: []` default** — This is the highest-impact change for Charon. Without explicitly setting `"types"`, TS 6.0 will not auto-load any `@types/*` packages. Since Charon uses `noEmit: true` and explicit imports, this should be fine, but test thoroughly. + +2. **TS 6.0 is a transition release** — It is explicitly designed as a bridge to TS 7.0 (native Go port). Adopting TS 6.0 now prepares us for TS 7.0 later. The `ignoreDeprecations: "6.0"` escape hatch exists if needed. + +3. **`typescript-eslint` compatibility** — If `typescript-eslint@8.57.0` doesn't support TS 6.0, we may need to update it. Check for a release that adds TS 6.0 support. + +4. **`knip` compatibility** — `knip` (`^5.86.0`) uses TS programmatic API internally. Verify it works with TS 6.0. + +5. **ArrayBuffer/Buffer types** — TS 5.9 changes to `lib.d.ts` around `ArrayBuffer` not being a supertype of `TypedArray` may surface with TS 6.0. Ensure `@types/node` is at latest. + +6. **`ts5to6` migration tool** — The experimental [ts5to6](https://github.com/andrewbranch/ts5to6) tool can automatically adjust `baseUrl` and `rootDir`. Charon doesn't use `baseUrl`, so this is of limited value, but worth knowing about. + +### Vite 8 + +1. **Beta software** — `8.0.0-beta.18` is pre-release. 
Expect edge cases and undocumented behavior. File issues at `https://github.com/vitejs/rolldown-vite/issues`. + +2. **Rolldown bundler is RC, not stable** — Vite 8 depends on `rolldown@1.0.0-rc.8`. Rolldown is feature-complete but may have edge cases with complex chunk splitting configurations. + +3. **`codeSplitting: false` replaces `inlineDynamicImports: true`** — `frontend/vite.config.ts` has a `TEMPORARY` workaround for a "React init issue". Rolldown supports `inlineDynamicImports` but marks it as [deprecated](https://rolldown.rs/reference/OutputOptions.inlineDynamicImports) in favor of `codeSplitting: false`. The migration uses `codeSplitting: false` as the primary approach; `inlineDynamicImports: true` can be used as a deprecated fallback. + +4. **Oxc Minifier assumptions differ from esbuild** — The Oxc Minifier makes [different assumptions](https://oxc.rs/docs/guide/usage/minifier.html#assumptions) about source code than esbuild. If runtime errors appear after build but not in dev, the minifier is the likely culprit. Use `build.minify: false` temporarily to diagnose. + +5. **CJS interop behavior change** — Vite 8 changes how `default` imports from CommonJS modules work. Packages like `axios` (CJS) may be affected. The `legacy.inconsistentCjsInterop: true` escape hatch exists if needed. + +6. **All ecosystem packages are beta** — `@vitejs/plugin-react@6.0.0-beta.0`, `vitest@4.1.0-beta.6`, and all `@vitest/*` packages are pre-release. They are tightly version-locked (e.g., `@vitest/coverage-v8` peers to exact `vitest: 4.1.0-beta.6`). + +7. **Plugin-react 6.0 API change** — The new `@vitejs/plugin-react@6.0.0-beta.0` uses `@rolldown/pluginutils` internally instead of `@rollup/pluginutils`. The public API (`react()` call in config) appears unchanged. New optional peer deps (`@rolldown/plugin-babel`, `babel-plugin-react-compiler`) are not required for Charon's usage. + +8. 
**Lightning CSS may increase CSS bundle size** — Lightning CSS produces slightly different output than esbuild's CSS minifier. Verify CSS output and check for visual regressions. + +9. **Cross-platform Docker builds** — Rolldown uses native Rust bindings per platform (`@rolldown/binding-linux-x64-musl` for Alpine). The `--platform=$BUILDPLATFORM` Docker flag ensures the correct binding is installed. If cross-arch builds fail, verify the correct `@rolldown/binding-*` package is being resolved. + +--- + +## 13. Risk Assessment + +| Risk | Probability | Impact | Mitigation | +|---|---|---|---| +| `eslint-plugin-react-hooks` doesn't support ESLint v10 | **Medium** | **High** — blocks PR-2 entirely | Monitor npm for updates; check GitHub issues | +| Other ESLint plugins break on v10 | **Low** | **Medium** — individual plugins can be disabled | Verify all 18 plugins; have disable config ready | +| TS 6.0 `types: []` causes unexpected errors | **Medium** | **Low** — easy to fix by adding types | Test with `tsc --noEmit`; add specific types | +| `typescript-eslint` incompatible with TS 6.0 | **Low** | **Medium** — blocks type-aware linting | Check releases; may need to update | +| `knip` breaks with TS 6.0 | **Low** | **Low** — `knip` is optional tooling | Test separately; pin if needed | +| TS 6.0 stable delayed | **Low** | **Low** — RC already available | Use RC or pin beta | +| Vite 8 beta breaks production build | **Medium** | **High** — blocks Docker/deployment | Test `vite build` thoroughly; rollback with `git revert` | +| Rolldown CJS interop breaks runtime imports | **Medium** | **Medium** — runtime errors on CJS packages | Test all CJS deps (axios, etc.); use `legacy.inconsistentCjsInterop` escape | +| Oxc Minifier causes runtime errors | **Low** | **High** — minification bugs are subtle | Compare dev vs prod behavior; use `build.minify: false` to diagnose | +| `vitest@4.1.0-beta.6` incompatible with test suite | **Low** | **Medium** — blocks unit test validation 
| Pin to `4.0.18` + override vite peer if needed | +| `@vitejs/plugin-react@6.0.0-beta.0` breaks React HMR | **Low** | **Medium** — dev experience degraded | Rollback to 5.1.4 + Vite 7 if critical | +| Rolldown native binding fails on Alpine cross-build | **Low** | **High** — blocks Docker build entirely | Verify `@rolldown/binding-linux-x64-musl` resolves; fall back to non-cross-platform build | +| Lightning CSS produces visual CSS regressions | **Low** | **Low** — cosmetic issues only | Visual diff E2E screenshots | +| Docker build fails after upgrades | **Low** | **Medium** — blocks CI/deployment | Test Docker build in PR CI | +| Playwright E2E failures from TS changes | **Very Low** | **High** — blocks merge | Run full E2E suite before merge | + +### Overall Risk: **MEDIUM-HIGH** + +- TypeScript 6.0 is well-characterized and Charon's tsconfig is well-aligned with the new defaults +- ESLint v10 is dependent on ecosystem readiness (plugin compatibility) +- **Vite 8 is the highest-risk change** — beta software with a complete bundler engine swap (Rollup→Rolldown). 
The saving grace is that all three upgrades are separate commits on the same branch, enabling surgical rollback of just the Vite 8 commit if needed + +--- + +## Acceptance Criteria + +### PR-1 (TypeScript 6.0) + +- [ ] `typescript` upgraded to `^6.0.0` in root and frontend `package.json` +- [ ] `tsconfig.json` updated with `types: []` and simplified `lib` +- [ ] `tsc --noEmit` passes with zero errors +- [ ] `vitest run` passes all tests +- [ ] `vite build` produces correct output +- [ ] Docker build succeeds +- [ ] No new `ignoreDeprecations` usage (clean upgrade) + +### PR-2 (ESLint v10) + +- [ ] Plugin compatibility verified for all 18 plugins +- [ ] `eslint` and `@eslint/js` upgraded to `^10.0.0` +- [ ] Version cap (`<10.0.0`) removed from both packages +- [ ] `npm run lint` passes (new violations fixed) +- [ ] `lefthook.yml` pin note removed/updated +- [ ] All pre-commit hooks pass + +### PR-3 (Vite 8) + +- [ ] `vite` upgraded to `8.0.0-beta.18` in root and frontend `package.json` +- [ ] `@vitejs/plugin-react` upgraded to `6.0.0-beta.0` +- [ ] `vitest` upgraded to `4.1.0-beta.6` with matching `@vitest/*` packages +- [ ] `vite.config.ts` migrated: `rollupOptions` → `rolldownOptions`, `manualChunks` removed +- [ ] npm overrides verified: no broad overrides needed (or scoped overrides added with justification) +- [ ] Dockerfile: Rollup native skip flags removed +- [ ] `vite build` produces correct output with Rolldown bundler +- [ ] `vitest run` passes all unit tests +- [ ] `tsc --noEmit` still passes (unchanged from PR-1) +- [ ] Docker build succeeds with Rolldown on Alpine/musl +- [ ] Playwright E2E tests pass (all browsers) +- [ ] No CJS interop runtime errors (axios, react-hot-toast, etc.) 
+- [ ] CJS interop verified: axios API calls, react-hot-toast renders, react-hook-form submits work +- [ ] CSS output visually correct (Lightning CSS minification) +- [ ] `ARCHITECTURE.md` updated: Vite 8.0.0-beta.18, Vitest 4.1.0-beta.6, Playwright 1.58.2, Tailwind CSS 4.2.1, `vite.config.ts` filename +- [ ] Pre-commit hooks pass (`lefthook`) diff --git a/docs/plans/archive/telegram_test_remediation_spec.md b/docs/plans/archive/telegram_test_remediation_spec.md new file mode 100644 index 00000000..12f1e701 --- /dev/null +++ b/docs/plans/archive/telegram_test_remediation_spec.md @@ -0,0 +1,497 @@ +# Telegram Notification Provider — Test Failure Remediation Plan + +**Date:** 2026-03-11 +**Author:** Planning Agent +**Status:** Remediation Required — All security scans pass, test failures block merge +**Previous Plan:** Archived as `docs/plans/telegram_implementation_spec.md` + +--- + +## 1. Introduction + +The Telegram notification provider feature is functionally complete with passing security scans and coverage gates. However, **56 E2E test failures** and **2 frontend unit test failures** block the PR merge. This plan identifies root causes, categorises each failure set, and provides specific remediation steps. + +### Failure Summary + +| Spec File | Failures | Browsers | Unique Est. | Category | +|---|---|---|---|---| +| `notifications.spec.ts` | 48 | 3 | ~16 | **Our change** | +| `notifications-payload.spec.ts` | 18 | 3 | ~6 | **Our change** | +| `telegram-notification-provider.spec.ts` | 4 | 1–3 | ~2 | **Our change** | +| `encryption-management.spec.ts` | 20 | 3 | ~7 | Pre-existing | +| `auth-middleware-cascade.spec.ts` | 18 | 3 | 6 | Pre-existing | +| `Notifications.test.tsx` (unit) | 2 | — | 2 | **Our change** | + +CI retries: 2 per test (`playwright.config.js` L144). Failure counts above represent unique test failures × browser projects. + +--- + +## 2. 
Root Cause Analysis + +### Root Cause A: `isNew` Guard on Test Button (CRITICAL — Causes ~80% of failures) + +**What changed:** The Telegram feature added a guard in `Notifications.tsx` (L117-124) that blocks the "Test" button for new (unsaved) providers: + +```typescript +// Line 117-124: handleTest() early return guard +const handleTest = () => { + const formData = watch(); + const currentType = normalizeProviderType(formData.type); + if (!formData.id && currentType !== 'email') { + toast.error(t('notificationProviders.saveBeforeTesting')); + return; + } + testMutation.mutate({ ...formData, type: currentType } as Partial); +}; +``` + +And a `disabled` attribute on the test button at `Notifications.tsx` (L382): + +```typescript +// Line 382: Button disabled state +disabled={testMutation.isPending || (isNew && !isEmail)} +``` + +**Why it was added:** The backend `Test` handler at `notification_provider_handler.go` (L333-336) requires a saved provider ID for all non-email types. For Gotify/Telegram, the server needs the stored token. For Discord/Webhook, the server still fetches the provider from DB. Without a saved provider, the backend returns `MISSING_PROVIDER_ID`. + +**Why it breaks tests:** Many existing E2E and unit tests click the test button from a **new (unsaved) provider form** using mocked endpoints. With the new guard: +1. The ` + + )}
+ {certificates.length === 0 ? ( - ) : ( - sortedCertificates.map((cert) => ( + sortedCertificates.map((cert) => { + const inUse = isInUse(cert, hosts) + const deletable = isDeletable(cert, hosts) + const isInUseDeletableCategory = inUse && (cert.provider === 'custom' || cert.provider === 'letsencrypt-staging' || cert.status === 'expired' || cert.status === 'expiring') + + return ( + {deletable && !inUse ? ( + + ) : isInUseDeletableCategory ? ( + + ) : ( + - )) + ) + }) )}
+ + handleSort('name')} className="px-6 py-3 cursor-pointer hover:text-white transition-colors" @@ -116,13 +237,47 @@ export default function CertificateList() {
+ No certificates found.
+ handleSelectRow(cert.id!)} + aria-label={t('certificates.selectCert', { name: cert.name || cert.domain })} + /> + + + + + + + + + {t('certificates.deleteInUse')} + + + {cert.name || '-'} {cert.domain} @@ -142,42 +297,70 @@ export default function CertificateList() { - {cert.id && (cert.provider === 'custom' || cert.issuer?.toLowerCase().includes('staging')) && ( - + + + {t('certificates.deleteInUse')} + + + + ) + } - if (inUse) { - toast.error('Certificate cannot be deleted because it is in use by a proxy host') - return - } + if (deletable) { + return ( + + ) + } - // Allow deletion for custom/staging certs not in use (status check removed) - const message = cert.provider === 'custom' - ? 'Are you sure you want to delete this certificate? This will create a backup before deleting.' - : 'Delete this staging certificate? It will be regenerated on next request.' - if (confirm(message)) { - deleteMutation.mutate(cert.id!) - } - }} - className="text-red-400 hover:text-red-300 transition-colors" - title={cert.provider === 'custom' ? 'Delete Certificate' : 'Delete Staging Certificate'} - > - - - )} + return null + })()}
+ { + if (certToDelete?.id) { + deleteMutation.mutate(certToDelete.id) + } + }} + onCancel={() => setCertToDelete(null)} + isDeleting={deleteMutation.isPending} + /> + c.id && selectedIds.has(c.id))} + open={showBulkDeleteDialog} + onConfirm={() => bulkDeleteMutation.mutate(Array.from(selectedIds))} + onCancel={() => setShowBulkDeleteDialog(false)} + isDeleting={bulkDeleteMutation.isPending} + /> ) } diff --git a/frontend/src/components/ProxyHostForm.tsx b/frontend/src/components/ProxyHostForm.tsx index 19d7f246..0a77144f 100644 --- a/frontend/src/components/ProxyHostForm.tsx +++ b/frontend/src/components/ProxyHostForm.tsx @@ -1388,11 +1388,23 @@ export default function ProxyHostForm({ host, onSubmit, onCancel }: ProxyHostFor id="advanced-config" value={formData.advanced_config} onChange={e => setFormData(prev => ({ ...prev, advanced_config: e.target.value }))} - placeholder="Additional Caddy directives..." + placeholder='{"handler": "headers", "request": {"set": {"X-Custom": ["value"]}}}' rows={4} className="w-full bg-gray-900 border border-gray-700 rounded-lg px-4 py-2 text-white font-mono text-sm focus:outline-none focus:ring-2 focus:ring-blue-500" /> +

+ Accepts{' '} + + Caddy JSON + + {' '}format only — not Caddyfile syntax. +

{/* Enabled Toggle */} diff --git a/frontend/src/components/__tests__/AccessListForm.test.tsx b/frontend/src/components/__tests__/AccessListForm.test.tsx index 0e99e737..214bd780 100644 --- a/frontend/src/components/__tests__/AccessListForm.test.tsx +++ b/frontend/src/components/__tests__/AccessListForm.test.tsx @@ -18,7 +18,7 @@ vi.mock('react-hot-toast', () => ({ })); // Mock ResizeObserver for any layout dependent components -global.ResizeObserver = vi.fn().mockImplementation(() => ({ +globalThis.ResizeObserver = vi.fn().mockImplementation(() => ({ observe: vi.fn(), unobserve: vi.fn(), disconnect: vi.fn(), diff --git a/frontend/src/components/__tests__/CertificateList.test.tsx b/frontend/src/components/__tests__/CertificateList.test.tsx index e1dbcb49..ea63b910 100644 --- a/frontend/src/components/__tests__/CertificateList.test.tsx +++ b/frontend/src/components/__tests__/CertificateList.test.tsx @@ -1,12 +1,12 @@ import { QueryClientProvider } from '@tanstack/react-query' -import { render, screen, waitFor } from '@testing-library/react' +import { render, screen, waitFor, within } from '@testing-library/react' import userEvent from '@testing-library/user-event' import { describe, it, expect, vi, beforeEach } from 'vitest' import { useCertificates } from '../../hooks/useCertificates' import { useProxyHosts } from '../../hooks/useProxyHosts' import { createTestQueryClient } from '../../test/createTestQueryClient' -import CertificateList from '../CertificateList' +import CertificateList, { isDeletable, isInUse } from '../CertificateList' import type { Certificate } from '../../api/certificates' import type { ProxyHost } from '../../api/proxyHosts' @@ -23,6 +23,13 @@ vi.mock('../../api/backups', () => ({ createBackup: vi.fn(async () => ({ filename: 'backup-cert' })), })) +vi.mock('react-i18next', () => ({ + useTranslation: () => ({ + t: (key: string) => key, + i18n: { language: 'en', changeLanguage: vi.fn() }, + }), +})) + vi.mock('../../hooks/useProxyHosts', () => 
({ useProxyHosts: vi.fn(), })) @@ -42,6 +49,8 @@ const createCertificatesValue = (overrides: Partial screen .getAllByRole('row') .slice(1) - .map(row => row.querySelector('td')?.textContent?.trim() ?? '') + .map(row => row.querySelectorAll('td')[1]?.textContent?.trim() ?? '') beforeEach(() => { vi.clearAllMocks() @@ -107,58 +116,133 @@ beforeEach(() => { }) describe('CertificateList', () => { - it('deletes custom certificate when confirmed', async () => { - const confirmSpy = vi.spyOn(window, 'confirm').mockImplementation(() => true) + describe('isDeletable', () => { + const noHosts: ProxyHost[] = [] + const withHost = (certId: number): ProxyHost[] => [createProxyHost({ certificate_id: certId })] + + it('returns true for custom cert not in use', () => { + const cert: Certificate = { id: 1, name: 'C', domain: 'd', issuer: 'X', expires_at: '', status: 'valid', provider: 'custom' } + expect(isDeletable(cert, noHosts)).toBe(true) + }) + + it('returns true for staging cert not in use', () => { + const cert: Certificate = { id: 2, name: 'S', domain: 'd', issuer: 'X', expires_at: '', status: 'untrusted', provider: 'letsencrypt-staging' } + expect(isDeletable(cert, noHosts)).toBe(true) + }) + + it('returns true for expired LE cert not in use', () => { + const cert: Certificate = { id: 3, name: 'E', domain: 'd', issuer: 'LE', expires_at: '', status: 'expired', provider: 'letsencrypt' } + expect(isDeletable(cert, noHosts)).toBe(true) + }) + + it('returns false for valid LE cert not in use', () => { + const cert: Certificate = { id: 4, name: 'V', domain: 'd', issuer: 'LE', expires_at: '', status: 'valid', provider: 'letsencrypt' } + expect(isDeletable(cert, noHosts)).toBe(false) + }) + + it('returns false for cert in use', () => { + const cert: Certificate = { id: 5, name: 'U', domain: 'd', issuer: 'X', expires_at: '', status: 'valid', provider: 'custom' } + expect(isDeletable(cert, withHost(5))).toBe(false) + }) + + it('returns false for cert without id', () => { + const 
cert: Certificate = { domain: 'd', issuer: 'X', expires_at: '', status: 'valid', provider: 'custom' } + expect(isDeletable(cert, noHosts)).toBe(false) + }) + + it('returns true for expiring LE cert not in use', () => { + const cert: Certificate = { id: 7, name: 'Exp', domain: 'd', issuer: 'LE', expires_at: '', status: 'expiring', provider: 'letsencrypt' } + expect(isDeletable(cert, noHosts)).toBe(true) + }) + + it('returns false for expiring LE cert that is in use', () => { + const cert: Certificate = { id: 7, name: 'Exp', domain: 'd', issuer: 'LE', expires_at: '', status: 'expiring', provider: 'letsencrypt' } + expect(isDeletable(cert, withHost(7))).toBe(false) + }) + }) + + describe('isInUse', () => { + it('returns true when host references cert by certificate_id', () => { + const cert: Certificate = { id: 10, domain: 'd', issuer: 'X', expires_at: '', status: 'valid', provider: 'custom' } + expect(isInUse(cert, [createProxyHost({ certificate_id: 10 })])).toBe(true) + }) + + it('returns true when host references cert via certificate.id', () => { + const cert: Certificate = { id: 10, domain: 'd', issuer: 'X', expires_at: '', status: 'valid', provider: 'custom' } + const host = createProxyHost({ certificate_id: undefined, certificate: { id: 10, uuid: 'u', name: 'c', provider: 'custom', domains: 'd', expires_at: '' } }) + expect(isInUse(cert, [host])).toBe(true) + }) + + it('returns false when no host references cert', () => { + const cert: Certificate = { id: 99, domain: 'd', issuer: 'X', expires_at: '', status: 'valid', provider: 'custom' } + expect(isInUse(cert, [createProxyHost({ certificate_id: 3 })])).toBe(false) + }) + + it('returns false when cert.id is undefined even if a host has certificate_id undefined', () => { + const cert: Certificate = { domain: 'd', issuer: 'X', expires_at: '', status: 'valid', provider: 'custom' } + const host = createProxyHost({ certificate_id: undefined }) + expect(isInUse(cert, [host])).toBe(false) + }) + }) + + it('renders 
delete button for deletable certs', async () => { + renderWithClient() + const rows = await screen.findAllByRole('row') + const customRow = rows.find(r => r.textContent?.includes('CustomCert'))! + expect(within(customRow).getByRole('button', { name: 'certificates.deleteTitle' })).toBeInTheDocument() + }) + + it('renders delete button for expired LE cert not in use', async () => { + renderWithClient() + const rows = await screen.findAllByRole('row') + const expiredLeRow = rows.find(r => r.textContent?.includes('ExpiredLE'))! + expect(within(expiredLeRow).getByRole('button', { name: 'certificates.deleteTitle' })).toBeInTheDocument() + }) + + it('renders aria-disabled delete button for in-use cert', async () => { + renderWithClient() + const rows = await screen.findAllByRole('row') + const activeRow = rows.find(r => r.textContent?.includes('ActiveCert'))! + const btn = within(activeRow).getByRole('button', { name: 'certificates.deleteTitle' }) + expect(btn).toHaveAttribute('aria-disabled', 'true') + }) + + it('hides delete button for valid production LE cert', async () => { + renderWithClient() + const rows = await screen.findAllByRole('row') + const validLeRow = rows.find(r => r.textContent?.includes('ValidLE'))! 
+ expect(within(validLeRow).queryByRole('button', { name: 'certificates.deleteTitle' })).not.toBeInTheDocument() + }) + + it('opens dialog and deletes cert on confirm', async () => { const { deleteCertificate } = await import('../../api/certificates') - const { createBackup } = await import('../../api/backups') - const { toast } = await import('../../utils/toast') const user = userEvent.setup() renderWithClient() const rows = await screen.findAllByRole('row') - const customRow = rows.find(r => r.querySelector('td')?.textContent?.includes('CustomCert')) as HTMLElement - expect(customRow).toBeTruthy() - const customBtn = customRow.querySelector('button[title="Delete Certificate"]') as HTMLButtonElement - expect(customBtn).toBeTruthy() - await user.click(customBtn) + const customRow = rows.find(r => r.textContent?.includes('CustomCert'))! + await user.click(within(customRow).getByRole('button', { name: 'certificates.deleteTitle' })) - await waitFor(() => expect(createBackup).toHaveBeenCalled()) + const dialog = await screen.findByRole('dialog') + expect(dialog).toBeInTheDocument() + expect(within(dialog).getByText('certificates.deleteTitle')).toBeInTheDocument() + + await user.click(within(dialog).getByRole('button', { name: 'certificates.deleteButton' })) await waitFor(() => expect(deleteCertificate).toHaveBeenCalledWith(1)) - await waitFor(() => expect(toast.success).toHaveBeenCalledWith('Certificate deleted')) - confirmSpy.mockRestore() }) - it('deletes staging certificate when confirmed', async () => { - const confirmSpy = vi.spyOn(window, 'confirm').mockImplementation(() => true) - const { deleteCertificate } = await import('../../api/certificates') - const user = userEvent.setup() - - renderWithClient() - const stagingButtons = await screen.findAllByTitle('Delete Staging Certificate') - expect(stagingButtons.length).toBeGreaterThan(0) - await user.click(stagingButtons[0]) - - await waitFor(() => expect(deleteCertificate).toHaveBeenCalledWith(2)) - 
confirmSpy.mockRestore() - }) - - it('deletes valid custom certificate when not in use', async () => { - const confirmSpy = vi.spyOn(window, 'confirm').mockImplementation(() => true) - const { deleteCertificate } = await import('../../api/certificates') + it('does not call createBackup on delete (server handles it)', async () => { const { createBackup } = await import('../../api/backups') const user = userEvent.setup() renderWithClient() const rows = await screen.findAllByRole('row') - const unusedRow = rows.find(r => r.querySelector('td')?.textContent?.includes('UnusedValidCert')) as HTMLElement - expect(unusedRow).toBeTruthy() - const unusedButton = unusedRow.querySelector('button[title="Delete Certificate"]') as HTMLButtonElement - expect(unusedButton).toBeTruthy() - await user.click(unusedButton) + const customRow = rows.find(r => r.textContent?.includes('CustomCert'))! + await user.click(within(customRow).getByRole('button', { name: 'certificates.deleteTitle' })) - await waitFor(() => expect(createBackup).toHaveBeenCalled()) - await waitFor(() => expect(deleteCertificate).toHaveBeenCalledWith(4)) - confirmSpy.mockRestore() + const dialog = await screen.findByRole('dialog') + await user.click(within(dialog).getByRole('button', { name: 'certificates.deleteButton' })) + await waitFor(() => expect(createBackup).not.toHaveBeenCalled()) }) it('renders empty state when no certificates exist', async () => { @@ -173,6 +257,157 @@ describe('CertificateList', () => { expect(await screen.findByText('Failed to load certificates')).toBeInTheDocument() }) + it('shows error toast when delete mutation fails', async () => { + const { deleteCertificate } = await import('../../api/certificates') + const { toast } = await import('../../utils/toast') + vi.mocked(deleteCertificate).mockRejectedValueOnce(new Error('Network error')) + const user = userEvent.setup() + + renderWithClient() + const rows = await screen.findAllByRole('row') + const customRow = rows.find(r => 
r.textContent?.includes('CustomCert'))! + await user.click(within(customRow).getByRole('button', { name: 'certificates.deleteTitle' })) + + const dialog = await screen.findByRole('dialog') + await user.click(within(dialog).getByRole('button', { name: 'certificates.deleteButton' })) + + await waitFor(() => expect(toast.error).toHaveBeenCalledWith('certificates.deleteFailed: Network error')) + }) + + it('clicking disabled delete button for in-use cert does not open dialog', async () => { + const user = userEvent.setup() + renderWithClient() + const rows = await screen.findAllByRole('row') + const activeRow = rows.find(r => r.textContent?.includes('ActiveCert'))! + const btn = within(activeRow).getByRole('button', { name: 'certificates.deleteTitle' }) + + await user.click(btn) + expect(screen.queryByRole('dialog')).not.toBeInTheDocument() + }) + + it('closes delete dialog when cancel is clicked', async () => { + const user = userEvent.setup() + renderWithClient() + const rows = await screen.findAllByRole('row') + const customRow = rows.find(r => r.textContent?.includes('CustomCert'))! + await user.click(within(customRow).getByRole('button', { name: 'certificates.deleteTitle' })) + + const dialog = await screen.findByRole('dialog') + expect(dialog).toBeInTheDocument() + + await user.click(within(dialog).getByRole('button', { name: 'common.cancel' })) + await waitFor(() => expect(screen.queryByRole('dialog')).not.toBeInTheDocument()) + }) + + it('renders enabled checkboxes for deletable not-in-use certs (ids 1, 2, 4, 5)', async () => { + renderWithClient() + const rows = await screen.findAllByRole('row') + for (const name of ['CustomCert', 'LE Staging', 'UnusedValidCert', 'ExpiredLE']) { + const row = rows.find(r => r.textContent?.includes(name))! 
+ const checkbox = within(row).getByRole('checkbox') + expect(checkbox).toBeEnabled() + expect(checkbox).not.toHaveAttribute('aria-disabled', 'true') + } + }) + + it('renders disabled checkbox for in-use cert (id 3)', async () => { + renderWithClient() + const rows = await screen.findAllByRole('row') + const activeRow = rows.find(r => r.textContent?.includes('ActiveCert'))! + const checkboxes = within(activeRow).getAllByRole('checkbox') + const rowCheckbox = checkboxes[0] + expect(rowCheckbox).toBeDisabled() + expect(rowCheckbox).toHaveAttribute('aria-disabled', 'true') + }) + + it('renders no checkbox in valid production LE cert row (id 6)', async () => { + renderWithClient() + const rows = await screen.findAllByRole('row') + const validLeRow = rows.find(r => r.textContent?.includes('ValidLE'))! + expect(within(validLeRow).queryByRole('checkbox')).not.toBeInTheDocument() + }) + + it('selecting one cert makes the bulk action toolbar visible', async () => { + const user = userEvent.setup() + renderWithClient() + const rows = await screen.findAllByRole('row') + const customRow = rows.find(r => r.textContent?.includes('CustomCert'))! + await user.click(within(customRow).getByRole('checkbox')) + expect(screen.getByRole('status')).toBeInTheDocument() + }) + + it('header select-all selects only ids 1, 2, 4, 5 (not in-use id 3)', async () => { + const user = userEvent.setup() + renderWithClient() + const headerRow = (await screen.findAllByRole('row'))[0] + const headerCheckbox = within(headerRow).getByRole('checkbox') + await user.click(headerCheckbox) + expect(screen.getByRole('status')).toBeInTheDocument() + const rows = screen.getAllByRole('row').slice(1) + const activeRow = rows.find(r => r.textContent?.includes('ActiveCert'))! 
+ const activeCheckbox = within(activeRow).getByRole('checkbox') + expect(activeCheckbox).toBeDisabled() + expect(activeCheckbox).not.toBeChecked() + }) + + it('clicking the toolbar Delete button opens BulkDeleteCertificateDialog', async () => { + const user = userEvent.setup() + renderWithClient() + const rows = await screen.findAllByRole('row') + const customRow = rows.find(r => r.textContent?.includes('CustomCert'))! + await user.click(within(customRow).getByRole('checkbox')) + await user.click(screen.getByRole('button', { name: /certificates\.bulkDeleteButton/i })) + expect(await screen.findByRole('dialog')).toBeInTheDocument() + }) + + it('confirming in the bulk dialog calls deleteCertificate for each selected ID', async () => { + const { deleteCertificate } = await import('../../api/certificates') + const user = userEvent.setup() + renderWithClient() + const rows = await screen.findAllByRole('row') + const customRow = rows.find(r => r.textContent?.includes('CustomCert'))! + const stagingRow = rows.find(r => r.textContent?.includes('LE Staging'))! 
+ await user.click(within(customRow).getByRole('checkbox')) + await user.click(within(stagingRow).getByRole('checkbox')) + await user.click(screen.getByRole('button', { name: /certificates\.bulkDeleteButton/i })) + const dialog = await screen.findByRole('dialog') + await user.click(within(dialog).getByRole('button', { name: /certificates\.bulkDeleteButton/i })) + await waitFor(() => { + expect(deleteCertificate).toHaveBeenCalledWith(1) + expect(deleteCertificate).toHaveBeenCalledWith(2) + }) + }) + + it('shows partial failure toast when some bulk deletes fail', async () => { + const { deleteCertificate } = await import('../../api/certificates') + const { toast } = await import('../../utils/toast') + vi.mocked(deleteCertificate).mockImplementation(async (id: number) => { + if (id === 2) throw new Error('network error') + }) + const user = userEvent.setup() + renderWithClient() + const rows = await screen.findAllByRole('row') + const customRow = rows.find(r => r.textContent?.includes('CustomCert'))! + const stagingRow = rows.find(r => r.textContent?.includes('LE Staging'))! 
+ await user.click(within(customRow).getByRole('checkbox')) + await user.click(within(stagingRow).getByRole('checkbox')) + await user.click(screen.getByRole('button', { name: /certificates\.bulkDeleteButton/i })) + const dialog = await screen.findByRole('dialog') + await user.click(within(dialog).getByRole('button', { name: /certificates\.bulkDeleteButton/i })) + await waitFor(() => expect(toast.error).toHaveBeenCalledWith('certificates.bulkDeletePartial')) + }) + + it('clicking header checkbox twice deselects all and hides the bulk action toolbar', async () => { + const user = userEvent.setup() + renderWithClient() + const headerRow = (await screen.findAllByRole('row'))[0] + const headerCheckbox = within(headerRow).getByRole('checkbox') + await user.click(headerCheckbox) + expect(screen.getByRole('status')).toBeInTheDocument() + await user.click(headerCheckbox) + await waitFor(() => expect(screen.queryByRole('status')).not.toBeInTheDocument()) + }) + it('sorts certificates by name and expiry when headers are clicked', async () => { const certificates: Certificate[] = [ { id: 10, name: 'Zulu', domain: 'z.example.com', issuer: 'Custom CA', expires_at: '2026-03-01T00:00:00Z', status: 'valid', provider: 'custom' }, diff --git a/frontend/src/components/__tests__/ProxyHostForm.test.tsx b/frontend/src/components/__tests__/ProxyHostForm.test.tsx index 665e384b..5117e514 100644 --- a/frontend/src/components/__tests__/ProxyHostForm.test.tsx +++ b/frontend/src/components/__tests__/ProxyHostForm.test.tsx @@ -1311,7 +1311,7 @@ describe('ProxyHostForm', () => { await userEvent.type(screen.getByLabelText(/^Host/), '192.168.1.100') await userEvent.type(screen.getByLabelText(/^Port/), '80') - const advancedConfigField = screen.getByPlaceholderText('Additional Caddy directives...') + const advancedConfigField = screen.getByRole('textbox', { name: /Advanced Caddy Config/i }) await userEvent.type(advancedConfigField, 'header /api/* X-Custom-Header "test"') await 
userEvent.click(screen.getByText('Save')) diff --git a/frontend/src/components/__tests__/SecurityNotificationSettingsModal.test.tsx b/frontend/src/components/__tests__/SecurityNotificationSettingsModal.test.tsx index a46c5d6a..289e18b2 100644 --- a/frontend/src/components/__tests__/SecurityNotificationSettingsModal.test.tsx +++ b/frontend/src/components/__tests__/SecurityNotificationSettingsModal.test.tsx @@ -86,7 +86,7 @@ describe('Security Notification Settings on Notifications page', () => { await user.click(await screen.findByTestId('add-provider-btn')); const typeSelect = screen.getByTestId('provider-type') as HTMLSelectElement; - expect(Array.from(typeSelect.options).map((option) => option.value)).toEqual(['discord', 'gotify', 'webhook', 'email', 'telegram']); + expect(Array.from(typeSelect.options).map((option) => option.value)).toEqual(['discord', 'gotify', 'webhook', 'email', 'telegram', 'slack', 'pushover', 'ntfy']); expect(typeSelect.value).toBe('discord'); const webhookInput = screen.getByTestId('provider-url') as HTMLInputElement; diff --git a/frontend/src/components/dialogs/BulkDeleteCertificateDialog.tsx b/frontend/src/components/dialogs/BulkDeleteCertificateDialog.tsx new file mode 100644 index 00000000..17f867ac --- /dev/null +++ b/frontend/src/components/dialogs/BulkDeleteCertificateDialog.tsx @@ -0,0 +1,88 @@ +import { AlertTriangle } from 'lucide-react' +import { useTranslation } from 'react-i18next' + +import { Button } from '../ui/Button' +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from '../ui/Dialog' + +import type { Certificate } from '../../api/certificates' + +interface BulkDeleteCertificateDialogProps { + certificates: Certificate[] + open: boolean + onConfirm: () => void + onCancel: () => void + isDeleting: boolean +} + +function providerLabel(cert: Certificate, t: (key: string) => string): string { + if (cert.provider === 'letsencrypt-staging') return 
t('certificates.providerStaging') + if (cert.provider === 'custom') return t('certificates.providerCustom') + if (cert.status === 'expired') return t('certificates.providerExpiredLE') + if (cert.status === 'expiring') return t('certificates.providerExpiringLE') + return cert.provider +} + +export default function BulkDeleteCertificateDialog({ + certificates, + open, + onConfirm, + onCancel, + isDeleting, +}: BulkDeleteCertificateDialogProps) { + const { t } = useTranslation() + + if (certificates.length === 0) return null + + return ( + { if (!isOpen) onCancel() }}> + + + {t('certificates.bulkDeleteTitle', { count: certificates.length })} + + {t('certificates.bulkDeleteDescription', { count: certificates.length })} + + + +
+
+ +

+ {t('certificates.bulkDeleteConfirm')} +

+
+ +
    + {certificates.map((cert) => ( +
  • + {cert.name || cert.domain} + {providerLabel(cert, t)} +
  • + ))} +
+
+ + + + + +
+
+ ) +} diff --git a/frontend/src/components/dialogs/DeleteCertificateDialog.tsx b/frontend/src/components/dialogs/DeleteCertificateDialog.tsx new file mode 100644 index 00000000..68491eb6 --- /dev/null +++ b/frontend/src/components/dialogs/DeleteCertificateDialog.tsx @@ -0,0 +1,81 @@ +import { AlertTriangle } from 'lucide-react' +import { useTranslation } from 'react-i18next' + +import { Button } from '../ui/Button' +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from '../ui/Dialog' + +import type { Certificate } from '../../api/certificates' + +interface DeleteCertificateDialogProps { + certificate: Certificate | null + open: boolean + onConfirm: () => void + onCancel: () => void + isDeleting: boolean +} + +function getWarningKey(cert: Certificate): string { + if (cert.status === 'expired') return 'certificates.deleteConfirmExpired' + if (cert.status === 'expiring') return 'certificates.deleteConfirmExpiring' + if (cert.provider === 'letsencrypt-staging') return 'certificates.deleteConfirmStaging' + return 'certificates.deleteConfirmCustom' +} + +export default function DeleteCertificateDialog({ + certificate, + open, + onConfirm, + onCancel, + isDeleting, +}: DeleteCertificateDialogProps) { + const { t } = useTranslation() + + if (!certificate) return null + + return ( + { if (!isOpen) onCancel() }}> + + + {t('certificates.deleteTitle')} + + {certificate.name || certificate.domain} + + + +
+
+ +

+ {t(getWarningKey(certificate))} +

+
+ +
+
{t('certificates.domain')}
+
{certificate.domain}
+
{t('certificates.status')}
+
{certificate.status}
+
{t('certificates.provider')}
+
{certificate.provider}
+
+
+ + + + + +
+
+ ) +} diff --git a/frontend/src/components/dialogs/__tests__/BulkDeleteCertificateDialog.test.tsx b/frontend/src/components/dialogs/__tests__/BulkDeleteCertificateDialog.test.tsx new file mode 100644 index 00000000..535074f8 --- /dev/null +++ b/frontend/src/components/dialogs/__tests__/BulkDeleteCertificateDialog.test.tsx @@ -0,0 +1,136 @@ +import { render, screen, within } from '@testing-library/react' +import userEvent from '@testing-library/user-event' +import { describe, it, expect, vi } from 'vitest' + +import BulkDeleteCertificateDialog from '../../dialogs/BulkDeleteCertificateDialog' + +import type { Certificate } from '../../../api/certificates' + +const makeCert = (overrides: Partial): Certificate => ({ + id: 1, + name: 'Test Cert', + domain: 'test.example.com', + issuer: 'Custom CA', + expires_at: '2026-01-01T00:00:00Z', + status: 'valid', + provider: 'custom', + ...overrides, +}) + +const certs: Certificate[] = [ + makeCert({ id: 1, name: 'Cert One', domain: 'one.example.com' }), + makeCert({ id: 2, name: 'Cert Two', domain: 'two.example.com', provider: 'letsencrypt-staging', status: 'untrusted' }), + makeCert({ id: 3, name: 'Cert Three', domain: 'three.example.com', provider: 'letsencrypt', status: 'expired' }), +] + +describe('BulkDeleteCertificateDialog', () => { + it('renders dialog with count in title when 3 certs supplied', () => { + render( + + ) + const dialog = screen.getByRole('dialog') + expect(within(dialog).getByRole('heading', { name: 'Delete 3 Certificate(s)' })).toBeInTheDocument() + }) + + it('lists each certificate name in the scrollable list', () => { + render( + + ) + expect(screen.getByText('Cert One')).toBeInTheDocument() + expect(screen.getByText('Cert Two')).toBeInTheDocument() + expect(screen.getByText('Cert Three')).toBeInTheDocument() + expect(screen.getByText('Custom')).toBeInTheDocument() + expect(screen.getByText('Staging')).toBeInTheDocument() + expect(screen.getByText('Expired LE')).toBeInTheDocument() + }) + + it('calls 
onConfirm when the Delete button is clicked', async () => { + const onConfirm = vi.fn() + const user = userEvent.setup() + render( + + ) + const dialog = screen.getByRole('dialog') + await user.click(within(dialog).getByRole('button', { name: 'Delete 3 Certificate(s)' })) + expect(onConfirm).toHaveBeenCalled() + }) + + it('calls onCancel when the Cancel button is clicked', async () => { + const onCancel = vi.fn() + const user = userEvent.setup() + render( + + ) + const dialog = screen.getByRole('dialog') + await user.click(within(dialog).getByRole('button', { name: 'Cancel' })) + expect(onCancel).toHaveBeenCalled() + }) + + it('Delete button is loading/disabled when isDeleting is true', () => { + render( + + ) + const dialog = screen.getByRole('dialog') + const deleteBtn = within(dialog).getByRole('button', { name: 'Delete 3 Certificate(s)' }) + expect(deleteBtn).toBeDisabled() + const cancelBtn = within(dialog).getByRole('button', { name: 'Cancel' }) + expect(cancelBtn).toBeDisabled() + }) + + it('returns null when certificates array is empty', () => { + const { container } = render( + + ) + expect(container.innerHTML).toBe('') + }) + + it('renders "Expiring LE" label for a letsencrypt cert with status expiring', () => { + const expiringCert = makeCert({ id: 4, name: 'Expiring Cert', domain: 'expiring.example.com', provider: 'letsencrypt', status: 'expiring' }) + render( + + ) + expect(screen.getByText('Expiring LE')).toBeInTheDocument() + }) +}) diff --git a/frontend/src/components/dialogs/__tests__/DeleteCertificateDialog.test.tsx b/frontend/src/components/dialogs/__tests__/DeleteCertificateDialog.test.tsx new file mode 100644 index 00000000..c5998599 --- /dev/null +++ b/frontend/src/components/dialogs/__tests__/DeleteCertificateDialog.test.tsx @@ -0,0 +1,128 @@ +import { render, screen } from '@testing-library/react' +import userEvent from '@testing-library/user-event' +import { describe, it, expect, vi } from 'vitest' + +import DeleteCertificateDialog from 
'../../dialogs/DeleteCertificateDialog' + +import type { Certificate } from '../../../api/certificates' + +vi.mock('react-i18next', () => ({ + useTranslation: () => ({ + t: (key: string) => key, + i18n: { language: 'en', changeLanguage: vi.fn() }, + }), +})) + +const baseCert: Certificate = { + id: 1, + name: 'Test Cert', + domain: 'test.example.com', + issuer: 'Custom CA', + expires_at: '2026-01-01T00:00:00Z', + status: 'valid', + provider: 'custom', +} + +describe('DeleteCertificateDialog', () => { + it('renders warning text for custom cert', () => { + render( + + ) + expect(screen.getByText('certificates.deleteConfirmCustom')).toBeInTheDocument() + expect(screen.getByText('certificates.deleteTitle')).toBeInTheDocument() + }) + + it('renders warning text for staging cert', () => { + const staging: Certificate = { ...baseCert, provider: 'letsencrypt-staging', status: 'untrusted' } + render( + + ) + expect(screen.getByText('certificates.deleteConfirmStaging')).toBeInTheDocument() + }) + + it('renders warning text for expired cert', () => { + const expired: Certificate = { ...baseCert, provider: 'letsencrypt', status: 'expired' } + render( + + ) + expect(screen.getByText('certificates.deleteConfirmExpired')).toBeInTheDocument() + }) + + it('calls onCancel when Cancel is clicked', async () => { + const onCancel = vi.fn() + const user = userEvent.setup() + render( + + ) + await user.click(screen.getByRole('button', { name: 'common.cancel' })) + expect(onCancel).toHaveBeenCalled() + }) + + it('calls onConfirm when Delete is clicked', async () => { + const onConfirm = vi.fn() + const user = userEvent.setup() + render( + + ) + await user.click(screen.getByRole('button', { name: 'certificates.deleteButton' })) + expect(onConfirm).toHaveBeenCalled() + }) + + it('renders nothing when certificate is null', () => { + const { container } = render( + + ) + expect(container.innerHTML).toBe('') + }) + + it('renders expired warning for expired staging cert (priority ordering)', () 
=> { + const expiredStaging: Certificate = { ...baseCert, provider: 'letsencrypt-staging', status: 'expired' } + render( + + ) + expect(screen.getByText('certificates.deleteConfirmExpired')).toBeInTheDocument() + expect(screen.queryByText('certificates.deleteConfirmStaging')).not.toBeInTheDocument() + }) +}) diff --git a/frontend/src/locales/de/translation.json b/frontend/src/locales/de/translation.json index 780acece..6598fb7d 100644 --- a/frontend/src/locales/de/translation.json +++ b/frontend/src/locales/de/translation.json @@ -173,7 +173,32 @@ "uploadSuccess": "Zertifikat erfolgreich hochgeladen", "uploadFailed": "Fehler beim Hochladen des Zertifikats", "note": "Hinweis", - "noteText": "Sie können benutzerdefinierte Zertifikate und Staging-Zertifikate löschen. Produktions-Let's-Encrypt-Zertifikate werden automatisch erneuert und sollten nur beim Umgebungswechsel gelöscht werden." + "noteText": "Sie können benutzerdefinierte Zertifikate, Staging-Zertifikate sowie abgelaufene oder ablaufende Produktionszertifikate löschen, die keinem Proxy-Host zugeordnet sind. Aktive Produktionszertifikate werden von Caddy automatisch erneuert.", + "provider": "Anbieter", + "deleteTitle": "Zertifikat löschen", + "deleteConfirmCustom": "Dieses Zertifikat wird dauerhaft gelöscht. Zuvor wird eine Sicherung erstellt.", + "deleteConfirmStaging": "Dieses Staging-Zertifikat wird entfernt. Es wird bei der nächsten Anfrage neu erstellt.", + "deleteConfirmExpired": "Dieses abgelaufene Zertifikat ist nicht mehr aktiv und wird dauerhaft entfernt.", + "deleteConfirmExpiring": "Dieses Zertifikat läuft bald ab. 
Es wird dauerhaft entfernt und nicht automatisch erneuert.", + "deleteSuccess": "Zertifikat gelöscht", + "deleteFailed": "Zertifikat konnte nicht gelöscht werden", + "deleteInUse": "Löschen nicht möglich — das Zertifikat ist einem Proxy-Host zugeordnet", + "deleteButton": "Löschen", + "bulkSelectAll": "Alle löschbaren Zertifikate auswählen", + "selectCert": "Zertifikat {{name}} auswählen", + "bulkSelectedCount": "{{count}} Zertifikat(e) ausgewählt", + "bulkDeleteTitle": "{{count}} Zertifikat(e) löschen", + "bulkDeleteDescription": "{{count}} Zertifikat(e) löschen", + "bulkDeleteConfirm": "Die folgenden Zertifikate werden dauerhaft gelöscht. Der Server erstellt vor jeder Löschung eine Sicherung.", + "bulkDeleteListAriaLabel": "Zu löschende Zertifikate", + "bulkDeleteButton": "{{count}} Zertifikat(e) löschen", + "bulkDeleteSuccess": "{{count}} Zertifikat(e) gelöscht", + "bulkDeletePartial": "{{deleted}} gelöscht, {{failed}} fehlgeschlagen", + "bulkDeleteFailed": "Zertifikate konnten nicht gelöscht werden", + "providerStaging": "Staging", + "providerCustom": "Benutzerdefiniert", + "providerExpiredLE": "Abgelaufen LE", + "providerExpiringLE": "Ablaufend LE" }, "auth": { "login": "Anmelden", @@ -240,6 +265,7 @@ "disabledDescription": "Intrusion Prevention System mit Community-Bedrohungsintelligenz", "processRunning": "Läuft (PID {{pid}})", "processStopped": "Prozess gestoppt", + "starting": "Startet...", "toggleTooltip": "CrowdSec-Schutz umschalten", "copyFailed": "Kopieren des API-Schlüssels fehlgeschlagen", "keyWarning": { @@ -406,7 +432,13 @@ "monitorDeleted": "Monitor gelöscht", "deleteConfirm": "Diesen Monitor löschen? Dies kann nicht rückgängig gemacht werden.", "pending": "PRÜFUNG...", - "pendingFirstCheck": "Warten auf erste Prüfung..." 
+ "pendingFirstCheck": "Warten auf erste Prüfung...", + "urlPlaceholder": "https://example.com", + "urlPlaceholderHttp": "https://example.com", + "urlPlaceholderTcp": "192.168.1.1:8080", + "urlHelperHttp": "Vollständige URL einschließlich Schema eingeben (z.B. https://example.com)", + "urlHelperTcp": "Als Host:Port ohne Schema-Präfix eingeben (z.B. 192.168.1.1:8080 oder hostname:22)", + "invalidTcpFormat": "TCP-Monitore erfordern das Format Host:Port. Entfernen Sie das Schema-Präfix (z.B. 192.168.1.1:8080 statt tcp://192.168.1.1:8080)." }, "domains": { "title": "Domänen", @@ -499,7 +531,12 @@ "webhookUrl": "Webhook URL (Optional)", "webhookUrlHelp": "POST requests will be sent to this URL when security events occur.", "emailRecipients": "Email Recipients (Optional)", - "emailRecipientsHelp": "Comma-separated email addresses." + "emailRecipientsHelp": "Comma-separated email addresses.", + "ntfy": "Ntfy", + "ntfyTopicUrl": "Topic URL", + "ntfyAccessToken": "Access Token (optional)", + "ntfyAccessTokenPlaceholder": "Enter your Ntfy access token", + "ntfyAccessTokenHelp": "Your Ntfy access token for authenticated topics. Required for password-protected topics on self-hosted instances. The token is stored securely and separately." }, "users": { "title": "Benutzerverwaltung", diff --git a/frontend/src/locales/en/translation.json b/frontend/src/locales/en/translation.json index 83fc647b..8ab3c927 100644 --- a/frontend/src/locales/en/translation.json +++ b/frontend/src/locales/en/translation.json @@ -182,7 +182,32 @@ "uploadSuccess": "Certificate uploaded successfully", "uploadFailed": "Failed to upload certificate", "note": "Note", - "noteText": "You can delete custom certificates and staging certificates. Production Let's Encrypt certificates are automatically renewed and should not be deleted unless switching environments." 
+ "noteText": "You can delete custom certificates, staging certificates, and expired or expiring production certificates that are not attached to any proxy host. Active production certificates are automatically renewed by Caddy.", + "provider": "Provider", + "deleteTitle": "Delete Certificate", + "deleteConfirmCustom": "This will permanently delete this certificate. A backup will be created first.", + "deleteConfirmStaging": "This staging certificate will be removed. It will be regenerated on next request.", + "deleteConfirmExpired": "This expired certificate is no longer active and will be permanently removed.", + "deleteConfirmExpiring": "This certificate is expiring soon. It will be permanently removed and will not be auto-renewed.", + "deleteSuccess": "Certificate deleted", + "deleteFailed": "Failed to delete certificate", + "deleteInUse": "Cannot delete — certificate is attached to a proxy host", + "deleteButton": "Delete", + "bulkSelectAll": "Select all deletable certificates", + "selectCert": "Select certificate {{name}}", + "bulkSelectedCount": "{{count}} certificate(s) selected", + "bulkDeleteTitle": "Delete {{count}} Certificate(s)", + "bulkDeleteDescription": "Delete {{count}} certificate(s)", + "bulkDeleteConfirm": "The following certificates will be permanently deleted. 
The server creates a backup before each removal.", + "bulkDeleteListAriaLabel": "Certificates to be deleted", + "bulkDeleteButton": "Delete {{count}} Certificate(s)", + "bulkDeleteSuccess": "{{count}} certificate(s) deleted", + "bulkDeletePartial": "{{deleted}} deleted, {{failed}} failed", + "bulkDeleteFailed": "Failed to delete certificates", + "providerStaging": "Staging", + "providerCustom": "Custom", + "providerExpiredLE": "Expired LE", + "providerExpiringLE": "Expiring LE" }, "auth": { "login": "Login", @@ -250,6 +275,7 @@ "disabledDescription": "Intrusion Prevention System powered by community threat intelligence", "processRunning": "Running (PID {{pid}})", "processStopped": "Process stopped", + "starting": "Starting...", "toggleTooltip": "Toggle CrowdSec protection", "bouncerApiKey": "Bouncer API Key", "keyCopied": "API key copied to clipboard", @@ -476,7 +502,12 @@ "monitorUrl": "URL", "monitorTypeHttp": "HTTP", "monitorTypeTcp": "TCP", - "urlPlaceholder": "https://example.com or tcp://host:port", + "urlPlaceholder": "https://example.com", + "urlPlaceholderHttp": "https://example.com", + "urlPlaceholderTcp": "192.168.1.1:8080", + "urlHelperHttp": "Enter the full URL including the scheme (e.g., https://example.com)", + "urlHelperTcp": "Enter as host:port with no scheme prefix (e.g., 192.168.1.1:8080 or hostname:22)", + "invalidTcpFormat": "TCP monitors require host:port format. Remove the scheme prefix (e.g., use 192.168.1.1:8080, not tcp://192.168.1.1:8080).", "pending": "CHECKING...", "pendingFirstCheck": "Waiting for first check..." }, @@ -586,7 +617,23 @@ "emailRecipientsHelp": "Comma-separated email addresses.", "recipients": "Recipients", "recipientsHelp": "Comma-separated email addresses (max 20)", - "emailSmtpNotice": "Email notifications are sent via the configured SMTP server. Ensure SMTP is configured in Settings \u2192 SMTP." + "emailSmtpNotice": "Email notifications are sent via the configured SMTP server. 
Ensure SMTP is configured in Settings \u2192 SMTP.", + "slack": "Slack", + "slackWebhookUrl": "Webhook URL", + "slackWebhookUrlPlaceholder": "https://hooks.slack.com/services/T.../B.../xxx", + "slackChannelName": "Channel Name (optional)", + "slackChannelNameHelp": "Display name for the channel. The actual channel is determined by the webhook configuration.", + "pushover": "Pushover", + "pushoverApiToken": "API Token (Application)", + "pushoverApiTokenPlaceholder": "Enter your Pushover Application API Token", + "pushoverUserKey": "User Key", + "pushoverUserKeyPlaceholder": "uQiRzpo4DXghDmr9QzzfQu27cmVRsG", + "pushoverUserKeyHelp": "Your Pushover user or group key. The API token is stored securely and separately.", + "ntfy": "Ntfy", + "ntfyTopicUrl": "Topic URL", + "ntfyAccessToken": "Access Token (optional)", + "ntfyAccessTokenPlaceholder": "Enter your Ntfy access token", + "ntfyAccessTokenHelp": "Your Ntfy access token for authenticated topics. Required for password-protected topics on self-hosted instances. The token is stored securely and separately." }, "users": { "title": "User Management", diff --git a/frontend/src/locales/es/translation.json b/frontend/src/locales/es/translation.json index 61093f21..a6e5dd88 100644 --- a/frontend/src/locales/es/translation.json +++ b/frontend/src/locales/es/translation.json @@ -173,7 +173,32 @@ "uploadSuccess": "Certificado subido exitosamente", "uploadFailed": "Error al subir el certificado", "note": "Nota", - "noteText": "Puedes eliminar certificados personalizados y certificados de prueba. Los certificados de Let's Encrypt de producción se renuevan automáticamente y no deben eliminarse a menos que cambies de entorno." + "noteText": "Puedes eliminar certificados personalizados, certificados de staging y certificados de producción vencidos o por vencer que no estén vinculados a ningún host proxy. 
Los certificados de producción activos se renuevan automáticamente mediante Caddy.", + "provider": "Provider", + "deleteTitle": "Delete Certificate", + "deleteConfirmCustom": "This will permanently delete this certificate. A backup will be created first.", + "deleteConfirmStaging": "This staging certificate will be removed. It will be regenerated on next request.", + "deleteConfirmExpired": "This expired certificate is no longer active and will be permanently removed.", + "deleteConfirmExpiring": "This certificate is expiring soon. It will be permanently removed and will not be auto-renewed.", + "deleteSuccess": "Certificate deleted", + "deleteFailed": "Failed to delete certificate", + "deleteInUse": "Cannot delete — certificate is attached to a proxy host", + "deleteButton": "Delete", + "bulkSelectAll": "Seleccionar todos los certificados eliminables", + "selectCert": "Seleccionar certificado {{name}}", + "bulkSelectedCount": "{{count}} certificado(s) seleccionado(s)", + "bulkDeleteTitle": "Eliminar {{count}} Certificado(s)", + "bulkDeleteDescription": "Eliminar {{count}} certificado(s)", + "bulkDeleteConfirm": "Los siguientes certificados se eliminarán permanentemente. 
El servidor crea una copia de seguridad antes de cada eliminación.", + "bulkDeleteListAriaLabel": "Certificados a eliminar", + "bulkDeleteButton": "Eliminar {{count}} Certificado(s)", + "bulkDeleteSuccess": "{{count}} certificado(s) eliminado(s)", + "bulkDeletePartial": "{{deleted}} eliminado(s), {{failed}} fallido(s)", + "bulkDeleteFailed": "No se pudieron eliminar los certificados", + "providerStaging": "Pruebas", + "providerCustom": "Personalizado", + "providerExpiredLE": "LE Expirado", + "providerExpiringLE": "LE Por expirar" }, "auth": { "login": "Iniciar Sesión", @@ -240,6 +265,7 @@ "disabledDescription": "Sistema de Prevención de Intrusiones impulsado por inteligencia de amenazas comunitaria", "processRunning": "Ejecutando (PID {{pid}})", "processStopped": "Proceso detenido", + "starting": "Iniciando...", "toggleTooltip": "Alternar protección CrowdSec", "copyFailed": "Error al copiar la clave API", "keyWarning": { @@ -406,7 +432,13 @@ "monitorDeleted": "Monitor eliminado", "deleteConfirm": "¿Eliminar este monitor? Esto no se puede deshacer.", "pending": "VERIFICANDO...", - "pendingFirstCheck": "Esperando primera verificación..." + "pendingFirstCheck": "Esperando primera verificación...", + "urlPlaceholder": "https://example.com", + "urlPlaceholderHttp": "https://example.com", + "urlPlaceholderTcp": "192.168.1.1:8080", + "urlHelperHttp": "Ingresa la URL completa incluyendo el esquema (ej. https://example.com)", + "urlHelperTcp": "Ingresa como host:puerto sin prefijo de esquema (ej. 192.168.1.1:8080 o nombre-de-host:22)", + "invalidTcpFormat": "Los monitores TCP requieren el formato host:puerto. Elimina el prefijo de esquema (ej. usa 192.168.1.1:8080, no tcp://192.168.1.1:8080)." 
}, "domains": { "title": "Dominios", @@ -499,7 +531,12 @@ "webhookUrl": "Webhook URL (Optional)", "webhookUrlHelp": "POST requests will be sent to this URL when security events occur.", "emailRecipients": "Email Recipients (Optional)", - "emailRecipientsHelp": "Comma-separated email addresses." + "emailRecipientsHelp": "Comma-separated email addresses.", + "ntfy": "Ntfy", + "ntfyTopicUrl": "Topic URL", + "ntfyAccessToken": "Access Token (optional)", + "ntfyAccessTokenPlaceholder": "Enter your Ntfy access token", + "ntfyAccessTokenHelp": "Your Ntfy access token for authenticated topics. Required for password-protected topics on self-hosted instances. The token is stored securely and separately." }, "users": { "title": "Gestión de Usuarios", diff --git a/frontend/src/locales/fr/translation.json b/frontend/src/locales/fr/translation.json index 0c1c302a..7c0b5d76 100644 --- a/frontend/src/locales/fr/translation.json +++ b/frontend/src/locales/fr/translation.json @@ -173,7 +173,32 @@ "uploadSuccess": "Certificat téléversé avec succès", "uploadFailed": "Échec du téléversement du certificat", "note": "Note", - "noteText": "Vous pouvez supprimer les certificats personnalisés et les certificats de test. Les certificats Let's Encrypt de production sont renouvelés automatiquement et ne doivent pas être supprimés sauf en cas de changement d'environnement." + "noteText": "Vous pouvez supprimer les certificats personnalisés, les certificats de staging et les certificats de production expirés ou arrivant à expiration qui ne sont associés à aucun hôte proxy. Les certificats de production actifs sont renouvelés automatiquement par Caddy.", + "provider": "Provider", + "deleteTitle": "Delete Certificate", + "deleteConfirmCustom": "This will permanently delete this certificate. A backup will be created first.", + "deleteConfirmStaging": "This staging certificate will be removed. 
It will be regenerated on next request.", + "deleteConfirmExpired": "This expired certificate is no longer active and will be permanently removed.", + "deleteConfirmExpiring": "This certificate is expiring soon. It will be permanently removed and will not be auto-renewed.", + "deleteSuccess": "Certificate deleted", + "deleteFailed": "Failed to delete certificate", + "deleteInUse": "Cannot delete — certificate is attached to a proxy host", + "deleteButton": "Delete", + "bulkSelectAll": "Sélectionner tous les certificats supprimables", + "selectCert": "Sélectionner le certificat {{name}}", + "bulkSelectedCount": "{{count}} certificat(s) sélectionné(s)", + "bulkDeleteTitle": "Supprimer {{count}} Certificat(s)", + "bulkDeleteDescription": "Supprimer {{count}} certificat(s)", + "bulkDeleteConfirm": "Les certificats suivants seront définitivement supprimés. Le serveur crée une sauvegarde avant chaque suppression.", + "bulkDeleteListAriaLabel": "Certificats à supprimer", + "bulkDeleteButton": "Supprimer {{count}} Certificat(s)", + "bulkDeleteSuccess": "{{count}} certificat(s) supprimé(s)", + "bulkDeletePartial": "{{deleted}} supprimé(s), {{failed}} échoué(s)", + "bulkDeleteFailed": "Impossible de supprimer les certificats", + "providerStaging": "Test", + "providerCustom": "Personnalisé", + "providerExpiredLE": "LE Expiré", + "providerExpiringLE": "LE Expirant" }, "auth": { "login": "Connexion", @@ -240,6 +265,7 @@ "disabledDescription": "Système de Prévention des Intrusions alimenté par le renseignement communautaire sur les menaces", "processRunning": "En cours d'exécution (PID {{pid}})", "processStopped": "Processus arrêté", + "starting": "Démarrage...", "toggleTooltip": "Basculer la protection CrowdSec", "copyFailed": "Échec de la copie de la clé API", "keyWarning": { @@ -406,7 +432,13 @@ "monitorDeleted": "Moniteur supprimé", "deleteConfirm": "Supprimer ce moniteur? 
Cette action est irréversible.", "pending": "VÉRIFICATION...", - "pendingFirstCheck": "En attente de la première vérification..." + "pendingFirstCheck": "En attente de la première vérification...", + "urlPlaceholder": "https://example.com", + "urlPlaceholderHttp": "https://example.com", + "urlPlaceholderTcp": "192.168.1.1:8080", + "urlHelperHttp": "Saisissez l'URL complète avec le schéma (ex. https://example.com)", + "urlHelperTcp": "Saisissez sous la forme hôte:port sans préfixe de schéma (ex. 192.168.1.1:8080 ou nom-d-hôte:22)", + "invalidTcpFormat": "Les moniteurs TCP nécessitent le format hôte:port. Supprimez le préfixe de schéma (ex. 192.168.1.1:8080 et non tcp://192.168.1.1:8080)." }, "domains": { "title": "Domaines", @@ -499,7 +531,12 @@ "webhookUrl": "Webhook URL (Optional)", "webhookUrlHelp": "POST requests will be sent to this URL when security events occur.", "emailRecipients": "Email Recipients (Optional)", - "emailRecipientsHelp": "Comma-separated email addresses." + "emailRecipientsHelp": "Comma-separated email addresses.", + "ntfy": "Ntfy", + "ntfyTopicUrl": "Topic URL", + "ntfyAccessToken": "Access Token (optional)", + "ntfyAccessTokenPlaceholder": "Enter your Ntfy access token", + "ntfyAccessTokenHelp": "Your Ntfy access token for authenticated topics. Required for password-protected topics on self-hosted instances. The token is stored securely and separately." 
}, "users": { "title": "Gestion des Utilisateurs", diff --git a/frontend/src/locales/zh/translation.json b/frontend/src/locales/zh/translation.json index 4926cbb6..a7a5808b 100644 --- a/frontend/src/locales/zh/translation.json +++ b/frontend/src/locales/zh/translation.json @@ -173,7 +173,32 @@ "uploadSuccess": "证书上传成功", "uploadFailed": "证书上传失败", "note": "注意", - "noteText": "您可以删除自定义证书和测试证书。生产环境的Let's Encrypt证书会自动续期,除非切换环境否则不应删除。" + "noteText": "您可以删除未附加到任何代理主机的自定义证书、暂存证书以及已过期或即将过期的生产证书。活跃的生产证书由 Caddy 自动续期。", + "provider": "Provider", + "deleteTitle": "Delete Certificate", + "deleteConfirmCustom": "This will permanently delete this certificate. A backup will be created first.", + "deleteConfirmStaging": "This staging certificate will be removed. It will be regenerated on next request.", + "deleteConfirmExpired": "This expired certificate is no longer active and will be permanently removed.", + "deleteConfirmExpiring": "This certificate is expiring soon. It will be permanently removed and will not be auto-renewed.", + "deleteSuccess": "Certificate deleted", + "deleteFailed": "Failed to delete certificate", + "deleteInUse": "Cannot delete — certificate is attached to a proxy host", + "deleteButton": "Delete", + "bulkSelectAll": "选择所有可删除的证书", + "selectCert": "选择证书 {{name}}", + "bulkSelectedCount": "已选择 {{count}} 个证书", + "bulkDeleteTitle": "删除 {{count}} 个证书", + "bulkDeleteDescription": "删除 {{count}} 个证书", + "bulkDeleteConfirm": "以下证书将被永久删除。服务器在每次删除前会创建备份。", + "bulkDeleteListAriaLabel": "将被删除的证书", + "bulkDeleteButton": "删除 {{count}} 个证书", + "bulkDeleteSuccess": "已删除 {{count}} 个证书", + "bulkDeletePartial": "已删除 {{deleted}} 个,{{failed}} 个失败", + "bulkDeleteFailed": "证书删除失败", + "providerStaging": "测试", + "providerCustom": "自定义", + "providerExpiredLE": "已过期 LE", + "providerExpiringLE": "即将过期 LE" }, "auth": { "login": "登录", @@ -240,6 +265,7 @@ "disabledDescription": "由社区威胁情报驱动的入侵防御系统", "processRunning": "运行中 (PID {{pid}})", "processStopped": "进程已停止", + "starting": "启动中...", 
"toggleTooltip": "切换 CrowdSec 保护", "copyFailed": "复制API密钥失败", "keyWarning": { @@ -406,7 +432,13 @@ "monitorDeleted": "监控器已删除", "deleteConfirm": "删除此监控器?此操作无法撤销。", "pending": "检查中...", - "pendingFirstCheck": "等待首次检查..." + "pendingFirstCheck": "等待首次检查...", + "urlPlaceholder": "https://example.com", + "urlPlaceholderHttp": "https://example.com", + "urlPlaceholderTcp": "192.168.1.1:8080", + "urlHelperHttp": "请输入包含协议的完整 URL(例如 https://example.com)", + "urlHelperTcp": "请输入 主机:端口 格式,不含协议前缀(例如 192.168.1.1:8080 或 hostname:22)", + "invalidTcpFormat": "TCP 监控器需要 主机:端口 格式。请删除协议前缀(例如使用 192.168.1.1:8080,而非 tcp://192.168.1.1:8080)。" }, "domains": { "title": "域名", @@ -499,7 +531,12 @@ "webhookUrl": "Webhook URL (Optional)", "webhookUrlHelp": "POST requests will be sent to this URL when security events occur.", "emailRecipients": "Email Recipients (Optional)", - "emailRecipientsHelp": "Comma-separated email addresses." + "emailRecipientsHelp": "Comma-separated email addresses.", + "ntfy": "Ntfy", + "ntfyTopicUrl": "Topic URL", + "ntfyAccessToken": "Access Token (optional)", + "ntfyAccessTokenPlaceholder": "Enter your Ntfy access token", + "ntfyAccessTokenHelp": "Your Ntfy access token for authenticated topics. Required for password-protected topics on self-hosted instances. The token is stored securely and separately." }, "users": { "title": "用户管理", diff --git a/frontend/src/pages/CrowdSecConfig.tsx b/frontend/src/pages/CrowdSecConfig.tsx index bca3888d..003f218e 100644 --- a/frontend/src/pages/CrowdSecConfig.tsx +++ b/frontend/src/pages/CrowdSecConfig.tsx @@ -40,6 +40,22 @@ export default function CrowdSecConfig() { const [validationError, setValidationError] = useState(null) const [applyInfo, setApplyInfo] = useState<{ status?: string; backup?: string; reloadHint?: boolean; usedCscli?: boolean; cacheKey?: string } | null>(null) const queryClient = useQueryClient() + // Read the "CrowdSec is starting" signal broadcast by Security.tsx via the + // QueryClient cache. 
No HTTP call is made; this is pure in-memory coordination. + const { data: crowdsecStartingCache } = useQuery<{ isStarting: boolean; startedAt?: number }>({ + queryKey: ['crowdsec-starting'], + queryFn: () => ({ isStarting: false, startedAt: 0 }), + staleTime: Infinity, + gcTime: Infinity, + }) + + // isStartingUp is true only while the mutation is genuinely running. + // The 90-second cap guards against stale cache if Security.tsx onSuccess/onError + // never fired (e.g., browser tab was closed mid-mutation). + const isStartingUp = + (crowdsecStartingCache?.isStarting === true) && + Date.now() - (crowdsecStartingCache.startedAt ?? 0) < 90_000 + const isLocalMode = !!status && status.crowdsec?.mode !== 'disabled' // Note: CrowdSec mode is now controlled via Security Dashboard toggle const { data: featureFlags } = useQuery({ queryKey: ['feature-flags'], queryFn: getFeatureFlags }) @@ -579,7 +595,7 @@ export default function CrowdSecConfig() { )} {/* Yellow warning: Process running but LAPI initializing */} - {lapiStatusQuery.data && lapiStatusQuery.data.running && !lapiStatusQuery.data.lapi_ready && initialCheckComplete && ( + {lapiStatusQuery.data && lapiStatusQuery.data.running && !lapiStatusQuery.data.lapi_ready && initialCheckComplete && !isStartingUp && (
@@ -605,7 +621,7 @@ export default function CrowdSecConfig() { )} {/* Red warning: Process not running at all */} - {lapiStatusQuery.data && !lapiStatusQuery.data.running && initialCheckComplete && ( + {lapiStatusQuery.data && !lapiStatusQuery.data.running && initialCheckComplete && !isStartingUp && (
diff --git a/frontend/src/pages/Notifications.tsx b/frontend/src/pages/Notifications.tsx index bf91574b..09ecb97d 100644 --- a/frontend/src/pages/Notifications.tsx +++ b/frontend/src/pages/Notifications.tsx @@ -23,7 +23,7 @@ const isSupportedProviderType = (providerType: string | undefined): providerType const supportsJSONTemplates = (providerType: string | undefined): boolean => { if (!providerType) return false; const t = providerType.toLowerCase(); - return t === 'discord' || t === 'gotify' || t === 'webhook' || t === 'telegram'; + return t === 'discord' || t === 'gotify' || t === 'webhook' || t === 'telegram' || t === 'slack' || t === 'pushover' || t === 'ntfy'; }; const isUnsupportedProviderType = (providerType: string | undefined): boolean => !isSupportedProviderType(providerType); @@ -43,7 +43,7 @@ const normalizeProviderPayloadForSubmit = (data: Partial): type, }; - if (type === 'gotify' || type === 'telegram') { + if (type === 'gotify' || type === 'telegram' || type === 'slack' || type === 'pushover' || type === 'ntfy') { const normalizedToken = typeof payload.gotify_token === 'string' ? payload.gotify_token.trim() : ''; if (normalizedToken.length > 0) { @@ -147,9 +147,12 @@ const ProviderForm: FC<{ const isGotify = type === 'gotify'; const isTelegram = type === 'telegram'; const isEmail = type === 'email'; + const isSlack = type === 'slack'; + const isPushover = type === 'pushover'; + const isNtfy = type === 'ntfy'; const isNew = !watch('id'); useEffect(() => { - if (type !== 'gotify' && type !== 'telegram') { + if (type !== 'gotify' && type !== 'telegram' && type !== 'slack' && type !== 'pushover' && type !== 'ntfy') { setValue('gotify_token', '', { shouldDirty: false, shouldTouch: false }); } }, [type, setValue]); @@ -205,12 +208,25 @@ const ProviderForm: FC<{ + + +
{isEmail && (

@@ -220,11 +236,11 @@ const ProviderForm: FC<{ )} - {(isGotify || isTelegram) && ( + {(isGotify || isTelegram || isSlack || isPushover || isNtfy) && (

diff --git a/frontend/src/pages/Security.tsx b/frontend/src/pages/Security.tsx index e7b7e09a..d88884b6 100644 --- a/frontend/src/pages/Security.tsx +++ b/frontend/src/pages/Security.tsx @@ -197,8 +197,16 @@ export default function Security() { return { enabled: false } } }, - // NO optimistic updates - wait for actual confirmation + // No optimistic backend/status invalidation — server state is not updated until + // onSuccess. The UI does derive checked state from mutation variables while + // isPending to reflect the user's intent immediately (see crowdsecChecked). + onMutate: async (enabled: boolean) => { + if (enabled) { + queryClient.setQueryData(['crowdsec-starting'], { isStarting: true, startedAt: Date.now() }) + } + }, onError: (err: unknown, enabled: boolean) => { + queryClient.setQueryData(['crowdsec-starting'], { isStarting: false }) const msg = err instanceof Error ? err.message : String(err) toast.error(enabled ? `Failed to start CrowdSec: ${msg}` : `Failed to stop CrowdSec: ${msg}`) // Force refresh status from backend to ensure UI matches reality @@ -206,6 +214,7 @@ export default function Security() { fetchCrowdsecStatus() }, onSuccess: async (result: { lapi_ready?: boolean; enabled?: boolean } | boolean) => { + queryClient.setQueryData(['crowdsec-starting'], { isStarting: false }) // Refresh all related queries to ensure consistency await Promise.all([ queryClient.invalidateQueries({ queryKey: ['security-status'] }), @@ -264,6 +273,13 @@ export default function Security() { ) } + // During the crowdsecPowerMutation, use the mutation's argument as the authoritative + // checked state. Neither crowdsecStatus (local, stale) nor status.crowdsec.enabled + // (server, not yet invalidated) reflects the user's intent until onSuccess fires. + const crowdsecChecked = crowdsecPowerMutation.isPending + ? (crowdsecPowerMutation.variables ?? (crowdsecStatus?.running ?? status.crowdsec.enabled)) + : (crowdsecStatus?.running ?? 
status.crowdsec.enabled) + const cerberusDisabled = !status.cerberus?.enabled const crowdsecToggleDisabled = cerberusDisabled || crowdsecPowerMutation.isPending const crowdsecControlsDisabled = cerberusDisabled || crowdsecPowerMutation.isPending @@ -351,8 +367,8 @@ export default function Security() { )} - {/* CrowdSec Key Rejection Warning */} - {status.cerberus?.enabled && (crowdsecStatus?.running ?? status.crowdsec.enabled) && ( + {/* CrowdSec Key Rejection Warning — suppressed during startup to avoid flashing before bouncer registration completes */} + {status.cerberus?.enabled && !crowdsecPowerMutation.isPending && (crowdsecStatus?.running ?? status.crowdsec.enabled) && ( )} @@ -410,13 +426,13 @@ export default function Security() { {t('security.layer1')} {t('security.ids')}
- - {(crowdsecStatus?.running ?? status.crowdsec.enabled) ? t('common.enabled') : t('common.disabled')} + + {crowdsecPowerMutation.isPending && crowdsecPowerMutation.variables ? t('security.crowdsec.starting') : crowdsecChecked ? t('common.enabled') : t('common.disabled')}
-
- +
+
{t('security.crowdsec')} @@ -426,7 +442,7 @@ export default function Security() {

- {(crowdsecStatus?.running ?? status.crowdsec.enabled) + {crowdsecChecked ? t('security.crowdsecProtects') : t('security.crowdsecDisabledDescription')}

@@ -441,7 +457,7 @@ export default function Security() {
crowdsecPowerMutation.mutate(checked)} data-testid="toggle-crowdsec" diff --git a/frontend/src/pages/Uptime.tsx b/frontend/src/pages/Uptime.tsx index 8577d998..7dd41b60 100644 --- a/frontend/src/pages/Uptime.tsx +++ b/frontend/src/pages/Uptime.tsx @@ -346,6 +346,11 @@ const CreateMonitorModal: FC<{ onClose: () => void; t: (key: string) => string } const [type, setType] = useState<'http' | 'tcp'>('http'); const [interval, setInterval] = useState(60); const [maxRetries, setMaxRetries] = useState(3); + const [urlError, setUrlError] = useState(''); + + const urlPlaceholder = type === 'tcp' + ? t('uptime.urlPlaceholderTcp') + : t('uptime.urlPlaceholderHttp'); const mutation = useMutation({ mutationFn: (data: { name: string; url: string; type: string; interval?: number; max_retries?: number }) => @@ -363,6 +368,10 @@ const CreateMonitorModal: FC<{ onClose: () => void; t: (key: string) => string } const handleSubmit = (e: FormEvent) => { e.preventDefault(); if (!name.trim() || !url.trim()) return; + if (type === 'tcp' && url.trim().includes('://')) { + setUrlError(t('uptime.invalidTcpFormat')); + return; + } mutation.mutate({ name: name.trim(), url: url.trim(), type, interval, max_retries: maxRetries }); }; @@ -399,6 +408,29 @@ const CreateMonitorModal: FC<{ onClose: () => void; t: (key: string) => string } />
+
+ + +
+
- -
- - + {type === 'tcp' ? t('uptime.urlHelperTcp') : t('uptime.urlHelperHttp')} +

+ {urlError && ( + + )}
diff --git a/frontend/src/pages/__tests__/CrowdSecConfig.crowdsec.test.tsx b/frontend/src/pages/__tests__/CrowdSecConfig.crowdsec.test.tsx new file mode 100644 index 00000000..141db3eb --- /dev/null +++ b/frontend/src/pages/__tests__/CrowdSecConfig.crowdsec.test.tsx @@ -0,0 +1,165 @@ +import { QueryClient, QueryClientProvider } from '@tanstack/react-query' +import { act, render, screen } from '@testing-library/react' +import { MemoryRouter } from 'react-router-dom' +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest' + +import * as crowdsecApi from '../../api/crowdsec' +import type { CrowdSecStatus } from '../../api/crowdsec' +import * as featureFlagsApi from '../../api/featureFlags' +import * as presetsApi from '../../api/presets' +import * as securityApi from '../../api/security' +import CrowdSecConfig from '../CrowdSecConfig' + +vi.mock('../../api/security') +vi.mock('../../api/crowdsec') +vi.mock('../../api/presets') +vi.mock('../../api/featureFlags') +vi.mock('../../api/backups', () => ({ + createBackup: vi.fn().mockResolvedValue({ filename: 'backup.tar.gz' }), +})) +vi.mock('../../hooks/useConsoleEnrollment', () => ({ + useConsoleStatus: vi.fn(() => ({ + data: { + status: 'not_enrolled', + tenant: 'default', + agent_name: 'charon-agent', + last_error: null, + last_attempt_at: null, + enrolled_at: null, + last_heartbeat_at: null, + key_present: false, + correlation_id: 'corr-1', + }, + isLoading: false, + isRefetching: false, + })), + useEnrollConsole: vi.fn(() => ({ + mutateAsync: vi.fn().mockResolvedValue({ status: 'enrolling', key_present: false }), + isPending: false, + })), + useClearConsoleEnrollment: vi.fn(() => ({ + mutate: vi.fn(), + isPending: false, + })), +})) +vi.mock('../../components/CrowdSecBouncerKeyDisplay', () => ({ + CrowdSecBouncerKeyDisplay: () => null, +})) +vi.mock('../../utils/crowdsecExport', () => ({ + buildCrowdsecExportFilename: vi.fn(() => 'crowdsec-default.tar.gz'), + promptCrowdsecFilename: vi.fn(() => 
'crowdsec.tar.gz'), + downloadCrowdsecExport: vi.fn(), +})) +vi.mock('../../utils/toast', () => ({ + toast: { success: vi.fn(), error: vi.fn(), info: vi.fn() }, +})) + +const baseStatus = { + cerberus: { enabled: true }, + crowdsec: { enabled: true, mode: 'local' as const, api_url: '' }, + waf: { enabled: true, mode: 'enabled' as const }, + rate_limit: { enabled: true }, + acl: { enabled: true }, +} + +function makeQueryClient() { + return new QueryClient({ + defaultOptions: { + queries: { retry: false, gcTime: Infinity }, + mutations: { retry: false }, + }, + }) +} + +function renderWithSeed( + crowdsecStartingData: { isStarting: boolean; startedAt?: number }, + lapiStatus: { running: boolean; pid?: number; lapi_ready: boolean } +) { + const fullStatus: CrowdSecStatus = { pid: 0, ...lapiStatus } + const queryClient = makeQueryClient() + queryClient.setQueryData(['crowdsec-starting'], crowdsecStartingData) + queryClient.setQueryData(['feature-flags'], { 'feature.crowdsec.console_enrollment': true }) + queryClient.setQueryData(['security-status'], baseStatus) + // Seed lapi-status so the component has data immediately (no loading gap). + // Also override the mock so any refetch after initialCheckComplete returns the + // same value, preventing the beforeEach default from overwriting the seed. 
+ queryClient.setQueryData(['crowdsec-lapi-status'], fullStatus) + vi.mocked(crowdsecApi.statusCrowdsec).mockResolvedValue(fullStatus) + + return { + queryClient, + ...render( + + + + + + ), + } +} + +describe('CrowdSecConfig — isStartingUp banner suppression', () => { + beforeEach(() => { + vi.useFakeTimers() + vi.clearAllMocks() + + vi.mocked(securityApi.getSecurityStatus).mockResolvedValue(baseStatus) + vi.mocked(featureFlagsApi.getFeatureFlags).mockResolvedValue({ + 'feature.crowdsec.console_enrollment': true, + }) + vi.mocked(crowdsecApi.statusCrowdsec).mockResolvedValue({ running: true, pid: 123, lapi_ready: true }) + vi.mocked(crowdsecApi.listCrowdsecFiles).mockResolvedValue({ files: [] }) + vi.mocked(crowdsecApi.listCrowdsecDecisions).mockResolvedValue({ decisions: [] }) + vi.mocked(crowdsecApi.exportCrowdsecConfig).mockResolvedValue(new Blob()) + vi.mocked(presetsApi.listCrowdsecPresets).mockResolvedValue({ presets: [] }) + }) + + afterEach(() => { + vi.useRealTimers() + }) + + it('LAPI not-running banner suppressed when isStartingUp is true', async () => { + renderWithSeed( + { isStarting: true, startedAt: Date.now() }, + { running: false, lapi_ready: false } + ) + + // Advance past the 3-second initialCheckComplete guard + await act(async () => { await vi.advanceTimersByTimeAsync(3001) }) + + expect(screen.queryByTestId('lapi-not-running-warning')).not.toBeInTheDocument() + }) + + it('LAPI initializing banner suppressed when isStartingUp is true', async () => { + renderWithSeed( + { isStarting: true, startedAt: Date.now() }, + { running: true, lapi_ready: false } + ) + + await act(async () => { await vi.advanceTimersByTimeAsync(3001) }) + + expect(screen.queryByTestId('lapi-warning')).not.toBeInTheDocument() + }) + + it('LAPI not-running banner shows after isStartingUp expires (100s ago)', async () => { + renderWithSeed( + { isStarting: true, startedAt: Date.now() - 100_000 }, + { running: false, lapi_ready: false } + ) + + await act(async () => { await 
vi.advanceTimersByTimeAsync(3001) }) + + expect(screen.getByTestId('lapi-not-running-warning')).toBeInTheDocument() + }) + + it('LAPI not-running banner shows when isStartingUp is false', async () => { + renderWithSeed( + { isStarting: false }, + { running: false, lapi_ready: false } + ) + + await act(async () => { await vi.advanceTimersByTimeAsync(3001) }) + + expect(screen.getByTestId('lapi-not-running-warning')).toBeInTheDocument() + }) +}) diff --git a/frontend/src/pages/__tests__/Notifications.test.tsx b/frontend/src/pages/__tests__/Notifications.test.tsx index 231430c3..844a0216 100644 --- a/frontend/src/pages/__tests__/Notifications.test.tsx +++ b/frontend/src/pages/__tests__/Notifications.test.tsx @@ -16,7 +16,7 @@ vi.mock('react-i18next', () => ({ })) vi.mock('../../api/notifications', () => ({ - SUPPORTED_NOTIFICATION_PROVIDER_TYPES: ['discord', 'gotify', 'webhook', 'email', 'telegram'], + SUPPORTED_NOTIFICATION_PROVIDER_TYPES: ['discord', 'gotify', 'webhook', 'email', 'telegram', 'slack', 'pushover', 'ntfy'], getProviders: vi.fn(), createProvider: vi.fn(), updateProvider: vi.fn(), @@ -148,8 +148,8 @@ describe('Notifications', () => { const typeSelect = screen.getByTestId('provider-type') as HTMLSelectElement const options = Array.from(typeSelect.options) - expect(options).toHaveLength(5) - expect(options.map((option) => option.value)).toEqual(['discord', 'gotify', 'webhook', 'email', 'telegram']) + expect(options).toHaveLength(8) + expect(options.map((option) => option.value)).toEqual(['discord', 'gotify', 'webhook', 'email', 'telegram', 'slack', 'pushover', 'ntfy']) expect(typeSelect.disabled).toBe(false) }) @@ -428,8 +428,8 @@ describe('Notifications', () => { const legacyProvider: NotificationProvider = { ...baseProvider, id: 'legacy-provider', - name: 'Legacy Slack', - type: 'slack', + name: 'Legacy SMS', + type: 'legacy_sms', enabled: false, } @@ -611,4 +611,73 @@ describe('Notifications', () => { 
expect(screen.getByTestId('provider-config')).toBeInTheDocument() }) + + it('shows token field when slack type selected', async () => { + const user = userEvent.setup() + renderWithQueryClient() + + await user.click(await screen.findByTestId('add-provider-btn')) + await user.selectOptions(screen.getByTestId('provider-type'), 'slack') + + expect(screen.getByTestId('provider-gotify-token')).toBeInTheDocument() + }) + + it('hides token field when switching from slack to discord', async () => { + const user = userEvent.setup() + renderWithQueryClient() + + await user.click(await screen.findByTestId('add-provider-btn')) + await user.selectOptions(screen.getByTestId('provider-type'), 'slack') + expect(screen.getByTestId('provider-gotify-token')).toBeInTheDocument() + + await user.selectOptions(screen.getByTestId('provider-type'), 'discord') + expect(screen.queryByTestId('provider-gotify-token')).toBeNull() + }) + + it('submits slack provider with token as webhook URL', async () => { + const user = userEvent.setup() + renderWithQueryClient() + + await user.click(await screen.findByTestId('add-provider-btn')) + await user.selectOptions(screen.getByTestId('provider-type'), 'slack') + await user.type(screen.getByTestId('provider-name'), 'Slack Alerts') + await user.type(screen.getByTestId('provider-gotify-token'), 'https://hooks.slack.com/services/T00/B00/xxx') + await user.click(screen.getByTestId('provider-save-btn')) + + await waitFor(() => { + expect(notificationsApi.createProvider).toHaveBeenCalled() + }) + + const payload = vi.mocked(notificationsApi.createProvider).mock.calls[0][0] + expect(payload.type).toBe('slack') + expect(payload.token).toBe('https://hooks.slack.com/services/T00/B00/xxx') + }) + + it('does not require URL for slack', async () => { + const user = userEvent.setup() + renderWithQueryClient() + + await user.click(await screen.findByTestId('add-provider-btn')) + await user.selectOptions(screen.getByTestId('provider-type'), 'slack') + await 
user.type(screen.getByTestId('provider-name'), 'Slack No URL') + await user.type(screen.getByTestId('provider-gotify-token'), 'https://hooks.slack.com/services/T00/B00/xxx') + await user.click(screen.getByTestId('provider-save-btn')) + + await waitFor(() => { + expect(notificationsApi.createProvider).toHaveBeenCalled() + }) + + expect(screen.queryByTestId('provider-url-error')).toBeNull() + }) + + it('renders pushover form with API Token field and User Key placeholder', async () => { + const user = userEvent.setup() + renderWithQueryClient() + + await user.click(await screen.findByTestId('add-provider-btn')) + await user.selectOptions(screen.getByTestId('provider-type'), 'pushover') + + expect(screen.getByTestId('provider-gotify-token')).toBeInTheDocument() + expect(screen.getByTestId('provider-url')).toHaveAttribute('placeholder', 'notificationProviders.pushoverUserKeyPlaceholder') + }) }) diff --git a/frontend/src/pages/__tests__/Security.crowdsec.test.tsx b/frontend/src/pages/__tests__/Security.crowdsec.test.tsx new file mode 100644 index 00000000..020df6ec --- /dev/null +++ b/frontend/src/pages/__tests__/Security.crowdsec.test.tsx @@ -0,0 +1,206 @@ +import { QueryClient, QueryClientProvider } from '@tanstack/react-query' +import { render, screen, waitFor } from '@testing-library/react' +import userEvent from '@testing-library/user-event' +import { BrowserRouter } from 'react-router-dom' +import { describe, it, expect, vi, beforeEach } from 'vitest' + +import * as crowdsecApi from '../../api/crowdsec' +import * as logsApi from '../../api/logs' +import * as api from '../../api/security' +import * as settingsApi from '../../api/settings' +import Security from '../Security' + +import type { SecurityStatus } from '../../api/security' +import type * as ReactRouterDom from 'react-router-dom' + +const mockNavigate = vi.fn() + +vi.mock('react-router-dom', async () => { + const actual = await vi.importActual('react-router-dom') + return { ...actual, useNavigate: () => 
mockNavigate } +}) + +vi.mock('../../api/security') +vi.mock('../../api/settings') +vi.mock('../../api/crowdsec') +vi.mock('../../api/logs', () => ({ + connectLiveLogs: vi.fn(() => vi.fn()), + connectSecurityLogs: vi.fn(() => vi.fn()), +})) +vi.mock('../../components/LiveLogViewer', () => ({ + LiveLogViewer: () =>
, +})) +vi.mock('../../components/SecurityNotificationSettingsModal', () => ({ + SecurityNotificationSettingsModal: () =>
, +})) +vi.mock('../../components/CrowdSecKeyWarning', () => ({ + CrowdSecKeyWarning: () =>
CrowdSec API Key Updated
, +})) +vi.mock('../../hooks/useNotifications', () => ({ + useSecurityNotificationSettings: () => ({ + data: { + enabled: false, + min_log_level: 'warn', + security_waf_enabled: true, + security_acl_enabled: true, + security_rate_limit_enabled: true, + webhook_url: '', + }, + isLoading: false, + }), + useUpdateSecurityNotificationSettings: () => ({ + mutate: vi.fn(), + isPending: false, + }), +})) +vi.mock('../../hooks/useSecurity', async (importOriginal) => { + const actual = await importOriginal() + return { + ...actual, + useSecurityConfig: vi.fn(() => ({ data: { config: { admin_whitelist: '' } } })), + useUpdateSecurityConfig: vi.fn(() => ({ mutate: vi.fn(), isPending: false })), + useGenerateBreakGlassToken: vi.fn(() => ({ mutate: vi.fn(), isPending: false })), + useRuleSets: vi.fn(() => ({ data: { rulesets: [] } })), + } +}) + +const baseStatus: SecurityStatus = { + cerberus: { enabled: true }, + crowdsec: { enabled: false, mode: 'disabled' as const, api_url: '' }, + waf: { enabled: false, mode: 'disabled' as const }, + rate_limit: { enabled: false }, + acl: { enabled: false }, +} + +function createQueryClient() { + return new QueryClient({ + defaultOptions: { + queries: { retry: false, gcTime: Infinity }, + mutations: { retry: false }, + }, + }) +} + +function renderSecurity(queryClient?: QueryClient) { + const qc = queryClient ?? 
createQueryClient() + return { + qc, + ...render( + + + + + + ), + } +} + +describe('Security CrowdSec mutation UX', () => { + beforeEach(() => { + vi.resetAllMocks() + vi.mocked(api.getSecurityStatus).mockResolvedValue(baseStatus) + vi.mocked(api.getSecurityConfig).mockResolvedValue({ config: { name: 'default', waf_mode: 'block', waf_rules_source: '', admin_whitelist: '' } }) + vi.mocked(api.getRuleSets).mockResolvedValue({ rulesets: [] }) + vi.mocked(api.updateSecurityConfig).mockResolvedValue({}) + vi.mocked(logsApi.connectLiveLogs).mockReturnValue(vi.fn()) + vi.mocked(logsApi.connectSecurityLogs).mockReturnValue(vi.fn()) + vi.mocked(crowdsecApi.statusCrowdsec).mockResolvedValue({ running: false, pid: 0, lapi_ready: false }) + vi.mocked(crowdsecApi.getCrowdsecKeyStatus).mockResolvedValue({ + env_key_rejected: false, + key_source: 'auto-generated', + current_key_preview: '...', + message: 'OK', + }) + vi.mocked(settingsApi.updateSetting).mockResolvedValue(undefined) + }) + + it('toggle stays checked while crowdsecPowerMutation is pending', async () => { + // startCrowdsec never resolves — keeps mutation pending + vi.mocked(crowdsecApi.startCrowdsec).mockReturnValue(new Promise(() => {})) + + const user = userEvent.setup() + renderSecurity() + + const toggle = await screen.findByTestId('toggle-crowdsec') + await user.click(toggle) + + // While pending, the toggle must reflect the user's intent (checked=true) + await waitFor(() => { + expect(toggle).toBeChecked() + }) + }) + + it('CrowdSec badge shows "Starting..." 
while mutation is pending', async () => { + vi.mocked(crowdsecApi.startCrowdsec).mockReturnValue(new Promise(() => {})) + + const user = userEvent.setup() + renderSecurity() + + const toggle = await screen.findByTestId('toggle-crowdsec') + await user.click(toggle) + + await waitFor(() => { + expect(screen.getByText('Starting...')).toBeInTheDocument() + }) + }) + + it('CrowdSecKeyWarning is not rendered while crowdsecPowerMutation is pending', async () => { + vi.mocked(crowdsecApi.startCrowdsec).mockReturnValue(new Promise(() => {})) + vi.mocked(crowdsecApi.getCrowdsecKeyStatus).mockResolvedValue({ + env_key_rejected: true, + key_source: 'env', + full_key: 'abc123', + current_key_preview: 'abc...', + rejected_key_preview: 'def...', + message: 'Key rejected', + }) + + const user = userEvent.setup() + renderSecurity() + + const toggle = await screen.findByTestId('toggle-crowdsec') + await user.click(toggle) + + await waitFor(() => { + expect(toggle).toBeChecked() + }) + + expect(screen.queryByTestId('crowdsec-key-warning')).not.toBeInTheDocument() + }) + + it('toggle reflects correct final state after mutation succeeds', async () => { + vi.mocked(crowdsecApi.startCrowdsec).mockResolvedValue({ status: 'started', pid: 123, lapi_ready: true }) + vi.mocked(crowdsecApi.statusCrowdsec) + .mockResolvedValueOnce({ running: false, pid: 0, lapi_ready: false }) + .mockResolvedValue({ running: true, pid: 123, lapi_ready: true }) + // Call order: 1st → baseStatus, 2nd → baseStatus, 3rd+ → enabled + vi.mocked(api.getSecurityStatus) + .mockResolvedValueOnce(baseStatus) + .mockResolvedValueOnce(baseStatus) + .mockResolvedValue({ ...baseStatus, crowdsec: { ...baseStatus.crowdsec, enabled: true } }) + + const user = userEvent.setup() + renderSecurity() + + const toggle = await screen.findByTestId('toggle-crowdsec') + await user.click(toggle) + + await waitFor(() => { + expect(toggle).toBeChecked() + }, { timeout: 3000 }) + }) + + it('toggle reverts to unchecked when mutation fails', 
async () => { + vi.mocked(crowdsecApi.startCrowdsec).mockRejectedValue(new Error('failed')) + + const user = userEvent.setup() + renderSecurity() + + const toggle = await screen.findByTestId('toggle-crowdsec') + await user.click(toggle) + + await waitFor(() => { + expect(toggle).not.toBeChecked() + }, { timeout: 3000 }) + }) +}) diff --git a/frontend/src/pages/__tests__/Uptime.tcp-ux.test.tsx b/frontend/src/pages/__tests__/Uptime.tcp-ux.test.tsx new file mode 100644 index 00000000..0c06a3e9 --- /dev/null +++ b/frontend/src/pages/__tests__/Uptime.tcp-ux.test.tsx @@ -0,0 +1,265 @@ +import { screen, waitFor } from '@testing-library/react' +import userEvent from '@testing-library/user-event' +import { describe, it, expect, vi, beforeEach } from 'vitest' + +import { renderWithQueryClient } from '../../test-utils/renderWithQueryClient' +import Uptime from '../Uptime' + +// Mock react-i18next +vi.mock('react-i18next', () => ({ + useTranslation: () => ({ + t: (key: string, options?: Record) => { + const translations: Record = { + 'uptime.title': 'Uptime Monitoring', + 'uptime.loadingMonitors': 'Loading monitors...', + 'uptime.noMonitorsFound': 'No monitors found', + 'uptime.syncWithHosts': 'Sync with Hosts', + 'uptime.syncing': 'Syncing...', + 'uptime.addMonitor': 'Add Monitor', + 'uptime.autoRefreshing': 'Auto-refreshing every 30s', + 'uptime.proxyHosts': 'Proxy Hosts', + 'uptime.remoteServers': 'Remote Servers', + 'uptime.otherMonitors': 'Other Monitors', + 'uptime.latency': 'Latency', + 'uptime.lastCheck': 'Last Check', + 'uptime.never': 'Never', + 'uptime.configureMonitor': 'Configure Monitor', + 'uptime.createMonitor': 'Create Monitor', + 'uptime.monitorSettings': 'Monitor Settings', + 'uptime.triggerHealthCheck': 'Trigger Health Check', + 'uptime.paused': 'Paused', + 'uptime.pause': 'Pause', + 'uptime.unpause': 'Resume', + 'uptime.maxRetries': 'Max Retries', + 'uptime.maxRetriesHelper': 'Number of retries before marking as down', + 'uptime.checkInterval': 'Check 
Interval', + 'uptime.saveChanges': 'Save Changes', + 'uptime.monitorUrl': 'Monitor URL', + 'uptime.urlPlaceholder': 'https://example.com', + 'uptime.urlPlaceholderHttp': 'https://example.com', + 'uptime.urlPlaceholderTcp': '192.168.1.1:8080', + 'uptime.urlHelperHttp': 'Enter the full URL including the scheme', + 'uptime.urlHelperTcp': 'Enter as host:port with no scheme prefix', + 'uptime.invalidTcpFormat': 'TCP monitors require host:port format. Remove the scheme prefix.', + 'uptime.monitorType': 'Monitor Type', + 'uptime.monitorTypeHttp': 'HTTP(S)', + 'uptime.monitorTypeTcp': 'TCP', + 'uptime.noHistoryAvailable': 'No history available', + 'uptime.last60Checks': 'Last 60 Checks', + 'common.configure': 'Configure', + 'common.cancel': 'Cancel', + 'common.delete': 'Delete', + 'common.create': 'Create', + 'common.saving': 'Saving...', + 'common.name': 'Name', + 'common.close': 'Close', + } + if (options && typeof options === 'object') { + let result = translations[key] || key + for (const [k, v] of Object.entries(options)) { + result = result.replace(`{{${k}}}`, String(v)) + } + return result + } + return translations[key] || key + }, + }), +})) + +// Mock uptime API +vi.mock('../../api/uptime', () => ({ + getMonitors: vi.fn(), + getMonitorHistory: vi.fn(), + updateMonitor: vi.fn(), + deleteMonitor: vi.fn(), + checkMonitor: vi.fn(), + createMonitor: vi.fn(), + syncMonitors: vi.fn(), +})) + +async function openCreateModal() { + const { getMonitors } = await import('../../api/uptime') + vi.mocked(getMonitors).mockResolvedValue([]) + + renderWithQueryClient() + + await waitFor(() => { + expect(screen.getByTestId('add-monitor-button')).toBeInTheDocument() + }) + + const user = userEvent.setup() + await user.click(screen.getByTestId('add-monitor-button')) + + await waitFor(() => { + expect(screen.getByText('Create Monitor')).toBeInTheDocument() + }) + + return user +} + +describe('CreateMonitorModal — TCP UX', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + 
it('renders HTTP placeholder by default', async () => { + await openCreateModal() + + const urlInput = screen.getByLabelText(/Monitor URL/) + expect(urlInput).toHaveAttribute('placeholder', 'https://example.com') + }) + + it('renders TCP placeholder when type is TCP', async () => { + const user = await openCreateModal() + + const typeSelect = screen.getByLabelText(/Monitor Type/) + await user.selectOptions(typeSelect, 'tcp') + + await waitFor(() => { + const urlInput = screen.getByLabelText(/Monitor URL/) + expect(urlInput).toHaveAttribute('placeholder', '192.168.1.1:8080') + }) + }) + + it('shows HTTP helper text by default', async () => { + await openCreateModal() + + const helper = document.getElementById('create-monitor-url-helper') + expect(helper).toBeInTheDocument() + expect(helper?.textContent).toContain('scheme') + }) + + it('shows TCP helper text when type is TCP', async () => { + const user = await openCreateModal() + + const typeSelect = screen.getByLabelText(/Monitor Type/) + await user.selectOptions(typeSelect, 'tcp') + + await waitFor(() => { + const helper = document.getElementById('create-monitor-url-helper') + expect(helper?.textContent).toContain('host:port') + }) + }) + + it('shows inline error when tcp:// entered in TCP mode', async () => { + const user = await openCreateModal() + + const typeSelect = screen.getByLabelText(/Monitor Type/) + await user.selectOptions(typeSelect, 'tcp') + + const urlInput = screen.getByLabelText(/Monitor URL/) + await user.type(urlInput, 'tcp://192.168.1.1:8080') + + await waitFor(() => { + const alert = screen.getByRole('alert') + expect(alert).toBeInTheDocument() + expect(alert.textContent).toContain('host:port format') + }) + }) + + it('inline error clears when scheme prefix removed', async () => { + const user = await openCreateModal() + + const typeSelect = screen.getByLabelText(/Monitor Type/) + await user.selectOptions(typeSelect, 'tcp') + + const urlInput = screen.getByLabelText(/Monitor URL/) + await 
user.type(urlInput, 'tcp://192.168.1.1:8080') + + await waitFor(() => { + expect(screen.getByRole('alert')).toBeInTheDocument() + }) + + await user.clear(urlInput) + await user.type(urlInput, '192.168.1.1:8080') + + await waitFor(() => { + expect(screen.queryByRole('alert')).not.toBeInTheDocument() + }) + }) + + it('inline error clears when type changes from TCP to HTTP', async () => { + const user = await openCreateModal() + + const typeSelect = screen.getByLabelText(/Monitor Type/) + await user.selectOptions(typeSelect, 'tcp') + + const urlInput = screen.getByLabelText(/Monitor URL/) + await user.type(urlInput, 'tcp://192.168.1.1:8080') + + await waitFor(() => { + expect(screen.getByRole('alert')).toBeInTheDocument() + }) + + await user.selectOptions(typeSelect, 'http') + + await waitFor(() => { + expect(screen.queryByRole('alert')).not.toBeInTheDocument() + }) + }) + + it('handleSubmit blocked when tcp:// in URL while type is TCP', async () => { + const { createMonitor } = await import('../../api/uptime') + const user = await openCreateModal() + + const typeSelect = screen.getByLabelText(/Monitor Type/) + await user.selectOptions(typeSelect, 'tcp') + + const nameInput = screen.getByLabelText(/Name/) + await user.type(nameInput, 'My TCP Monitor') + + const urlInput = screen.getByLabelText(/Monitor URL/) + await user.type(urlInput, 'tcp://192.168.1.1:8080') + + const submitButton = screen.getByRole('button', { name: /Create/ }) + await user.click(submitButton) + + expect(createMonitor).not.toHaveBeenCalled() + }) + + it('handleSubmit proceeds when TCP URL is bare host:port', async () => { + const { createMonitor } = await import('../../api/uptime') + vi.mocked(createMonitor).mockResolvedValue({ + id: 'new-1', + name: 'DB Server', + type: 'tcp', + url: '192.168.1.1:5432', + interval: 60, + enabled: true, + status: 'pending', + latency: 0, + max_retries: 3, + }) + + const user = await openCreateModal() + + const typeSelect = screen.getByLabelText(/Monitor Type/) + 
await user.selectOptions(typeSelect, 'tcp') + + const nameInput = screen.getByLabelText(/Name/) + await user.type(nameInput, 'DB Server') + + const urlInput = screen.getByLabelText(/Monitor URL/) + await user.type(urlInput, '192.168.1.1:5432') + + const submitButton = screen.getByRole('button', { name: /Create/ }) + await user.click(submitButton) + + await waitFor(() => { + expect(createMonitor).toHaveBeenCalledWith( + expect.objectContaining({ url: '192.168.1.1:5432', type: 'tcp' }) + ) + }) + }) + + it('type selector appears before URL input in DOM order', async () => { + await openCreateModal() + + const typeSelect = screen.getByLabelText(/Monitor Type/) + const urlInput = screen.getByLabelText(/Monitor URL/) + + // Node.DOCUMENT_POSITION_FOLLOWING (4) means typeSelect comes before urlInput + const position = typeSelect.compareDocumentPosition(urlInput) + expect(position & Node.DOCUMENT_POSITION_FOLLOWING).toBeTruthy() + }) +}) diff --git a/frontend/src/pages/__tests__/Uptime.test.tsx b/frontend/src/pages/__tests__/Uptime.test.tsx index 54f2ac7f..a1568c38 100644 --- a/frontend/src/pages/__tests__/Uptime.test.tsx +++ b/frontend/src/pages/__tests__/Uptime.test.tsx @@ -37,7 +37,12 @@ vi.mock('react-i18next', () => ({ 'uptime.checkInterval': 'Check Interval', 'uptime.saveChanges': 'Save Changes', 'uptime.monitorUrl': 'Monitor URL', - 'uptime.urlPlaceholder': 'https://example.com or host:port', + 'uptime.urlPlaceholder': 'https://example.com', + 'uptime.urlPlaceholderHttp': 'https://example.com', + 'uptime.urlPlaceholderTcp': '192.168.1.1:8080', + 'uptime.urlHelperHttp': 'Enter the full URL including the scheme', + 'uptime.urlHelperTcp': 'Enter as host:port with no scheme prefix', + 'uptime.invalidTcpFormat': 'TCP monitors require host:port format. 
Remove the scheme prefix.', 'uptime.monitorType': 'Monitor Type', 'uptime.monitorTypeHttp': 'HTTP(S)', 'uptime.monitorTypeTcp': 'TCP', diff --git a/frontend/src/test/setup.ts b/frontend/src/test/setup.ts index 68eca292..d066bc1a 100644 --- a/frontend/src/test/setup.ts +++ b/frontend/src/test/setup.ts @@ -81,7 +81,7 @@ Object.defineProperty(window, 'matchMedia', { }) // Add ResizeObserver mock (required by Radix UI) -global.ResizeObserver = class ResizeObserver { +globalThis.ResizeObserver = class ResizeObserver { observe() {} unobserve() {} disconnect() {} diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index 849da642..9936076a 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -2,7 +2,8 @@ "compilerOptions": { "target": "ES2022", "useDefineForClassFields": true, - "lib": ["ES2022", "DOM", "DOM.Iterable"], + "types": [], + "lib": ["ES2022", "DOM"], "module": "ESNext", "skipLibCheck": true, diff --git a/frontend/tsconfig.node.json b/frontend/tsconfig.node.json index 97ede7ee..4a779b1f 100644 --- a/frontend/tsconfig.node.json +++ b/frontend/tsconfig.node.json @@ -1,6 +1,7 @@ { "compilerOptions": { "composite": true, + "types": [], "skipLibCheck": true, "module": "ESNext", "moduleResolution": "bundler", diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts index 231f24e8..22da0590 100644 --- a/frontend/vite.config.ts +++ b/frontend/vite.config.ts @@ -16,14 +16,13 @@ export default defineConfig({ build: { outDir: 'dist', sourcemap: true, - // TEMPORARY: Disable code splitting to diagnose React initialization issue - // If this works, the problem is module loading order in async chunks chunkSizeWarningLimit: 2000, - rollupOptions: { + rolldownOptions: { output: { - // Disable code splitting - bundle everything into one file - manualChunks: undefined, - inlineDynamicImports: true + // Disable code splitting — single bundle for React init stability + // codeSplitting: false is the Rolldown-native approach + // (inlineDynamicImports 
is deprecated in Rolldown) + codeSplitting: false } } } diff --git a/go.work.sum b/go.work.sum index 468746d5..c0da62ff 100644 --- a/go.work.sum +++ b/go.work.sum @@ -1,7 +1,10 @@ +cel.dev/expr v0.25.1/go.mod h1:hrXvqGP6G6gyx8UAHSHJ5RGk//1Oj5nXQ2NI02Nrsg4= cloud.google.com/go/compute v1.14.0 h1:hfm2+FfxVmnRlh6LpB7cg1ZNU+5edAHmW679JePztk0= cloud.google.com/go/compute v1.14.0/go.mod h1:YfLtxrj9sU4Yxv+sXzZkyPjEyPBZfXHUvjxega5vAdo= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= +cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0/go.mod h1:P4WPRUkOhJC13W//jWpyfJNDAIpvRbAUIYLX/4jtlE0= github.com/alecthomas/kingpin/v2 v2.4.0 h1:f48lwail6p8zpO1bC4TxtqACaGqHYA22qkHjHpqDjYY= github.com/alecthomas/kingpin/v2 v2.4.0/go.mod h1:0gyi0zQnjuFk8xrkNKamJoyUo382HRL7ATRpFZCw6tE= github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc= @@ -9,18 +12,26 @@ github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8V github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b/go.mod h1:fvzegU4vN3H1qMT+8wDmzjAcDONcgo2/SZ/TyfdUOFs= github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= +github.com/cncf/xds/go v0.0.0-20251210132809-ee656c7534f5/go.mod h1:KdCmV+x/BuvyMxRnYBlmVaq4OLiKW6iRQfvC62cvdkI= github.com/containerd/typeurl/v2 v2.2.0 h1:6NBDbQzr7I5LHgp34xAXYF5DOTQDn05X58lsPEmzLso= github.com/containerd/typeurl/v2 v2.2.0/go.mod h1:8XOOxnyatxSWuG8OfsZXVnAF4iZfedjS/8UHSPJnX4g= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= 
github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= +github.com/envoyproxy/go-control-plane v0.14.0/go.mod h1:NcS5X47pLl/hfqxU70yPwL9ZMkUlwlKxtAohpi2wBEU= +github.com/envoyproxy/go-control-plane/envoy v1.36.0/go.mod h1:ty89S1YCCVruQAm9OtKeEkQLTb+Lkz0k8v9W0Oxsv98= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4= +github.com/envoyproxy/protoc-gen-validate v1.3.0/go.mod h1:HvYl7zwPa5mffgyeTUHA9zHIH36nmrm7oCbo4YKoSWA= github.com/francoispqt/gojay v1.2.13 h1:d2m3sFjloqoIUQU3TsHBgj6qg/BVGlTBeHDUmyJnXKk= github.com/francoispqt/gojay v1.2.13/go.mod h1:ehT5mTG4ua4581f1++1WLG0vPdaA9HaiDsoyrBGkyDY= github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/go-jose/go-jose/v4 v4.1.3/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/glog v1.2.5/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw= @@ -47,6 +58,7 @@ github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRW github.com/oschwald/geoip2-golang/v2 v2.0.1 h1:YcYoG/L+gmSfk7AlToTmoL0JvblNyhGC8NyVhwDzzi8= github.com/oschwald/geoip2-golang/v2 v2.0.1/go.mod h1:qdVmcPgrTJ4q2eP9tHq/yldMTdp2VMr33uVdFbHBiBc= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod 
h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= github.com/prometheus/client_golang v1.20.4/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE= github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= @@ -68,6 +80,7 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.15.0 h1:js3yy885G8xwJa6iOISGFwd+qlUo5AvyXb7CiihdtiU= github.com/spf13/viper v1.15.0/go.mod h1:fFcTBJxvhhzSJiZy8n+PeW6t8l+KeT/uTARa0jHOQLA= +github.com/spiffe/go-spiffe/v2 v2.6.0/go.mod h1:gm2SeUoMZEtpnzPNs2Csc0D/gX33k1xIx7lEzqblHEs= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8= github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= @@ -79,6 +92,7 @@ github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtX github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI= github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.opentelemetry.io/contrib/detectors/gcp v1.39.0/go.mod h1:t/OGqzHBa5v6RHZwrDBJ2OirWc+4q/w2fTbLZwAKjTk= golang.org/x/crypto v0.44.0/go.mod h1:013i+Nw79BMiQiMsOPcVCB5ZIJbYkerPrGnOa00tvmc= golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70= golang.org/x/mod v0.18.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= @@ -108,6 +122,7 @@ golang.org/x/term v0.38.0 
h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q= golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg= golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM= +golang.org/x/term v0.41.0/go.mod h1:3pfBgksrReYfZ5lvYM0kSO0LIkAl4Yl2bXOkKP7Ec2A= golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= golang.org/x/tools v0.22.0/go.mod h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c= golang.org/x/tools v0.37.0/go.mod h1:MBN5QPQtLMHVdvsbtarmTNukZDdgwdwlO5qGacAzF0w= @@ -115,6 +130,7 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IV golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= diff --git a/lefthook.yml b/lefthook.yml index 619d4d87..e7298f34 100644 --- a/lefthook.yml +++ b/lefthook.yml @@ -93,8 +93,9 @@ pre-commit: run: bash scripts/check-version-match-tag.sh # --- Frontend --- - # NOTE: ESLint pinned at v9.x.x — do not upgrade until react-hooks plugin - # supports v10. TypeScript check runs first; lint runs after so fixes apply. + # NOTE: ESLint upgraded to v10.x. Overrides in frontend/package.json for + # react-hooks, jsx-a11y, promise plugins (peer deps not yet updated). + # Remove overrides when plugins declare ESLint v10 support natively. 
frontend-type-check: glob: "frontend/**/*.{ts,tsx}" run: cd frontend && npx tsc --noEmit @@ -103,13 +104,9 @@ pre-commit: glob: "frontend/**/*.{ts,tsx,js,jsx}" run: cd frontend && npm run lint - -# ============================================================ -# PRE-PUSH (blocking, runs on push) -# ============================================================ -pre-push: - commands: semgrep: + glob: "{**/*.{go,ts,tsx,js,jsx,sh,yml,yaml,json},Dockerfile*}" + exclude: 'frontend/(coverage|dist|node_modules|\.vite)/' run: scripts/pre-commit-hooks/semgrep-scan.sh diff --git a/package-lock.json b/package-lock.json index 6ff532d2..8a0fb3d8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -6,21 +6,21 @@ "": { "dependencies": { "@typescript/analyze-trace": "^0.10.1", - "tldts": "^7.0.25", - "type-check": "^0.4.0", - "typescript": "^5.9.3", - "vite": "^7.3.1" + "tldts": "^7.0.27", + "type-check": "^0.4.0" }, "devDependencies": { "@bgotink/playwright-coverage": "^0.3.2", "@playwright/test": "^1.58.2", "@types/eslint-plugin-jsx-a11y": "^6.10.1", - "@types/node": "^25.4.0", + "@types/node": "^25.5.0", "dotenv": "^17.3.1", - "markdownlint-cli2": "^0.21.0", + "markdownlint-cli2": "^0.22.0", "prettier": "^3.8.1", "prettier-plugin-tailwindcss": "^0.7.2", - "tar": "^7.5.11" + "tar": "^7.5.13", + "typescript": "^6.0.2", + "vite": "^8.0.2" } }, "node_modules/@bcoe/v8-coverage": { @@ -51,420 +51,38 @@ "@playwright/test": "^1.14.1" } }, - "node_modules/@esbuild/aix-ppc64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz", - "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==", - "cpu": [ - "ppc64" - ], + "node_modules/@emnapi/core": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.9.1.tgz", + "integrity": "sha512-mukuNALVsoix/w1BJwFzwXBN/dHeejQtuVzcDsfOEsdpCumXb/E9j8w11h5S54tT1xhifGfbbSm/ICrObRb3KA==", + "dev": true, 
"license": "MIT", "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" + "dependencies": { + "@emnapi/wasi-threads": "1.2.0", + "tslib": "^2.4.0" } }, - "node_modules/@esbuild/android-arm": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz", - "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==", - "cpu": [ - "arm" - ], + "node_modules/@emnapi/runtime": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.9.1.tgz", + "integrity": "sha512-VYi5+ZVLhpgK4hQ0TAjiQiZ6ol0oe4mBx7mVv7IflsiEp0OWoVsp/+f9Vc1hOhE0TtkORVrI1GvzyreqpgWtkA==", + "dev": true, "license": "MIT", "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" + "dependencies": { + "tslib": "^2.4.0" } }, - "node_modules/@esbuild/android-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz", - "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==", - "cpu": [ - "arm64" - ], + "node_modules/@emnapi/wasi-threads": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.2.0.tgz", + "integrity": "sha512-N10dEJNSsUx41Z6pZsXU8FjPjpBEplgH24sfkmITrBED1/U2Esum9F3lfLrMjKHHjmi557zQn7kR9R+XWXu5Rg==", + "dev": true, "license": "MIT", "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz", - "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-arm64": { - 
"version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", - "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", - "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz", - "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz", - "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz", - "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm64": { - 
"version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz", - "integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz", - "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==", - "cpu": [ - "ia32" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz", - "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==", - "cpu": [ - "loong64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz", - "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==", - "cpu": [ - "mips64el" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz", - "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==", - "cpu": [ - "ppc64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - 
"node_modules/@esbuild/linux-riscv64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz", - "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==", - "cpu": [ - "riscv64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz", - "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==", - "cpu": [ - "s390x" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz", - "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz", - "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz", - "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - 
"node_modules/@esbuild/openbsd-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz", - "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz", - "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openharmony-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz", - "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz", - "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz", - "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - 
"node": ">=18" - } - }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz", - "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==", - "cpu": [ - "ia32" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz", - "integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" + "dependencies": { + "tslib": "^2.4.0" } }, "node_modules/@eslint-community/eslint-utils": { @@ -704,6 +322,23 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.1.1.tgz", + "integrity": "sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.7.1", + "@emnapi/runtime": "^1.7.1", + "@tybys/wasm-util": "^0.10.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Brooooooklyn" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -742,6 +377,16 @@ "node": ">= 8" } }, + "node_modules/@oxc-project/types": { + "version": "0.122.0", + "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.122.0.tgz", + "integrity": "sha512-oLAl5kBpV4w69UtFZ9xqcmTi+GENWOcPF7FCrczTiBbmC0ibXxCwyvZGbO39rCVEuLGAZM84DH0pUIyyv/YJzA==", + "dev": true, + "license": "MIT", + "funding": { + 
"url": "https://github.com/sponsors/Boshen" + } + }, "node_modules/@playwright/test": { "version": "1.58.2", "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.58.2.tgz", @@ -758,330 +403,267 @@ "node": ">=18" } }, - "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz", - "integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==", + "node_modules/@rolldown/binding-android-arm64": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-rc.11.tgz", + "integrity": "sha512-SJ+/g+xNnOh6NqYxD0V3uVN4W3VfnrGsC9/hoglicgTNfABFG9JjISvkkU0dNY84MNHLWyOgxP9v9Y9pX4S7+A==", "cpu": [ - "arm" + "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "android" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-android-arm64": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz", - "integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz", - "integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==", + "node_modules/@rolldown/binding-darwin-arm64": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-rc.11.tgz", + "integrity": "sha512-7WQgR8SfOPwmDZGFkThUvsmd/nwAWv91oCO4I5LS7RKrssPZmOt7jONN0cW17ydGC1n/+puol1IpoieKqQidmg==", "cpu": 
[ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "darwin" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz", - "integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==", + "node_modules/@rolldown/binding-darwin-x64": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-x64/-/binding-darwin-x64-1.0.0-rc.11.tgz", + "integrity": "sha512-39Ks6UvIHq4rEogIfQBoBRusj0Q0nPVWIvqmwBLaT6aqQGIakHdESBVOPRRLacy4WwUPIx4ZKzfZ9PMW+IeyUQ==", "cpu": [ "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "darwin" - ] - }, - "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz", - "integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==", - "cpu": [ - "arm64" ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz", - "integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==", + "node_modules/@rolldown/binding-freebsd-x64": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-freebsd-x64/-/binding-freebsd-x64-1.0.0-rc.11.tgz", + "integrity": "sha512-jfsm0ZHfhiqrvWjJAmzsqiIFPz5e7mAoCOPBNTcNgkiid/LaFKiq92+0ojH+nmJmKYkre4t71BWXUZDNp7vsag==", "cpu": [ "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "freebsd" - ] + ], + "engines": { + "node": "^20.19.0 || 
>=22.12.0" + } }, - "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz", - "integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==", + "node_modules/@rolldown/binding-linux-arm-gnueabihf": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.0.0-rc.11.tgz", + "integrity": "sha512-zjQaUtSyq1nVe3nxmlSCuR96T1LPlpvmJ0SZy0WJFEsV4kFbXcq2u68L4E6O0XeFj4aex9bEauqjW8UQBeAvfQ==", "cpu": [ "arm" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz", - "integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==", - "cpu": [ - "arm" ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz", - "integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==", + "node_modules/@rolldown/binding-linux-arm64-gnu": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.0.0-rc.11.tgz", + "integrity": "sha512-WMW1yE6IOnehTcFE9eipFkm3XN63zypWlrJQ2iF7NrQ9b2LDRjumFoOGJE8RJJTJCTBAdmLMnJ8uVitACUUo1Q==", "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "linux" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - 
"node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz", - "integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==", + "node_modules/@rolldown/binding-linux-arm64-musl": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.0.0-rc.11.tgz", + "integrity": "sha512-jfndI9tsfm4APzjNt6QdBkYwre5lRPUgHeDHoI7ydKUuJvz3lZeCfMsI56BZj+7BYqiKsJm7cfd/6KYV7ubrBg==", "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "linux" - ] - }, - "node_modules/@rollup/rollup-linux-loong64-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz", - "integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==", - "cpu": [ - "loong64" ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-linux-loong64-musl": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz", - "integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==", - "cpu": [ - "loong64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz", - "integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==", + "node_modules/@rolldown/binding-linux-ppc64-gnu": { + "version": "1.0.0-rc.11", + "resolved": 
"https://registry.npmjs.org/@rolldown/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-1.0.0-rc.11.tgz", + "integrity": "sha512-ZlFgw46NOAGMgcdvdYwAGu2Q+SLFA9LzbJLW+iyMOJyhj5wk6P3KEE9Gct4xWwSzFoPI7JCdYmYMzVtlgQ+zfw==", "cpu": [ "ppc64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "linux" - ] - }, - "node_modules/@rollup/rollup-linux-ppc64-musl": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz", - "integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==", - "cpu": [ - "ppc64" ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz", - "integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==", - "cpu": [ - "riscv64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz", - "integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==", - "cpu": [ - "riscv64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz", - "integrity": "sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==", + "node_modules/@rolldown/binding-linux-s390x-gnu": { + "version": "1.0.0-rc.11", + "resolved": 
"https://registry.npmjs.org/@rolldown/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-1.0.0-rc.11.tgz", + "integrity": "sha512-hIOYmuT6ofM4K04XAZd3OzMySEO4K0/nc9+jmNcxNAxRi6c5UWpqfw3KMFV4MVFWL+jQsSh+bGw2VqmaPMTLyw==", "cpu": [ "s390x" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "linux" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz", - "integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==", + "node_modules/@rolldown/binding-linux-x64-gnu": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-rc.11.tgz", + "integrity": "sha512-qXBQQO9OvkjjQPLdUVr7Nr2t3QTZI7s4KZtfw7HzBgjbmAPSFwSv4rmET9lLSgq3rH/ndA3ngv3Qb8l2njoPNA==", "cpu": [ "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "linux" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz", - "integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==", + "node_modules/@rolldown/binding-linux-x64-musl": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-musl/-/binding-linux-x64-musl-1.0.0-rc.11.tgz", + "integrity": "sha512-/tpFfoSTzUkH9LPY+cYbqZBDyyX62w5fICq9qzsHLL8uTI6BHip3Q9Uzft0wylk/i8OOwKik8OxW+QAhDmzwmg==", "cpu": [ "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "linux" - ] - }, - "node_modules/@rollup/rollup-openbsd-x64": { - "version": "4.59.0", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz", - "integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==", - "cpu": [ - "x64" ], - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ] + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-openharmony-arm64": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz", - "integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==", + "node_modules/@rolldown/binding-openharmony-arm64": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-openharmony-arm64/-/binding-openharmony-arm64-1.0.0-rc.11.tgz", + "integrity": "sha512-mcp3Rio2w72IvdZG0oQ4bM2c2oumtwHfUfKncUM6zGgz0KgPz4YmDPQfnXEiY5t3+KD/i8HG2rOB/LxdmieK2g==", "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "openharmony" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz", - "integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==", + "node_modules/@rolldown/binding-wasm32-wasi": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-wasm32-wasi/-/binding-wasm32-wasi-1.0.0-rc.11.tgz", + "integrity": "sha512-LXk5Hii1Ph9asuGRjBuz8TUxdc1lWzB7nyfdoRgI0WGPZKmCxvlKk8KfYysqtr4MfGElu/f/pEQRh8fcEgkrWw==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^1.1.1" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@rolldown/binding-win32-arm64-msvc": { + "version": 
"1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.0.0-rc.11.tgz", + "integrity": "sha512-dDwf5otnx0XgRY1yqxOC4ITizcdzS/8cQ3goOWv3jFAo4F+xQYni+hnMuO6+LssHHdJW7+OCVL3CoU4ycnh35Q==", "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "win32" - ] - }, - "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz", - "integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==", - "cpu": [ - "ia32" ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-win32-x64-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz", - "integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==", + "node_modules/@rolldown/binding-win32-x64-msvc": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.0.0-rc.11.tgz", + "integrity": "sha512-LN4/skhSggybX71ews7dAj6r2geaMJfm3kMbK2KhFMg9B10AZXnKoLCVVgzhMHL0S+aKtr4p8QbAW8k+w95bAA==", "cpu": [ "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "win32" - ] - }, - "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz", - "integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==", - "cpu": [ - "x64" ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-rc.11", 
+ "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.11.tgz", + "integrity": "sha512-xQO9vbwBecJRv9EUcQ/y0dzSTJgA7Q6UVN7xp6B81+tBGSLVAK03yJ9NkJaUA7JFD91kbjxRSC/mDnmvXzbHoQ==", + "dev": true, + "license": "MIT" }, "node_modules/@sindresorhus/merge-streams": { "version": "4.0.0", @@ -1096,10 +678,21 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@tybys/wasm-util": { + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz", + "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@types/debug": { - "version": "4.1.12", - "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", - "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "version": "4.1.13", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.13.tgz", + "integrity": "sha512-KSVgmQmzMwPlmtljOomayoR89W4FynCAi3E8PPs7vmDVPe84hT+vGPKkJfThkmXs0x0jAaa9U8uW8bbfyS2fWw==", "dev": true, "license": "MIT", "dependencies": { @@ -1120,6 +713,7 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, "license": "MIT" }, "node_modules/@types/istanbul-lib-coverage": { @@ -1151,10 +745,10 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "25.4.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-25.4.0.tgz", - "integrity": "sha512-9wLpoeWuBlcbBpOY3XmzSTG3oscB6xjBEEtn+pYXTfhyXhIxC5FsBer2KTopBlvKEiW9l13po9fq+SJY/5lkhw==", - "devOptional": true, + "version": "25.5.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-25.5.0.tgz", + "integrity": 
"sha512-jp2P3tQMSxWugkCUKLRPVUpGaL5MVFwF8RDuSRztfwgN1wmqJeMSbKlnEtQqU8UrhTmzEmZdu2I6v2dpp7XIxw==", + "dev": true, "license": "MIT", "dependencies": { "undici-types": "~7.18.0" @@ -1532,6 +1126,16 @@ "node": ">=6" } }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, "node_modules/devlop": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", @@ -1578,47 +1182,6 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, - "node_modules/esbuild": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz", - "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==", - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.27.3", - "@esbuild/android-arm": "0.27.3", - "@esbuild/android-arm64": "0.27.3", - "@esbuild/android-x64": "0.27.3", - "@esbuild/darwin-arm64": "0.27.3", - "@esbuild/darwin-x64": "0.27.3", - "@esbuild/freebsd-arm64": "0.27.3", - "@esbuild/freebsd-x64": "0.27.3", - "@esbuild/linux-arm": "0.27.3", - "@esbuild/linux-arm64": "0.27.3", - "@esbuild/linux-ia32": "0.27.3", - "@esbuild/linux-loong64": "0.27.3", - "@esbuild/linux-mips64el": "0.27.3", - "@esbuild/linux-ppc64": "0.27.3", - "@esbuild/linux-riscv64": "0.27.3", - "@esbuild/linux-s390x": "0.27.3", - "@esbuild/linux-x64": "0.27.3", - "@esbuild/netbsd-arm64": "0.27.3", - "@esbuild/netbsd-x64": "0.27.3", - "@esbuild/openbsd-arm64": "0.27.3", - "@esbuild/openbsd-x64": "0.27.3", - "@esbuild/openharmony-arm64": "0.27.3", - "@esbuild/sunos-x64": "0.27.3", - "@esbuild/win32-arm64": 
"0.27.3", - "@esbuild/win32-ia32": "0.27.3", - "@esbuild/win32-x64": "0.27.3" - } - }, "node_modules/escalade": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", @@ -1946,9 +1509,9 @@ } }, "node_modules/flatted": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.1.tgz", - "integrity": "sha512-IxfVbRFVlV8V/yRaGzk0UVIcsKKHMSfYw66T/u4nTwlWteQePsxe//LjudR1AMX4tZW3WFCh3Zqa/sjlqpbURQ==", + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.2.tgz", + "integrity": "sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==", "dev": true, "license": "ISC" }, @@ -1969,6 +1532,7 @@ "version": "2.3.2", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, "hasInstallScript": true, "license": "MIT", "optional": true, @@ -2028,9 +1592,9 @@ } }, "node_modules/globby": { - "version": "16.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-16.1.0.tgz", - "integrity": "sha512-+A4Hq7m7Ze592k9gZRy4gJ27DrXRNnC1vPjxTt1qQxEY8RxagBkBxivkCwg7FxSTG0iLLEMaUx13oOr0R2/qcQ==", + "version": "16.1.1", + "resolved": "https://registry.npmjs.org/globby/-/globby-16.1.1.tgz", + "integrity": "sha512-dW7vl+yiAJSp6aCekaVnVJxurRv7DCOLyXqEG3RYMYUg7AuJ2jCqPkZTA8ooqC2vtnkaMcV5WfFBMuEnTu1OQg==", "dev": true, "license": "MIT", "dependencies": { @@ -2316,6 +1880,16 @@ ], "license": "MIT" }, + "node_modules/jsonpointer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-5.0.1.tgz", + "integrity": "sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/jsonstream-next": { "version": "3.0.0", "resolved": 
"https://registry.npmjs.org/jsonstream-next/-/jsonstream-next-3.0.0.tgz", @@ -2333,9 +1907,9 @@ } }, "node_modules/katex": { - "version": "0.16.38", - "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.38.tgz", - "integrity": "sha512-cjHooZUmIAUmDsHBN+1n8LaZdpmbj03LtYeYPyuYB7OuloiaeaV6N4LcfjcnHVzGWjVQmKrxxTrpDcmSzEZQwQ==", + "version": "0.16.42", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.42.tgz", + "integrity": "sha512-sZ4jqyEXfHTLEFK+qsFYToa3UZ0rtFcPGwKpyiRYh2NJn8obPWOQ+/u7ux0F6CAU/y78+Mksh1YkxTPXTh47TQ==", "dev": true, "funding": [ "https://opencollective.com/katex", @@ -2373,6 +1947,267 @@ "node": ">= 0.8.0" } }, + "node_modules/lightningcss": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.32.0.tgz", + "integrity": "sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "lightningcss-android-arm64": "1.32.0", + "lightningcss-darwin-arm64": "1.32.0", + "lightningcss-darwin-x64": "1.32.0", + "lightningcss-freebsd-x64": "1.32.0", + "lightningcss-linux-arm-gnueabihf": "1.32.0", + "lightningcss-linux-arm64-gnu": "1.32.0", + "lightningcss-linux-arm64-musl": "1.32.0", + "lightningcss-linux-x64-gnu": "1.32.0", + "lightningcss-linux-x64-musl": "1.32.0", + "lightningcss-win32-arm64-msvc": "1.32.0", + "lightningcss-win32-x64-msvc": "1.32.0" + } + }, + "node_modules/lightningcss-android-arm64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.32.0.tgz", + "integrity": "sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + 
"optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-arm64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.32.0.tgz", + "integrity": "sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.32.0.tgz", + "integrity": "sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-freebsd-x64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.32.0.tgz", + "integrity": "sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.32.0.tgz", + 
"integrity": "sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.32.0.tgz", + "integrity": "sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-musl": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.32.0.tgz", + "integrity": "sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.32.0.tgz", + "integrity": "sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } 
+ }, + "node_modules/lightningcss-linux-x64-musl": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.32.0.tgz", + "integrity": "sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.32.0.tgz", + "integrity": "sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.32.0.tgz", + "integrity": "sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, "node_modules/linkify-it": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", @@ -2465,19 +2300,21 @@ } }, "node_modules/markdownlint-cli2": { - "version": "0.21.0", - "resolved": "https://registry.npmjs.org/markdownlint-cli2/-/markdownlint-cli2-0.21.0.tgz", - "integrity": 
"sha512-DzzmbqfMW3EzHsunP66x556oZDzjcdjjlL2bHG4PubwnL58ZPAfz07px4GqteZkoCGnBYi779Y2mg7+vgNCwbw==", + "version": "0.22.0", + "resolved": "https://registry.npmjs.org/markdownlint-cli2/-/markdownlint-cli2-0.22.0.tgz", + "integrity": "sha512-mOC9BY/XGtdX3M9n3AgERd79F0+S7w18yBBTNIQ453sI87etZfp1z4eajqSMV70CYjbxKe5ktKvT2HCpvcWx9w==", "dev": true, "license": "MIT", "dependencies": { - "globby": "16.1.0", + "globby": "16.1.1", "js-yaml": "4.1.1", "jsonc-parser": "3.3.1", + "jsonpointer": "5.0.1", "markdown-it": "14.1.1", "markdownlint": "0.40.0", "markdownlint-cli2-formatter-default": "0.0.6", - "micromatch": "4.0.8" + "micromatch": "4.0.8", + "smol-toml": "1.6.0" }, "bin": { "markdownlint-cli2": "markdownlint-cli2-bin.mjs" @@ -3116,6 +2953,7 @@ "version": "3.3.11", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, "funding": [ { "type": "github", @@ -3283,12 +3121,13 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, "license": "ISC" }, "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz", + "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==", "dev": true, "license": "MIT", "engines": { @@ -3334,6 +3173,7 @@ "version": "8.5.8", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz", "integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==", + "dev": true, 
"funding": [ { "type": "opencollective", @@ -3547,48 +3387,38 @@ "node": ">=0.10.0" } }, - "node_modules/rollup": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.59.0.tgz", - "integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==", + "node_modules/rolldown": { + "version": "1.0.0-rc.11", + "resolved": "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-rc.11.tgz", + "integrity": "sha512-NRjoKMusSjfRbSYiH3VSumlkgFe7kYAa3pzVOsVYVFY3zb5d7nS+a3KGQ7hJKXuYWbzJKPVQ9Wxq2UvyK+ENpw==", + "dev": true, "license": "MIT", "dependencies": { - "@types/estree": "1.0.8" + "@oxc-project/types": "=0.122.0", + "@rolldown/pluginutils": "1.0.0-rc.11" }, "bin": { - "rollup": "dist/bin/rollup" + "rolldown": "bin/cli.mjs" }, "engines": { - "node": ">=18.0.0", - "npm": ">=8.0.0" + "node": "^20.19.0 || >=22.12.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.59.0", - "@rollup/rollup-android-arm64": "4.59.0", - "@rollup/rollup-darwin-arm64": "4.59.0", - "@rollup/rollup-darwin-x64": "4.59.0", - "@rollup/rollup-freebsd-arm64": "4.59.0", - "@rollup/rollup-freebsd-x64": "4.59.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.59.0", - "@rollup/rollup-linux-arm-musleabihf": "4.59.0", - "@rollup/rollup-linux-arm64-gnu": "4.59.0", - "@rollup/rollup-linux-arm64-musl": "4.59.0", - "@rollup/rollup-linux-loong64-gnu": "4.59.0", - "@rollup/rollup-linux-loong64-musl": "4.59.0", - "@rollup/rollup-linux-ppc64-gnu": "4.59.0", - "@rollup/rollup-linux-ppc64-musl": "4.59.0", - "@rollup/rollup-linux-riscv64-gnu": "4.59.0", - "@rollup/rollup-linux-riscv64-musl": "4.59.0", - "@rollup/rollup-linux-s390x-gnu": "4.59.0", - "@rollup/rollup-linux-x64-gnu": "4.59.0", - "@rollup/rollup-linux-x64-musl": "4.59.0", - "@rollup/rollup-openbsd-x64": "4.59.0", - "@rollup/rollup-openharmony-arm64": "4.59.0", - "@rollup/rollup-win32-arm64-msvc": "4.59.0", - "@rollup/rollup-win32-ia32-msvc": "4.59.0", - 
"@rollup/rollup-win32-x64-gnu": "4.59.0", - "@rollup/rollup-win32-x64-msvc": "4.59.0", - "fsevents": "~2.3.2" + "@rolldown/binding-android-arm64": "1.0.0-rc.11", + "@rolldown/binding-darwin-arm64": "1.0.0-rc.11", + "@rolldown/binding-darwin-x64": "1.0.0-rc.11", + "@rolldown/binding-freebsd-x64": "1.0.0-rc.11", + "@rolldown/binding-linux-arm-gnueabihf": "1.0.0-rc.11", + "@rolldown/binding-linux-arm64-gnu": "1.0.0-rc.11", + "@rolldown/binding-linux-arm64-musl": "1.0.0-rc.11", + "@rolldown/binding-linux-ppc64-gnu": "1.0.0-rc.11", + "@rolldown/binding-linux-s390x-gnu": "1.0.0-rc.11", + "@rolldown/binding-linux-x64-gnu": "1.0.0-rc.11", + "@rolldown/binding-linux-x64-musl": "1.0.0-rc.11", + "@rolldown/binding-openharmony-arm64": "1.0.0-rc.11", + "@rolldown/binding-wasm32-wasi": "1.0.0-rc.11", + "@rolldown/binding-win32-arm64-msvc": "1.0.0-rc.11", + "@rolldown/binding-win32-x64-msvc": "1.0.0-rc.11" } }, "node_modules/run-parallel": { @@ -3684,10 +3514,24 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/smol-toml": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/smol-toml/-/smol-toml-1.6.0.tgz", + "integrity": "sha512-4zemZi0HvTnYwLfrpk/CF9LOd9Lt87kAt50GnqhMpyF9U3poDAP2+iukq2bZsO/ufegbYehBkqINbsWxj4l4cw==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">= 18" + }, + "funding": { + "url": "https://github.com/sponsors/cyyynthia" + } + }, "node_modules/source-map-js": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, "license": "BSD-3-Clause", "engines": { "node": ">=0.10.0" @@ -3770,9 +3614,9 @@ } }, "node_modules/tar": { - "version": "7.5.11", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.11.tgz", - "integrity": "sha512-ChjMH33/KetonMTAtpYdgUFr0tbz69Fp2v7zWxQfYZX4g5ZN2nOBXm1R2xyA+lMIKrLKIoKAwFj93jE/avX9cQ==", + 
"version": "7.5.13", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.13.tgz", + "integrity": "sha512-tOG/7GyXpFevhXVh8jOPJrmtRpOTsYqUIkVdVooZYJS/z8WhfQUX8RJILmeuJNinGAMSu1veBr4asSHFt5/hng==", "dev": true, "license": "BlueOak-1.0.0", "dependencies": { @@ -3799,6 +3643,7 @@ "version": "0.2.15", "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, "license": "MIT", "dependencies": { "fdir": "^6.5.0", @@ -3815,6 +3660,7 @@ "version": "6.5.0", "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, "license": "MIT", "engines": { "node": ">=12.0.0" @@ -3829,9 +3675,10 @@ } }, "node_modules/tinyglobby/node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", + "dev": true, "license": "MIT", "engines": { "node": ">=12" @@ -3841,21 +3688,21 @@ } }, "node_modules/tldts": { - "version": "7.0.25", - "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.25.tgz", - "integrity": "sha512-keinCnPbwXEUG3ilrWQZU+CqcTTzHq9m2HhoUP2l7Xmi8l1LuijAXLpAJ5zRW+ifKTNscs4NdCkfkDCBYm352w==", + "version": "7.0.27", + "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.27.tgz", + "integrity": "sha512-I4FZcVFcqCRuT0ph6dCDpPuO4Xgzvh+spkcTr1gK7peIvxWauoloVO0vuy1FQnijT63ss6AsHB6+OIM4aXHbPg==", "license": "MIT", "dependencies": { - "tldts-core": "^7.0.25" + "tldts-core": "^7.0.27" }, "bin": { "tldts": "bin/cli.js" } }, 
"node_modules/tldts-core": { - "version": "7.0.25", - "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.25.tgz", - "integrity": "sha512-ZjCZK0rppSBu7rjHYDYsEaMOIbbT+nWF57hKkv4IUmZWBNrBWBOjIElc0mKRgLM8bm7x/BBlof6t2gi/Oq/Asw==", + "version": "7.0.27", + "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.27.tgz", + "integrity": "sha512-YQ7uPjgWUibIK6DW5lrKujGwUKhLevU4hcGbP5O6TcIUb+oTjJYJVWPS4nZsIHrEEEG6myk/oqAJUEQmpZrHsg==", "license": "MIT" }, "node_modules/to-regex-range": { @@ -3880,6 +3727,14 @@ "node": ">=0.6" } }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD", + "optional": true + }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -3893,9 +3748,10 @@ } }, "node_modules/typescript": { - "version": "5.9.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", - "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-6.0.2.tgz", + "integrity": "sha512-bGdAIrZ0wiGDo5l8c++HWtbaNCWTS4UTv7RaTH/ThVIgjkveJt83m74bBHMJkuCbslY8ixgLBVZJIOiQlQTjfQ==", + "dev": true, "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", @@ -3916,7 +3772,7 @@ "version": "7.18.2", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.18.2.tgz", "integrity": "sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w==", - "devOptional": true, + "dev": true, "license": "MIT" }, "node_modules/unicorn-magic": { @@ -3964,16 +3820,16 @@ } }, "node_modules/vite": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", - 
"integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/vite/-/vite-8.0.2.tgz", + "integrity": "sha512-1gFhNi+bHhRE/qKZOJXACm6tX4bA3Isy9KuKF15AgSRuRazNBOJfdDemPBU16/mpMxApDPrWvZ08DcLPEoRnuA==", + "dev": true, "license": "MIT", "dependencies": { - "esbuild": "^0.27.0", - "fdir": "^6.5.0", + "lightningcss": "^1.32.0", "picomatch": "^4.0.3", - "postcss": "^8.5.6", - "rollup": "^4.43.0", + "postcss": "^8.5.8", + "rolldown": "1.0.0-rc.11", "tinyglobby": "^0.2.15" }, "bin": { @@ -3990,9 +3846,10 @@ }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", + "@vitejs/devtools": "^0.1.0", + "esbuild": "^0.27.0", "jiti": ">=1.21.0", "less": "^4.0.0", - "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", @@ -4005,15 +3862,18 @@ "@types/node": { "optional": true }, + "@vitejs/devtools": { + "optional": true + }, + "esbuild": { + "optional": true + }, "jiti": { "optional": true }, "less": { "optional": true }, - "lightningcss": { - "optional": true - }, "sass": { "optional": true }, @@ -4037,27 +3897,11 @@ } } }, - "node_modules/vite/node_modules/fdir": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", - "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", - "license": "MIT", - "engines": { - "node": ">=12.0.0" - }, - "peerDependencies": { - "picomatch": "^3 || ^4" - }, - "peerDependenciesMeta": { - "picomatch": { - "optional": true - } - } - }, "node_modules/vite/node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, "hasInstallScript": true, "license": "MIT", "optional": true, @@ -4069,9 +3913,10 @@ } }, 
"node_modules/vite/node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", + "dev": true, "license": "MIT", "engines": { "node": ">=12" diff --git a/package.json b/package.json index 3788ad23..88bb0ccd 100644 --- a/package.json +++ b/package.json @@ -11,20 +11,20 @@ }, "dependencies": { "@typescript/analyze-trace": "^0.10.1", - "tldts": "^7.0.25", - "type-check": "^0.4.0", - "typescript": "^5.9.3", - "vite": "^7.3.1" + "tldts": "^7.0.27", + "type-check": "^0.4.0" }, "devDependencies": { "@types/eslint-plugin-jsx-a11y": "^6.10.1", "@bgotink/playwright-coverage": "^0.3.2", "@playwright/test": "^1.58.2", - "@types/node": "^25.4.0", + "@types/node": "^25.5.0", "dotenv": "^17.3.1", - "markdownlint-cli2": "^0.21.0", + "markdownlint-cli2": "^0.22.0", "prettier": "^3.8.1", "prettier-plugin-tailwindcss": "^0.7.2", - "tar": "^7.5.11" + "tar": "^7.5.13", + "typescript": "^6.0.2", + "vite": "^8.0.2" } } diff --git a/scripts/cerberus_integration.sh b/scripts/cerberus_integration.sh index d5e5e8fe..5311b7b6 100755 --- a/scripts/cerberus_integration.sh +++ b/scripts/cerberus_integration.sh @@ -170,7 +170,8 @@ if ! docker network inspect containers_default >/dev/null 2>&1; then fi log_info "Starting httpbin backend container..." -docker run -d --name ${BACKEND_CONTAINER} --network containers_default kennethreitz/httpbin +docker pull mccutchen/go-httpbin 2>/dev/null || true +docker run -d --name ${BACKEND_CONTAINER} --network containers_default -e PORT=80 mccutchen/go-httpbin log_info "Starting Charon container with ALL Cerberus features enabled..." 
docker run -d --name ${CONTAINER_NAME} \ @@ -210,12 +211,12 @@ done echo "" log_info "Waiting for httpbin backend to be ready..." -for i in {1..20}; do - if docker exec ${CONTAINER_NAME} sh -c "curl -sf http://${BACKEND_CONTAINER}/get" >/dev/null 2>&1; then +for i in {1..45}; do + if docker exec ${CONTAINER_NAME} sh -c "wget -qO /dev/null http://${BACKEND_CONTAINER}/get" >/dev/null 2>&1; then log_info "httpbin backend is ready" break fi - if [ $i -eq 20 ]; then + if [ $i -eq 45 ]; then log_error "httpbin backend failed to start" exit 1 fi diff --git a/scripts/coraza_integration.sh b/scripts/coraza_integration.sh index 8a71e7a9..87cabe6a 100755 --- a/scripts/coraza_integration.sh +++ b/scripts/coraza_integration.sh @@ -155,12 +155,12 @@ if ! docker network inspect containers_default >/dev/null 2>&1; then fi docker rm -f coraza-backend >/dev/null 2>&1 || true -docker run -d --name coraza-backend --network containers_default kennethreitz/httpbin +docker run -d --name coraza-backend --network containers_default -e PORT=80 mccutchen/go-httpbin echo "Waiting for httpbin backend to be ready..." 
for i in {1..20}; do # Check if container is running and has network connectivity - if docker exec charon-debug sh -c 'curl -s http://coraza-backend/get' >/dev/null 2>&1; then + if docker exec charon-debug sh -c 'wget -qO /dev/null http://coraza-backend/get' >/dev/null 2>&1; then echo "✓ httpbin backend is ready" break fi diff --git a/scripts/crowdsec_startup_test.sh b/scripts/crowdsec_startup_test.sh index cfeae241..99f7dd66 100755 --- a/scripts/crowdsec_startup_test.sh +++ b/scripts/crowdsec_startup_test.sh @@ -176,7 +176,7 @@ fi log_test "Check 2: CrowdSec LAPI health (127.0.0.1:8085/health)" # Use docker exec to check LAPI health from inside the container -LAPI_HEALTH=$(docker exec ${CONTAINER_NAME} curl -sf http://127.0.0.1:8085/health 2>/dev/null || echo "FAILED") +LAPI_HEALTH=$(docker exec ${CONTAINER_NAME} wget -qO - http://127.0.0.1:8085/health 2>/dev/null || echo "FAILED") if [ "$LAPI_HEALTH" != "FAILED" ] && [ -n "$LAPI_HEALTH" ]; then log_info " LAPI is healthy" diff --git a/scripts/diagnose-test-env.sh b/scripts/diagnose-test-env.sh index eeaaa4b4..a9336d00 100755 --- a/scripts/diagnose-test-env.sh +++ b/scripts/diagnose-test-env.sh @@ -101,7 +101,7 @@ echo "" # Check CrowdSec LAPI echo "7. CrowdSec LAPI Status:" -if docker exec charon-e2e curl -sf http://localhost:8090/health > /dev/null 2>&1; then +if docker exec charon-e2e wget -qO /dev/null http://localhost:8090/health 2>/dev/null; then echo -e " ${GREEN}✓${NC} CrowdSec LAPI is responding (port 8090)" else echo -e " ${YELLOW}⚠${NC} CrowdSec LAPI is not responding" diff --git a/scripts/pre-commit-hooks/semgrep-scan.sh b/scripts/pre-commit-hooks/semgrep-scan.sh index e49e360b..bbe3b0dc 100755 --- a/scripts/pre-commit-hooks/semgrep-scan.sh +++ b/scripts/pre-commit-hooks/semgrep-scan.sh @@ -15,17 +15,29 @@ fi cd "${REPO_ROOT}" -# Default to p/golang for speed (~30s vs 60-180s for auto). 
-# Override with: SEMGREP_CONFIG=auto git push -readonly SEMGREP_CONFIG_VALUE="${SEMGREP_CONFIG:-p/golang}" +# Default: full security ruleset covering Go backend, JS/TS/React frontend, secrets. +# Override with: SEMGREP_CONFIG=auto git commit (runs all Semgrep rules, ~3-5 min) +if [ -n "${SEMGREP_CONFIG:-}" ]; then + SEMGREP_CONFIGS=(--config "${SEMGREP_CONFIG}") + echo "Running Semgrep with override config: ${SEMGREP_CONFIG}" +else + SEMGREP_CONFIGS=( + --config p/golang + --config p/javascript + --config p/typescript + --config p/react + --config p/secrets + --config p/dockerfile + ) + echo "Running Semgrep with configs: p/golang, p/javascript, p/typescript, p/react, p/secrets, p/dockerfile" +fi -echo "Running Semgrep with config: ${SEMGREP_CONFIG_VALUE}" semgrep scan \ - --config "${SEMGREP_CONFIG_VALUE}" \ + "${SEMGREP_CONFIGS[@]}" \ --severity ERROR \ --severity WARNING \ --error \ --exclude "frontend/node_modules" \ --exclude "frontend/coverage" \ --exclude "frontend/dist" \ - backend frontend/src scripts .github/workflows + Dockerfile backend frontend/src scripts .github/workflows diff --git a/scripts/rate_limit_integration.sh b/scripts/rate_limit_integration.sh index 43f6d461..777033a1 100755 --- a/scripts/rate_limit_integration.sh +++ b/scripts/rate_limit_integration.sh @@ -35,14 +35,14 @@ TEST_DOMAIN="ratelimit.local" # Verifies rate limit handler is present in Caddy config verify_rate_limit_config() { local retries=10 - local wait=3 + local wait=5 echo "Verifying rate limit config in Caddy..." for i in $(seq 1 $retries); do # Fetch Caddy config via admin API local caddy_config - caddy_config=$(curl -s http://localhost:2119/config 2>/dev/null || echo "") + caddy_config=$(curl -s http://localhost:2119/config/ 2>/dev/null || echo "") if [ -z "$caddy_config" ]; then echo " Attempt $i/$retries: Caddy admin API not responding, retrying..." 
@@ -79,7 +79,7 @@ on_failure() { echo "" echo "=== Caddy Admin API Config ===" - curl -s http://localhost:2119/config 2>/dev/null | head -300 || echo "Could not retrieve Caddy config" + curl -s http://localhost:2119/config/ 2>/dev/null | head -300 || echo "Could not retrieve Caddy config" echo "" echo "=== Security Config in API ===" @@ -170,7 +170,7 @@ for i in {1..30}; do echo "✓ Charon API is ready" break fi - if [ $i -eq 30 ]; then + if [ "$i" -eq 30 ]; then echo "✗ Charon API failed to start" exit 1 fi @@ -183,15 +183,16 @@ done # ============================================================================ echo "" echo "Creating backend container for proxy host..." -docker run -d --name ${BACKEND_CONTAINER} --network containers_default kennethreitz/httpbin +docker pull mccutchen/go-httpbin 2>/dev/null || true +docker run -d --name ${BACKEND_CONTAINER} --network containers_default -e PORT=80 mccutchen/go-httpbin echo "Waiting for httpbin backend to be ready..." -for i in {1..20}; do - if docker exec ${CONTAINER_NAME} sh -c "curl -sf http://${BACKEND_CONTAINER}/get" >/dev/null 2>&1; then +for i in {1..45}; do + if docker exec ${CONTAINER_NAME} sh -c "wget -qO /dev/null http://${BACKEND_CONTAINER}/get" >/dev/null 2>&1; then echo "✓ httpbin backend is ready" break fi - if [ $i -eq 20 ]; then + if [ "$i" -eq 45 ]; then echo "✗ httpbin backend failed to start" exit 1 fi @@ -209,12 +210,16 @@ curl -s -X POST -H "Content-Type: application/json" \ -d '{"email":"ratelimit@example.local","password":"password123","name":"Rate Limit Tester"}' \ http://localhost:8280/api/v1/auth/register >/dev/null 2>&1 || true -curl -s -X POST -H "Content-Type: application/json" \ +LOGIN_STATUS=$(curl -s -w "\n%{http_code}" -X POST -H "Content-Type: application/json" \ -d '{"email":"ratelimit@example.local","password":"password123"}' \ - -c ${TMP_COOKIE} \ - http://localhost:8280/api/v1/auth/login >/dev/null + -c "${TMP_COOKIE}" \ + http://localhost:8280/api/v1/auth/login | tail -n1) -echo 
"✓ Authentication complete" +if [ "$LOGIN_STATUS" != "200" ]; then + echo "✗ Login failed (HTTP $LOGIN_STATUS) — aborting" + exit 1 +fi +echo "✓ Authentication complete (HTTP $LOGIN_STATUS)" # ============================================================================ # Step 5: Create proxy host @@ -235,14 +240,17 @@ EOF CREATE_RESP=$(curl -s -w "\n%{http_code}" -X POST -H "Content-Type: application/json" \ -d "${PROXY_HOST_PAYLOAD}" \ - -b ${TMP_COOKIE} \ + -b "${TMP_COOKIE}" \ http://localhost:8280/api/v1/proxy-hosts) CREATE_STATUS=$(echo "$CREATE_RESP" | tail -n1) if [ "$CREATE_STATUS" = "201" ]; then echo "✓ Proxy host created successfully" +elif [ "$CREATE_STATUS" = "401" ] || [ "$CREATE_STATUS" = "403" ]; then + echo "✗ Proxy host creation failed — authentication/authorization error (HTTP $CREATE_STATUS)" + exit 1 else - echo " Proxy host may already exist (status: $CREATE_STATUS)" + echo " Proxy host may already exist or was created (status: $CREATE_STATUS) — continuing" fi # ============================================================================ @@ -254,6 +262,7 @@ SEC_CFG_PAYLOAD=$(cat </dev/null +echo "Waiting for Caddy admin API to be ready..." +for i in {1..20}; do + if curl -s -f http://localhost:2119/config/ >/dev/null 2>&1; then + echo "✓ Caddy admin API is ready" + break + fi + if [ "$i" -eq 20 ]; then + echo "✗ Caddy admin API failed to become ready" + exit 1 + fi + echo -n '.' 
+ sleep 1 +done -echo "✓ Rate limiting configured" +SEC_CONFIG_RESP=$(curl -s -w "\n%{http_code}" -X POST -H "Content-Type: application/json" \ + -d "${SEC_CFG_PAYLOAD}" \ + -b "${TMP_COOKIE}" \ + http://localhost:8280/api/v1/security/config) +SEC_CONFIG_STATUS=$(echo "$SEC_CONFIG_RESP" | tail -n1) +SEC_CONFIG_BODY=$(echo "$SEC_CONFIG_RESP" | head -n-1) + +if [ "$SEC_CONFIG_STATUS" != "200" ]; then + echo "✗ Security config update failed (HTTP $SEC_CONFIG_STATUS)" + echo " Response body: $SEC_CONFIG_BODY" + echo " Verify the auth cookie is valid and the user has the admin role." + exit 1 +fi +echo "✓ Rate limiting configured (HTTP $SEC_CONFIG_STATUS)" echo "Waiting for Caddy to apply configuration..." -sleep 5 +sleep 8 -# Verify rate limit handler is configured +# Verify rate limit handler is configured — this is a hard requirement if ! verify_rate_limit_config; then - echo "WARNING: Rate limit handler verification failed (Caddy may still be loading)" - echo "Proceeding with test anyway..." + echo "✗ Rate limit handler verification failed — aborting test" + echo " The handler must be present in Caddy config before enforcement can be tested." + echo "" + echo "=== Caddy admin API full config ===" + curl -s http://localhost:2119/config/ 2>/dev/null | head -200 || echo "Admin API not responding" + echo "" + echo "=== Security config from API ===" + curl -s -b "${TMP_COOKIE}" http://localhost:8280/api/v1/security/config 2>/dev/null || echo "API not responding" + exit 1 fi # ============================================================================ @@ -331,10 +369,10 @@ else echo " ✗ Expected HTTP 429, got HTTP $BLOCKED_STATUS" echo "" echo "=== DEBUG: SecurityConfig from API ===" - curl -s -b ${TMP_COOKIE} http://localhost:8280/api/v1/security/config | jq . + curl -s -b "${TMP_COOKIE}" http://localhost:8280/api/v1/security/config | jq . echo "" echo "=== DEBUG: SecurityStatus from API ===" - curl -s -b ${TMP_COOKIE} http://localhost:8280/api/v1/security/status | jq . 
+ curl -s -b "${TMP_COOKIE}" http://localhost:8280/api/v1/security/status | jq . echo "" echo "=== DEBUG: Caddy config (first proxy route handlers) ===" curl -s http://localhost:2119/config/ | jq '.apps.http.servers.charon_server.routes[0].handle // []' @@ -389,12 +427,12 @@ echo "" # Remove test proxy host from database echo "Removing test proxy host from database..." -INTEGRATION_UUID=$(curl -s -b ${TMP_COOKIE} http://localhost:8280/api/v1/proxy-hosts | \ +INTEGRATION_UUID=$(curl -s -b "${TMP_COOKIE}" http://localhost:8280/api/v1/proxy-hosts | \ grep -o '"uuid":"[^"]*"[^}]*"domain_names":"'${TEST_DOMAIN}'"' | head -n1 | \ grep -o '"uuid":"[^"]*"' | sed 's/"uuid":"\([^"]*\)"/\1/') if [ -n "$INTEGRATION_UUID" ]; then - curl -s -X DELETE -b ${TMP_COOKIE} \ + curl -s -X DELETE -b "${TMP_COOKIE}" \ "http://localhost:8280/api/v1/proxy-hosts/${INTEGRATION_UUID}?delete_uptime=true" >/dev/null echo "✓ Deleted test proxy host ${INTEGRATION_UUID}" fi diff --git a/scripts/waf_integration.sh b/scripts/waf_integration.sh index e954852b..b9658a4a 100755 --- a/scripts/waf_integration.sh +++ b/scripts/waf_integration.sh @@ -163,7 +163,8 @@ if ! docker network inspect containers_default >/dev/null 2>&1; then fi log_info "Starting httpbin backend container..." -docker run -d --name ${BACKEND_CONTAINER} --network containers_default kennethreitz/httpbin +docker pull mccutchen/go-httpbin 2>/dev/null || true +docker run -d --name ${BACKEND_CONTAINER} --network containers_default -e PORT=80 mccutchen/go-httpbin log_info "Starting Charon container with Cerberus enabled..." docker run -d --name ${CONTAINER_NAME} \ @@ -201,12 +202,12 @@ done echo "" log_info "Waiting for httpbin backend to be ready..." 
-for i in {1..20}; do - if docker exec ${CONTAINER_NAME} sh -c "curl -sf http://${BACKEND_CONTAINER}/get" >/dev/null 2>&1; then +for i in {1..45}; do + if docker exec ${CONTAINER_NAME} sh -c "wget -qO /dev/null http://${BACKEND_CONTAINER}/get" >/dev/null 2>&1; then log_info "httpbin backend is ready" break fi - if [ $i -eq 20 ]; then + if [ $i -eq 45 ]; then log_error "httpbin backend failed to start" exit 1 fi diff --git a/tests/README.md b/tests/README.md index 2a33d794..4b330c6c 100644 --- a/tests/README.md +++ b/tests/README.md @@ -3,6 +3,7 @@ **Playwright-based end-to-end tests for the Charon management interface.** Quick Links: + - 📖 [Complete Testing Documentation](../docs/testing/) - 📝 [E2E Test Writing Guide](../docs/testing/e2e-test-writing-guide.md) - 🐛 [Debugging Guide](../docs/testing/debugging-guide.md) @@ -109,6 +110,7 @@ await scriptPath.fill('/usr/local/bin/dns-challenge.sh'); ``` **Why**: Browsers handle label association differently. This helper provides 4-tier fallback: + 1. `getByLabel()` — Standard label association 2. `getByPlaceholder()` — Fallback to placeholder text 3. `locator('#id')` — Fallback to direct ID @@ -221,6 +223,7 @@ logger.error('Failed to load settings', new Error('Network timeout')); ### 1. Only Poll When State Changes ❌ **Before (Inefficient)**: + ```typescript test.beforeEach(async ({ page }) => { // Polls even if flags already correct @@ -234,6 +237,7 @@ test('Test', async ({ page }) => { ``` ✅ **After (Optimized)**: + ```typescript test.afterEach(async ({ request }) => { // Restore defaults once at end @@ -370,6 +374,7 @@ test('Start long task', async ({ page }) => { | `getByRole()` | Semantic locators | ✅ Chromium ✅ Firefox ✅ WebKit | **Avoid**: + - CSS selectors (brittle, browser-specific) - `{ force: true }` clicks (bypasses real user behavior) - `waitForTimeout()` (non-deterministic) @@ -385,6 +390,7 @@ test('Start long task', async ({ page }) => { **Cause**: Label matching differs between browsers. 
**Fix**: Use `getFormFieldByLabel()` with fallbacks: + ```typescript const field = getFormFieldByLabel(page, /field name/i, { placeholder: /enter value/i @@ -396,14 +402,17 @@ const field = getFormFieldByLabel(page, /field name/i, { **Symptom**: `Feature flag propagation timeout after 120 attempts` **Causes**: + 1. Config reload overlay stuck visible 2. Backend not updating flags 3. Database transaction not committed **Fix**: + 1. Check backend logs for PUT `/api/v1/feature-flags` errors 2. Check if overlay is stuck: `page.locator('[data-testid="config-reload-overlay"]').isVisible()` 3. Add retry wrapper: + ```typescript await retryAction(async () => { await clickSwitch(toggle); @@ -418,6 +427,7 @@ await retryAction(async () => { **Cause**: Config reload overlay or sticky header blocking interaction. **Fix**: Use `clickSwitch()` (handles overlay automatically): + ```typescript await clickSwitch(page.getByRole('switch', { name: /feature/i })); ``` diff --git a/tests/certificate-bulk-delete.spec.ts b/tests/certificate-bulk-delete.spec.ts new file mode 100644 index 00000000..4e318616 --- /dev/null +++ b/tests/certificate-bulk-delete.spec.ts @@ -0,0 +1,399 @@ +/** + * Certificate Bulk Delete E2E Tests + * + * Tests the bulk certificate deletion UX: + * - Checkbox column present for each deletable cert + * - No checkbox rendered for valid production LE certs + * - Selection toolbar appears with count and Delete button + * - Select-all header checkbox selects all seeded certs + * - Bulk delete dialog shows correct count + * - Cancel preserves all selected certs + * - Confirming bulk delete removes all selected certs from the table + * + * @see /projects/Charon/docs/plans/current_spec.md §4 Phase 5 + */ + +import { test, expect, loginUser } from './fixtures/auth-fixtures'; +import { request as playwrightRequest } from '@playwright/test'; +import { + waitForLoadingComplete, + waitForDialog, + waitForAPIResponse, + waitForToast, +} from './utils/wait-helpers'; +import { 
generateUniqueId } from './fixtures/test-data'; +import { getStorageStateAuthHeaders } from './utils/api-helpers'; +import { STORAGE_STATE } from './constants'; + +const CERTIFICATES_API = /\/api\/v1\/certificates/; + +/** + * Real self-signed certificate and key for upload tests. + * Generated via: openssl req -x509 -newkey rsa:2048 -nodes -days 365 -subj "/CN=test.local/O=TestOrg" + * The backend parses X.509 data, so placeholder PEM from fixtures won't work. + */ +const REAL_TEST_CERT = `-----BEGIN CERTIFICATE----- +MIIDLzCCAhegAwIBAgIUehGqwKI4zLvoZSNHlAuv7cJ0G5AwDQYJKoZIhvcNAQEL +BQAwJzETMBEGA1UEAwwKdGVzdC5sb2NhbDEQMA4GA1UECgwHVGVzdE9yZzAeFw0y +NjAzMjIwMzQyMDhaFw0yNzAzMjIwMzQyMDhaMCcxEzARBgNVBAMMCnRlc3QubG9j +YWwxEDAOBgNVBAoMB1Rlc3RPcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQDdzdQfOkHzG/lZ242xTvFYMVOrd12rUGQVcWhc9NG1LIJGYZKpS0bzNUdo +ylHhIqbwNq18Dni1znDYsOAlnfZR+gv84U4klRHGE7liNRixBA5ymZ6KI68sOwqx +bn6wpDZgNLnjD3POwSQoPEx2BAYwIyLPjXFjfnv5nce8Bt99j/zDVwhq24b9YdMR +BVV/sOBsAtNEuRngajA9+i2rmLVrXJSiSFhA/hR0wX6bICpFTtahYX7JqfzlMHFO +4lBka9sbC3xujwtFmLtkBovCzf69fA6p2qhJGVNJ9oHeFY3V2CdYq5Q8SZTsG1Yt +S0O/2A9ZkQmHezeG9DYeg68nLfJDAgMBAAGjUzBRMB0GA1UdDgQWBBRE+2+ss2yl +0vAmlccEC7MBWX6UmDAfBgNVHSMEGDAWgBRE+2+ss2yl0vAmlccEC7MBWX6UmDAP +BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQCvwsnSRYQ5PYtuhJ3v +YhKmjkg+NsojYItlo+UkJmq09LkIEwRqJwFLcDxhyHWqRL5Bpc1PA1VJAG6Pif8D +uwwNnXwZZf0P5e7exccSQZnI03OhS0c6/4kfvRSiFiT6BYTYSvQ+OWhpMIIcwhov +86muij2Y32E3F0aqOPjEB+cm/XauXzmFjXi7ig7cktphHcwT8zQn43yCG/BJfWe2 +bRLWqMy+jdr/x2Ij8eWPSlJD3zDxsQiLiO0hFzpQNHfz2Qe17K3dsuhNQ85h2s0w +zCLDm4WygKTw2foUXGNtbWG7z6Eq7PI+2fSlJDFgb+xmdIFQdyKDsZeYO5bmdYq5 +0tY8 +-----END CERTIFICATE-----`; + +const REAL_TEST_KEY = `-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDdzdQfOkHzG/lZ +242xTvFYMVOrd12rUGQVcWhc9NG1LIJGYZKpS0bzNUdoylHhIqbwNq18Dni1znDY +sOAlnfZR+gv84U4klRHGE7liNRixBA5ymZ6KI68sOwqxbn6wpDZgNLnjD3POwSQo +PEx2BAYwIyLPjXFjfnv5nce8Bt99j/zDVwhq24b9YdMRBVV/sOBsAtNEuRngajA9 
++i2rmLVrXJSiSFhA/hR0wX6bICpFTtahYX7JqfzlMHFO4lBka9sbC3xujwtFmLtk +BovCzf69fA6p2qhJGVNJ9oHeFY3V2CdYq5Q8SZTsG1YtS0O/2A9ZkQmHezeG9DYe +g68nLfJDAgMBAAECggEAA8uIcZsBkzNLVOpDcQvfZ+7ldkLt61x4xJUoKqRVt4/c +usTjSYTsNdps2lzRLH+h85eRPaonDpVLAP97FlRZk+rUrFhT30mzACdI6LvtLDox +imxudgFI91dwm2Xp7QPM77XMkxdUl+5eEVeBchN84kiiSS2BCdQZiEUsLF9sZi2P +A5+x6XHImE+Sqfm/xVOZzHjj7ObHxc3bUpDT+RvRDvEBGjtEUlCCWuKvLi3DWIBF +T9E38f0hqoxKwc7gsZCZs7phoVm9a3xjQ8Xh3ONLa30aBsJii33KHHxSASc7hMy1 +cM6GaGcg4xgqFw3B677KWUMc3Ur5YdLu71Bw7MFc4QKBgQD9FyRoWcTEktPdvH9y +o7yxRVWcSs5c47h5X9rhcKvUCyEzQ/89Gt1d8e/qMv9JxXmcg3AS8VYeFmzyyMta +iKTrHYnA8iRgM6CHvgSD4+vc7niW1de7qxW3T6MrGA4AEoQOPUvd6ZljBPIqxV8h +jw9BW5YREZV6fXqqVOVT4GMrbQKBgQDgWpvmu1FY65TjoDljOPBtO17krwaWzb/D +jlXQgZgRJVD7kaUPhm7Kb2d7P7t34LgzGH63hF82PlXqtwd5QhB3EZP9mhZTbXxK +vwLf+H44ANDlcZiyDG9OJBT6ND5/JP0jHEt/KsP9pcd9xbZWNEZZFzddbbcp1G/v +ue6p18XWbwKBgQCmdm8y10BNToldQVrOKxWzvve1CZq7i+fMpRhQyQurNvrKPkIF +jcLlxHhZINu6SNFY+TZgry1GMtfLw/fEfzWBkvcE2f7E64/9WCSeHu4GbS8Rfmsb +e0aYQCAA+xxSPdtvhi99MOT7NMiXCyQr7W1KPpPwfBFF9HwWxinjxiVT7QKBgFAb +Ch9QMrN1Kiw8QUFUS0Q1NqSgedHOlPHWGH3iR9GXaVrpne31KgnNzT0MfHtJGXvk ++xm7geN0TmkIAPsiw45AEH80TVRsezyVBwnBSA/m+q9x5/tqxTM5XuQXU1lCc7/d +kndNZb1jO9+EgJ42/AdDatlJG2UsHOuTj8vE5zaxAoGBAPthB+5YZfu3de+vnfpa +o0oFy++FeeHUTxor2605Lit9ZfEvDTe1/iPQw5TNOLjwx0CdsrCxWk5Tyz50aA30 +KfVperc+m+vEVXIPI1qluI0iTPcHd/lMQYCsu6tKWmFP/hAFTIy7rOHMHfPx3RzK +yRNV1UrzJGv5ZUVKq2kymBut +-----END PRIVATE KEY-----`; + +/** + * Create a custom certificate directly via the API, bypassing TestDataManager's + * narrow CertificateData type which omits the required `name` field. + * Returns the numeric cert ID (from list endpoint) and name for later lookup/cleanup. 
+ */ +async function createCustomCertViaAPI(baseURL: string): Promise<{ id: number; certName: string }> { + const id = generateUniqueId(); + const certName = `bulk-cert-${id}`; + + const ctx = await playwrightRequest.newContext({ + baseURL, + storageState: STORAGE_STATE, + extraHTTPHeaders: getStorageStateAuthHeaders(), + }); + + try { + const response = await ctx.post('/api/v1/certificates', { + multipart: { + name: certName, + certificate_file: { + name: 'cert.pem', + mimeType: 'application/x-pem-file', + buffer: Buffer.from(REAL_TEST_CERT), + }, + key_file: { + name: 'key.pem', + mimeType: 'application/x-pem-file', + buffer: Buffer.from(REAL_TEST_KEY), + }, + }, + }); + + if (!response.ok()) { + throw new Error(`Failed to create certificate: ${response.status()} ${await response.text()}`); + } + + const createResult = await response.json(); + const certUUID: string = createResult.uuid; + + // The create response excludes the numeric ID (json:"-" on model). + // Query the list endpoint and match by UUID to get the numeric ID. + const listResponse = await ctx.get('/api/v1/certificates'); + if (!listResponse.ok()) { + throw new Error(`Failed to list certificates: ${listResponse.status()}`); + } + const certs: Array<{ id: number; uuid: string }> = await listResponse.json(); + const match = certs.find((c) => c.uuid === certUUID); + if (!match) { + throw new Error(`Certificate with UUID ${certUUID} not found in list after creation`); + } + + return { id: match.id, certName }; + } finally { + await ctx.dispose(); + } +} + +/** + * Delete a certificate directly via the API for cleanup. 
+ */ +async function deleteCertViaAPI(baseURL: string, certId: number): Promise { + const ctx = await playwrightRequest.newContext({ + baseURL, + storageState: STORAGE_STATE, + extraHTTPHeaders: getStorageStateAuthHeaders(), + }); + + try { + await ctx.delete(`/api/v1/certificates/${certId}`); + } finally { + await ctx.dispose(); + } +} + +/** + * Navigate to the certificates page and wait for data to load. + */ +async function navigateToCertificates(page: import('@playwright/test').Page): Promise { + const certsResponse = waitForAPIResponse(page, CERTIFICATES_API); + await page.goto('/certificates'); + await certsResponse; + await waitForLoadingComplete(page); +} + +// serial mode: tests share createdCerts[] state via beforeAll/afterAll; +// parallelising across workers would give each worker its own isolated array. +test.describe.serial('Certificate Bulk Delete', () => { + const baseURL = process.env.PLAYWRIGHT_BASE_URL || 'http://127.0.0.1:8080'; + const createdCerts: Array<{ id: number; certName: string }> = []; + + test.beforeAll(async () => { + for (let i = 0; i < 3; i++) { + const cert = await createCustomCertViaAPI(baseURL); + createdCerts.push(cert); + } + }); + + test.afterAll(async () => { + // .catch(() => {}) handles certs already deleted by test 7 + for (const cert of createdCerts) { + await deleteCertViaAPI(baseURL, cert.id).catch(() => {}); + } + }); + + test.beforeEach(async ({ page, adminUser }) => { + await loginUser(page, adminUser); + await waitForLoadingComplete(page); + await navigateToCertificates(page); + }); + + // --------------------------------------------------------------------------- + // Scenario 1: Checkbox column present for each deletable (custom) cert + // --------------------------------------------------------------------------- + test('Checkbox column present — checkboxes appear for each deletable cert', async ({ page }) => { + await test.step('Verify each seeded cert row has a selectable checkbox', async () => { + for (const 
{ certName } of createdCerts) { + const row = page.getByRole('row').filter({ hasText: certName }); + await expect(row).toBeVisible({ timeout: 10000 }); + + const checkbox = row.getByRole('checkbox', { + name: new RegExp(`Select certificate ${certName}`, 'i'), + }); + await expect(checkbox).toBeVisible(); + await expect(checkbox).toBeEnabled(); + } + }); + }); + + // --------------------------------------------------------------------------- + // Scenario 2: Valid production LE cert row has no checkbox rendered + // --------------------------------------------------------------------------- + test('No checkbox for valid LE — valid production LE cert row has no checkbox', async ({ page }) => { + await test.step('Find valid production LE cert rows and verify no checkbox', async () => { + const leRows = page.getByRole('row').filter({ hasText: /let.*encrypt/i }); + const leCount = await leRows.count(); + + if (leCount === 0) { + test.skip(true, 'No Let\'s Encrypt certificates present in this environment'); + return; + } + + for (let i = 0; i < leCount; i++) { + const row = leRows.nth(i); + const rowText = await row.textContent(); + const isExpiredOrStaging = /expired|staging/i.test(rowText ?? 
''); + if (isExpiredOrStaging) continue; + + // Valid production LE cert: first cell is aria-hidden with no checkbox + const firstCell = row.locator('td').first(); + await expect(firstCell).toHaveAttribute('aria-hidden', 'true'); + await expect(row.getByRole('checkbox')).toHaveCount(0); + } + }); + }); + + // --------------------------------------------------------------------------- + // Scenario 3: Select one → toolbar appears with count and Delete button + // --------------------------------------------------------------------------- + test('Select one — checking one cert shows count and Delete button in toolbar', async ({ page }) => { + const { certName } = createdCerts[0]; + + await test.step('Click checkbox for first seeded cert', async () => { + const row = page.getByRole('row').filter({ hasText: certName }); + await expect(row).toBeVisible({ timeout: 10000 }); + const checkbox = row.getByRole('checkbox', { + name: new RegExp(`Select certificate ${certName}`, 'i'), + }); + await checkbox.click(); + }); + + await test.step('Verify toolbar appears with count 1 and bulk Delete button', async () => { + const toolbar = page.getByRole('status').filter({ hasText: /selected/i }); + await expect(toolbar).toBeVisible(); + await expect(toolbar).toContainText('1 certificate(s) selected'); + + const bulkDeleteBtn = toolbar.getByRole('button', { name: /Delete \d+ Certificate/i }); + await expect(bulkDeleteBtn).toBeVisible(); + await expect(bulkDeleteBtn).toBeEnabled(); + }); + }); + + // --------------------------------------------------------------------------- + // Scenario 4: Select-all → header checkbox selects all seeded certs + // --------------------------------------------------------------------------- + test('Select-all — header checkbox selects all seeded certs; toolbar shows count', async ({ page }) => { + await test.step('Click the select-all header checkbox', async () => { + const selectAllCheckbox = page.getByRole('checkbox', { + name: /Select all deletable 
certificates/i, + }); + await expect(selectAllCheckbox).toBeVisible({ timeout: 10000 }); + await selectAllCheckbox.click(); + }); + + await test.step('Verify all seeded cert row checkboxes are checked', async () => { + for (const { certName } of createdCerts) { + const row = page.getByRole('row').filter({ hasText: certName }); + await expect(row).toBeVisible({ timeout: 10000 }); + const checkbox = row.getByRole('checkbox'); + await expect(checkbox).toBeChecked(); + } + }); + + await test.step('Verify toolbar is visible with bulk Delete button', async () => { + const toolbar = page.getByRole('status').filter({ hasText: /selected/i }); + await expect(toolbar).toBeVisible(); + const bulkDeleteBtn = toolbar.getByRole('button', { name: /Delete \d+ Certificate/i }); + await expect(bulkDeleteBtn).toBeVisible(); + }); + }); + + // --------------------------------------------------------------------------- + // Scenario 5: Dialog shows correct count ("Delete 3 Certificate(s)") + // --------------------------------------------------------------------------- + test('Dialog shows correct count — bulk dialog shows "Delete 3 Certificate(s)" for 3 selected', async ({ page }) => { + await test.step('Select each of the 3 seeded certs individually', async () => { + for (const { certName } of createdCerts) { + const row = page.getByRole('row').filter({ hasText: certName }); + await expect(row).toBeVisible({ timeout: 10000 }); + const checkbox = row.getByRole('checkbox', { + name: new RegExp(`Select certificate ${certName}`, 'i'), + }); + await checkbox.click(); + } + }); + + await test.step('Click the bulk Delete button in the toolbar', async () => { + const toolbar = page.getByRole('status').filter({ hasText: /selected/i }); + await expect(toolbar).toBeVisible(); + const bulkDeleteBtn = toolbar.getByRole('button', { name: /Delete \d+ Certificate/i }); + await bulkDeleteBtn.click(); + }); + + await test.step('Verify dialog title shows "Delete 3 Certificate(s)"', async () => { + const 
dialog = await waitForDialog(page); + await expect(dialog).toBeVisible(); + await expect(dialog).toContainText('Delete 3 Certificate(s)'); + }); + + await test.step('Cancel the dialog to preserve certs for subsequent tests', async () => { + const dialog = page.getByRole('dialog'); + await dialog.getByRole('button', { name: /cancel/i }).click(); + await expect(dialog).not.toBeVisible({ timeout: 5000 }); + }); + }); + + // --------------------------------------------------------------------------- + // Scenario 6: Cancel preserves all selected certs in the list + // --------------------------------------------------------------------------- + test('Cancel preserves certs — cancelling bulk dialog leaves all certs in list', async ({ page }) => { + await test.step('Select all 3 seeded certs and open bulk delete dialog', async () => { + for (const { certName } of createdCerts) { + const row = page.getByRole('row').filter({ hasText: certName }); + await expect(row).toBeVisible({ timeout: 10000 }); + await row.getByRole('checkbox', { + name: new RegExp(`Select certificate ${certName}`, 'i'), + }).click(); + } + const toolbar = page.getByRole('status').filter({ hasText: /selected/i }); + await toolbar.getByRole('button', { name: /Delete \d+ Certificate/i }).click(); + }); + + await test.step('Click Cancel in the bulk delete dialog', async () => { + const dialog = await waitForDialog(page); + await expect(dialog).toBeVisible(); + await dialog.getByRole('button', { name: /cancel/i }).click(); + }); + + await test.step('Verify dialog is closed and all 3 certs remain in the list', async () => { + await expect(page.getByRole('dialog')).not.toBeVisible({ timeout: 5000 }); + for (const { certName } of createdCerts) { + const row = page.getByRole('row').filter({ hasText: certName }); + await expect(row).toBeVisible({ timeout: 5000 }); + } + }); + }); + + // --------------------------------------------------------------------------- + // Scenario 7: Confirming bulk delete removes 
all selected certs from the table + // --------------------------------------------------------------------------- + test('Confirm deletes all selected — bulk delete removes all selected certs', async ({ page }) => { + await test.step('Select all 3 seeded certs and open bulk delete dialog', async () => { + for (const { certName } of createdCerts) { + const row = page.getByRole('row').filter({ hasText: certName }); + await expect(row).toBeVisible({ timeout: 10000 }); + await row.getByRole('checkbox', { + name: new RegExp(`Select certificate ${certName}`, 'i'), + }).click(); + } + const toolbar = page.getByRole('status').filter({ hasText: /selected/i }); + await toolbar.getByRole('button', { name: /Delete \d+ Certificate/i }).click(); + }); + + await test.step('Confirm bulk deletion', async () => { + const dialog = await waitForDialog(page); + await expect(dialog).toBeVisible(); + const confirmBtn = dialog.getByRole('button', { name: /Delete \d+ Certificate/i }); + await expect(confirmBtn).toBeVisible(); + await expect(confirmBtn).toBeEnabled(); + await confirmBtn.click(); + }); + + await test.step('Await success toast confirming all deletions settled', async () => { + // toast.success fires in onSuccess after Promise.allSettled resolves + await waitForToast(page, /certificate.*deleted/i, { type: 'success' }); + }); + + await test.step('Verify all 3 certs are removed from the table', async () => { + for (const { certName } of createdCerts) { + await expect( + page.getByRole('row').filter({ hasText: certName }) + ).toHaveCount(0, { timeout: 10000 }); + } + }); + }); +}); diff --git a/tests/certificate-delete.spec.ts b/tests/certificate-delete.spec.ts new file mode 100644 index 00000000..4882742f --- /dev/null +++ b/tests/certificate-delete.spec.ts @@ -0,0 +1,486 @@ +/** + * Certificate Deletion E2E Tests + * + * Tests the certificate deletion UX enhancement: + * - Delete button visibility based on cert type, status, and in-use state + * - Accessible confirmation 
dialog (replaces native confirm()) + * - Cancel/confirm flows + * - Disabled button with tooltip for in-use certs + * - No delete button for valid production LE certs + * + * @see /projects/Charon/docs/plans/current_spec.md + */ + +import { test, expect, loginUser } from './fixtures/auth-fixtures'; +import { request as playwrightRequest } from '@playwright/test'; +import { + waitForLoadingComplete, + waitForDialog, + waitForAPIResponse, +} from './utils/wait-helpers'; +import { generateUniqueId } from './fixtures/test-data'; +import { STORAGE_STATE } from './constants'; + +const CERTIFICATES_API = /\/api\/v1\/certificates/; + +/** + * Real self-signed certificate and key for upload tests. + * Generated via: openssl req -x509 -newkey rsa:2048 -nodes -days 365 -subj "/CN=test.local/O=TestOrg" + * The backend parses X.509 data, so placeholder PEM from fixtures won't work. + */ +const REAL_TEST_CERT = `-----BEGIN CERTIFICATE----- +MIIDLzCCAhegAwIBAgIUehGqwKI4zLvoZSNHlAuv7cJ0G5AwDQYJKoZIhvcNAQEL +BQAwJzETMBEGA1UEAwwKdGVzdC5sb2NhbDEQMA4GA1UECgwHVGVzdE9yZzAeFw0y +NjAzMjIwMzQyMDhaFw0yNzAzMjIwMzQyMDhaMCcxEzARBgNVBAMMCnRlc3QubG9j +YWwxEDAOBgNVBAoMB1Rlc3RPcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQDdzdQfOkHzG/lZ242xTvFYMVOrd12rUGQVcWhc9NG1LIJGYZKpS0bzNUdo +ylHhIqbwNq18Dni1znDYsOAlnfZR+gv84U4klRHGE7liNRixBA5ymZ6KI68sOwqx +bn6wpDZgNLnjD3POwSQoPEx2BAYwIyLPjXFjfnv5nce8Bt99j/zDVwhq24b9YdMR +BVV/sOBsAtNEuRngajA9+i2rmLVrXJSiSFhA/hR0wX6bICpFTtahYX7JqfzlMHFO +4lBka9sbC3xujwtFmLtkBovCzf69fA6p2qhJGVNJ9oHeFY3V2CdYq5Q8SZTsG1Yt +S0O/2A9ZkQmHezeG9DYeg68nLfJDAgMBAAGjUzBRMB0GA1UdDgQWBBRE+2+ss2yl +0vAmlccEC7MBWX6UmDAfBgNVHSMEGDAWgBRE+2+ss2yl0vAmlccEC7MBWX6UmDAP +BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQCvwsnSRYQ5PYtuhJ3v +YhKmjkg+NsojYItlo+UkJmq09LkIEwRqJwFLcDxhyHWqRL5Bpc1PA1VJAG6Pif8D +uwwNnXwZZf0P5e7exccSQZnI03OhS0c6/4kfvRSiFiT6BYTYSvQ+OWhpMIIcwhov +86muij2Y32E3F0aqOPjEB+cm/XauXzmFjXi7ig7cktphHcwT8zQn43yCG/BJfWe2 +bRLWqMy+jdr/x2Ij8eWPSlJD3zDxsQiLiO0hFzpQNHfz2Qe17K3dsuhNQ85h2s0w 
+zCLDm4WygKTw2foUXGNtbWG7z6Eq7PI+2fSlJDFgb+xmdIFQdyKDsZeYO5bmdYq5 +0tY8 +-----END CERTIFICATE-----`; + +const REAL_TEST_KEY = `-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDdzdQfOkHzG/lZ +242xTvFYMVOrd12rUGQVcWhc9NG1LIJGYZKpS0bzNUdoylHhIqbwNq18Dni1znDY +sOAlnfZR+gv84U4klRHGE7liNRixBA5ymZ6KI68sOwqxbn6wpDZgNLnjD3POwSQo +PEx2BAYwIyLPjXFjfnv5nce8Bt99j/zDVwhq24b9YdMRBVV/sOBsAtNEuRngajA9 ++i2rmLVrXJSiSFhA/hR0wX6bICpFTtahYX7JqfzlMHFO4lBka9sbC3xujwtFmLtk +BovCzf69fA6p2qhJGVNJ9oHeFY3V2CdYq5Q8SZTsG1YtS0O/2A9ZkQmHezeG9DYe +g68nLfJDAgMBAAECggEAA8uIcZsBkzNLVOpDcQvfZ+7ldkLt61x4xJUoKqRVt4/c +usTjSYTsNdps2lzRLH+h85eRPaonDpVLAP97FlRZk+rUrFhT30mzACdI6LvtLDox +imxudgFI91dwm2Xp7QPM77XMkxdUl+5eEVeBchN84kiiSS2BCdQZiEUsLF9sZi2P +A5+x6XHImE+Sqfm/xVOZzHjj7ObHxc3bUpDT+RvRDvEBGjtEUlCCWuKvLi3DWIBF +T9E38f0hqoxKwc7gsZCZs7phoVm9a3xjQ8Xh3ONLa30aBsJii33KHHxSASc7hMy1 +cM6GaGcg4xgqFw3B677KWUMc3Ur5YdLu71Bw7MFc4QKBgQD9FyRoWcTEktPdvH9y +o7yxRVWcSs5c47h5X9rhcKvUCyEzQ/89Gt1d8e/qMv9JxXmcg3AS8VYeFmzyyMta +iKTrHYnA8iRgM6CHvgSD4+vc7niW1de7qxW3T6MrGA4AEoQOPUvd6ZljBPIqxV8h +jw9BW5YREZV6fXqqVOVT4GMrbQKBgQDgWpvmu1FY65TjoDljOPBtO17krwaWzb/D +jlXQgZgRJVD7kaUPhm7Kb2d7P7t34LgzGH63hF82PlXqtwd5QhB3EZP9mhZTbXxK +vwLf+H44ANDlcZiyDG9OJBT6ND5/JP0jHEt/KsP9pcd9xbZWNEZZFzddbbcp1G/v +ue6p18XWbwKBgQCmdm8y10BNToldQVrOKxWzvve1CZq7i+fMpRhQyQurNvrKPkIF +jcLlxHhZINu6SNFY+TZgry1GMtfLw/fEfzWBkvcE2f7E64/9WCSeHu4GbS8Rfmsb +e0aYQCAA+xxSPdtvhi99MOT7NMiXCyQr7W1KPpPwfBFF9HwWxinjxiVT7QKBgFAb +Ch9QMrN1Kiw8QUFUS0Q1NqSgedHOlPHWGH3iR9GXaVrpne31KgnNzT0MfHtJGXvk ++xm7geN0TmkIAPsiw45AEH80TVRsezyVBwnBSA/m+q9x5/tqxTM5XuQXU1lCc7/d +kndNZb1jO9+EgJ42/AdDatlJG2UsHOuTj8vE5zaxAoGBAPthB+5YZfu3de+vnfpa +o0oFy++FeeHUTxor2605Lit9ZfEvDTe1/iPQw5TNOLjwx0CdsrCxWk5Tyz50aA30 +KfVperc+m+vEVXIPI1qluI0iTPcHd/lMQYCsu6tKWmFP/hAFTIy7rOHMHfPx3RzK +yRNV1UrzJGv5ZUVKq2kymBut +-----END PRIVATE KEY-----`; + +/** + * Create a custom certificate directly via the API, bypassing TestDataManager's + * narrow CertificateData type which omits the 
required `name` field. + * Returns the numeric cert ID (from list endpoint) and name for later lookup/cleanup. + * + * Note: The POST response excludes the numeric `id` (model uses json:"-"), + * so we query the list endpoint to resolve the numeric ID by matching on UUID. + */ +async function createCustomCertViaAPI(baseURL: string): Promise<{ id: number; certName: string }> { + const id = generateUniqueId(); + const certName = `test-cert-${id}`; + + const ctx = await playwrightRequest.newContext({ + baseURL, + storageState: STORAGE_STATE, + }); + + try { + const response = await ctx.post('/api/v1/certificates', { + multipart: { + name: certName, + certificate_file: { + name: 'cert.pem', + mimeType: 'application/x-pem-file', + buffer: Buffer.from(REAL_TEST_CERT), + }, + key_file: { + name: 'key.pem', + mimeType: 'application/x-pem-file', + buffer: Buffer.from(REAL_TEST_KEY), + }, + }, + }); + + if (!response.ok()) { + throw new Error(`Failed to create certificate: ${response.status()} ${await response.text()}`); + } + + const createResult = await response.json(); + const certUUID: string = createResult.uuid; + + // The create response excludes the numeric ID (json:"-" on model). + // Query the list endpoint and match by UUID to get the numeric ID. + const listResponse = await ctx.get('/api/v1/certificates'); + if (!listResponse.ok()) { + throw new Error(`Failed to list certificates: ${listResponse.status()}`); + } + const certs: Array<{ id: number; uuid: string }> = await listResponse.json(); + const match = certs.find((c) => c.uuid === certUUID); + if (!match) { + throw new Error(`Certificate with UUID ${certUUID} not found in list after creation`); + } + + return { id: match.id, certName }; + } finally { + await ctx.dispose(); + } +} + +/** + * Delete a certificate directly via the API for cleanup. 
+ */ +async function deleteCertViaAPI(baseURL: string, certId: number): Promise { + const ctx = await playwrightRequest.newContext({ + baseURL, + storageState: STORAGE_STATE, + }); + + try { + await ctx.delete(`/api/v1/certificates/${certId}`); + } finally { + await ctx.dispose(); + } +} + +/** + * Create a proxy host linked to a certificate via direct API. + * Returns the proxy host ID for cleanup. + */ +async function createProxyHostWithCertViaAPI( + baseURL: string, + certificateId: number +): Promise<{ id: string }> { + const id = generateUniqueId(); + const domain = `proxy-${id}.test.local`; + + const ctx = await playwrightRequest.newContext({ + baseURL, + storageState: STORAGE_STATE, + }); + + try { + const response = await ctx.post('/api/v1/proxy-hosts', { + data: { + domain_names: domain, + forward_host: '127.0.0.1', + forward_port: 3000, + forward_scheme: 'https', + certificate_id: certificateId, + }, + }); + + if (!response.ok()) { + throw new Error(`Failed to create proxy host: ${response.status()} ${await response.text()}`); + } + + const result = await response.json(); + return { id: result.id }; + } finally { + await ctx.dispose(); + } +} + +/** + * Delete a proxy host via API for cleanup. 
+ */ +async function deleteProxyHostViaAPI(baseURL: string, hostId: string): Promise { + const ctx = await playwrightRequest.newContext({ + baseURL, + storageState: STORAGE_STATE, + }); + + try { + await ctx.delete(`/api/v1/proxy-hosts/${hostId}`); + } finally { + await ctx.dispose(); + } +} + +/** + * Navigate to the certificates page and wait for data to load + */ +async function navigateToCertificates(page: import('@playwright/test').Page): Promise { + const certsResponse = waitForAPIResponse(page, CERTIFICATES_API); + await page.goto('/certificates'); + await certsResponse; + await waitForLoadingComplete(page); +} + +test.describe('Certificate Deletion', () => { + const baseURL = process.env.PLAYWRIGHT_BASE_URL || 'http://127.0.0.1:8080'; + const createdCertIds: number[] = []; + + test.beforeEach(async ({ page, adminUser }) => { + await loginUser(page, adminUser); + await waitForLoadingComplete(page); + }); + + test.afterAll(async () => { + // Clean up any certs created during tests that weren't deleted by the tests + for (const certId of createdCertIds) { + await deleteCertViaAPI(baseURL, certId).catch(() => {}); + } + }); + + // --------------------------------------------------------------------------- + // Scenario 1: Certificates page loads and shows certificate list + // --------------------------------------------------------------------------- + test('should display certificates page with heading and list', async ({ page }) => { + await test.step('Navigate to certificates page', async () => { + await navigateToCertificates(page); + }); + + await test.step('Verify page heading is visible', async () => { + const heading = page.getByRole('heading', { name: /certificates/i }); + await expect(heading).toBeVisible(); + }); + + await test.step('Verify certificate list or empty state is present', async () => { + const table = page.getByRole('table'); + const emptyState = page.getByText(/no.*certificates/i); + + await expect(async () => { + const hasTable = 
(await table.count()) > 0 && (await table.first().isVisible()); + const hasEmpty = (await emptyState.count()) > 0 && (await emptyState.first().isVisible()); + expect(hasTable || hasEmpty).toBeTruthy(); + }).toPass({ timeout: 10000 }); + }); + }); + + // --------------------------------------------------------------------------- + // Scenario 2: Custom cert not in use shows delete button + // --------------------------------------------------------------------------- + test('should show delete button for custom cert not in use', async ({ page }) => { + let certName: string; + + await test.step('Seed a custom certificate via API', async () => { + const result = await createCustomCertViaAPI(baseURL); + createdCertIds.push(result.id); + certName = result.certName; + }); + + await test.step('Navigate to certificates page', async () => { + await navigateToCertificates(page); + }); + + await test.step('Verify delete button is visible for the custom cert', async () => { + const certRow = page.getByRole('row').filter({ hasText: certName }); + await expect(certRow).toBeVisible({ timeout: 10000 }); + + const deleteButton = certRow.getByRole('button', { name: /delete/i }); + await expect(deleteButton).toBeVisible(); + }); + }); + + // --------------------------------------------------------------------------- + // Scenario 3: Delete button opens confirmation dialog + // --------------------------------------------------------------------------- + test('should open confirmation dialog when delete button is clicked', async ({ page }) => { + let certName: string; + + await test.step('Seed a custom certificate via API', async () => { + const result = await createCustomCertViaAPI(baseURL); + createdCertIds.push(result.id); + certName = result.certName; + }); + + await test.step('Navigate to certificates page', async () => { + await navigateToCertificates(page); + }); + + await test.step('Click the delete button', async () => { + const certRow = page.getByRole('row').filter({ 
hasText: certName }); + await expect(certRow).toBeVisible({ timeout: 10000 }); + const deleteButton = certRow.getByRole('button', { name: /delete/i }); + await deleteButton.click(); + }); + + await test.step('Verify confirmation dialog is visible', async () => { + const dialog = await waitForDialog(page); + await expect(dialog).toBeVisible(); + + await expect(dialog.getByText(/Delete Certificate/)).toBeVisible(); + await expect(dialog.getByRole('button', { name: /Cancel/i })).toBeVisible(); + await expect(dialog.getByRole('button', { name: /^Delete$/i })).toBeVisible(); + }); + }); + + // --------------------------------------------------------------------------- + // Scenario 4: Cancel closes dialog without deleting + // --------------------------------------------------------------------------- + test('should close dialog and keep cert when Cancel is clicked', async ({ page }) => { + let certName: string; + + await test.step('Seed a custom certificate via API', async () => { + const result = await createCustomCertViaAPI(baseURL); + createdCertIds.push(result.id); + certName = result.certName; + }); + + await test.step('Navigate to certificates and open delete dialog', async () => { + await navigateToCertificates(page); + const certRow = page.getByRole('row').filter({ hasText: certName }); + await expect(certRow).toBeVisible({ timeout: 10000 }); + const deleteButton = certRow.getByRole('button', { name: /delete/i }); + await deleteButton.click(); + await waitForDialog(page); + }); + + await test.step('Click Cancel button', async () => { + const dialog = page.getByRole('dialog'); + const cancelButton = dialog.getByRole('button', { name: /cancel/i }); + await cancelButton.click(); + }); + + await test.step('Verify dialog is closed and cert still exists', async () => { + await expect(page.getByRole('dialog')).not.toBeVisible({ timeout: 5000 }); + const certRow = page.getByRole('row').filter({ hasText: certName }); + await expect(certRow).toBeVisible(); + }); + }); + 
+ // --------------------------------------------------------------------------- + // Scenario 5: Successful deletion removes cert from list + // --------------------------------------------------------------------------- + test('should delete cert and show success toast on confirm', async ({ page }) => { + let certName: string; + + await test.step('Seed a custom certificate via API', async () => { + const result = await createCustomCertViaAPI(baseURL); + // Don't push to createdCertIds — this test will delete it via UI + certName = result.certName; + }); + + await test.step('Navigate to certificates and open delete dialog', async () => { + await navigateToCertificates(page); + const certRow = page.getByRole('row').filter({ hasText: certName }); + await expect(certRow).toBeVisible({ timeout: 10000 }); + const deleteButton = certRow.getByRole('button', { name: /Delete Certificate/i }); + await deleteButton.click(); + await waitForDialog(page); + }); + + await test.step('Confirm deletion and verify cert is removed', async () => { + const dialog = page.getByRole('dialog'); + await expect(dialog).toBeVisible(); + + // Wait for the dialog's confirm Delete button + const confirmDeleteButton = dialog.getByRole('button', { name: /^Delete$/i }); + await expect(confirmDeleteButton).toBeVisible(); + await expect(confirmDeleteButton).toBeEnabled(); + + // Click confirm and wait for the DELETE API response simultaneously + const [deleteResponse] = await Promise.all([ + page.waitForResponse( + (resp) => resp.url().includes('/api/v1/certificates/') && resp.request().method() === 'DELETE', + { timeout: 15000 } + ), + confirmDeleteButton.click(), + ]); + + // Verify the API call succeeded + expect(deleteResponse.status()).toBeLessThan(400); + + // Verify the cert row is removed from the list + const certRow = page.getByRole('row').filter({ hasText: certName }); + await expect(certRow).toHaveCount(0, { timeout: 10000 }); + }); + }); + + // 
--------------------------------------------------------------------------- + // Scenario 6: In-use cert shows disabled delete button with tooltip + // --------------------------------------------------------------------------- + test('should show disabled delete button with tooltip for in-use cert', async ({ + page, + }) => { + let certName: string; + let proxyHostId: string; + + await test.step('Seed a custom cert and attach it to a proxy host', async () => { + const certResult = await createCustomCertViaAPI(baseURL); + createdCertIds.push(certResult.id); + certName = certResult.certName; + + // Create a proxy host that references this certificate via certificate_id + const proxyResult = await createProxyHostWithCertViaAPI(baseURL, certResult.id); + proxyHostId = proxyResult.id; + }); + + await test.step('Navigate to certificates page', async () => { + await navigateToCertificates(page); + }); + + await test.step('Verify delete button is disabled for the in-use cert', async () => { + const certRow = page.getByRole('row').filter({ hasText: certName }); + await expect(certRow).toBeVisible({ timeout: 10000 }); + + const deleteButton = certRow.getByRole('button', { name: /Delete Certificate/i }); + await expect(deleteButton).toBeVisible(); + await expect(deleteButton).toHaveAttribute('aria-disabled', 'true'); + }); + + await test.step('Verify tooltip on hover', async () => { + const certRow = page.getByRole('row').filter({ hasText: certName }); + const deleteButton = certRow.getByRole('button', { name: /Delete Certificate/i }); + + await deleteButton.hover(); + + const tooltip = page.getByRole('tooltip').or( + page.getByText(/cannot delete/i) + ); + await expect(tooltip.first()).toBeVisible({ timeout: 5000 }); + }); + + // Cleanup: delete proxy host first (so cert can be cleaned up), then cert + await test.step('Cleanup proxy host', async () => { + if (proxyHostId) { + await deleteProxyHostViaAPI(baseURL, proxyHostId).catch(() => {}); + } + }); + }); + + // 
--------------------------------------------------------------------------- + // Scenario 7: Valid production LE cert not in use has no delete button + // --------------------------------------------------------------------------- + test('should not show delete button for valid production LE cert', async ({ page }) => { + await test.step('Navigate to certificates page', async () => { + await navigateToCertificates(page); + }); + + await test.step('Check for valid production LE certs', async () => { + const leCertRows = page + .getByRole('row') + .filter({ hasText: /let.*encrypt/i }); + + const leCount = await leCertRows.count(); + if (leCount === 0) { + test.skip(true, 'No Let\'s Encrypt certificates present in this environment to verify'); + return; + } + + for (let i = 0; i < leCount; i++) { + const row = leCertRows.nth(i); + const rowText = await row.textContent(); + + // Skip expired LE certs — they ARE expected to have a delete button + const isExpired = /expired/i.test(rowText ?? ''); + if (isExpired) continue; + + // Valid production LE cert should NOT have a delete button + const deleteButton = row.getByRole('button', { name: /delete/i }); + await expect(deleteButton).toHaveCount(0); + } + }); + }); +}); diff --git a/tests/core/caddy-import/caddy-import-gaps.spec.ts b/tests/core/caddy-import/caddy-import-gaps.spec.ts index ac64fe9c..06b1d61d 100644 --- a/tests/core/caddy-import/caddy-import-gaps.spec.ts +++ b/tests/core/caddy-import/caddy-import-gaps.spec.ts @@ -19,7 +19,7 @@ import { test, expect, type TestUser } from '../../fixtures/auth-fixtures'; import type { TestDataManager } from '../../utils/TestDataManager'; import type { Page } from '@playwright/test'; -import { ensureAuthenticatedImportFormReady, ensureImportFormReady, resetImportSession } from './import-page-helpers'; +import { ensureAuthenticatedImportFormReady, ensureImportFormReady, getStoredAuthHeader, resetImportSession } from './import-page-helpers'; /** * Helper: Generate unique domain 
with namespace isolation @@ -328,7 +328,7 @@ test.describe('Caddy Import Gap Coverage @caddy-import-gaps', () => { // Gap 3: Overwrite Resolution Flow // ========================================================================= test.describe('Overwrite Resolution Flow', () => { - test('3.1: should update existing host when selecting Replace with Imported resolution', async ({ page, request, testData, browserName, adminUser }) => { + test('3.1: should update existing host when selecting Replace with Imported resolution', async ({ page, testData, browserName, adminUser }) => { // Create existing host with initial config const result = await testData.createProxyHost({ domain: 'overwrite-test.example.com', @@ -379,7 +379,7 @@ test.describe('Caddy Import Gap Coverage @caddy-import-gaps', () => { await test.step('Verify existing host was updated (not duplicated)', async () => { // Fetch the host via API - const response = await request.get(`/api/v1/proxy-hosts/${hostId}`); + const response = await page.request.get(`/api/v1/proxy-hosts/${hostId}`, { headers: await getStoredAuthHeader(page) }); expect(response.ok()).toBeTruthy(); const host = await response.json(); @@ -389,7 +389,7 @@ test.describe('Caddy Import Gap Coverage @caddy-import-gaps', () => { expect(host.forward_port).toBe(9000); // Verify no duplicate was created - fetch all hosts and check count - const allHostsResponse = await request.get('/api/v1/proxy-hosts'); + const allHostsResponse = await page.request.get('/api/v1/proxy-hosts', { headers: await getStoredAuthHeader(page) }); expect(allHostsResponse.ok()).toBeTruthy(); const allHosts = await allHostsResponse.json(); @@ -627,7 +627,7 @@ test.describe('Caddy Import Gap Coverage @caddy-import-gaps', () => { // Gap 5: Name Editing in Review // ========================================================================= test.describe('Name Editing in Review', () => { - test('5.1: should create proxy host with custom name from review table input', async ({ page, 
request, testData }) => { + test('5.1: should create proxy host with custom name from review table input', async ({ page, testData }) => { const domain = generateDomain(testData, 'custom-name-test'); const customName = 'My Custom Proxy Name'; const caddyfile = `${domain} { reverse_proxy localhost:5000 }`; @@ -669,7 +669,7 @@ test.describe('Caddy Import Gap Coverage @caddy-import-gaps', () => { await test.step('Verify created host has custom name', async () => { // Fetch all proxy hosts - const response = await request.get('/api/v1/proxy-hosts'); + const response = await page.request.get('/api/v1/proxy-hosts', { headers: await getStoredAuthHeader(page) }); expect(response.ok()).toBeTruthy(); const hosts = await response.json(); diff --git a/tests/core/caddy-import/caddy-import-webkit.spec.ts b/tests/core/caddy-import/caddy-import-webkit.spec.ts index 2b6dfe8e..f7707717 100644 --- a/tests/core/caddy-import/caddy-import-webkit.spec.ts +++ b/tests/core/caddy-import/caddy-import-webkit.spec.ts @@ -22,6 +22,7 @@ import { Page } from '@playwright/test'; import { attachImportDiagnostics, ensureImportUiPreconditions, + getStoredAuthHeader, logImportFailureContext, resetImportSession, waitForSuccessfulImportResponse, @@ -72,7 +73,7 @@ async function ensureWebkitAuthSession(page: Page): Promise { }); } - const meResponse = await page.request.get('/api/v1/auth/me'); + const meResponse = await page.request.get('/api/v1/auth/me', { headers: await getStoredAuthHeader(page) }); if (!meResponse.ok()) { throw new Error( `WebKit auth bootstrap verification failed: /api/v1/auth/me returned ${meResponse.status()} at ${page.url()}` diff --git a/tests/core/caddy-import/import-page-helpers.ts b/tests/core/caddy-import/import-page-helpers.ts index 73194b45..06ae85dd 100644 --- a/tests/core/caddy-import/import-page-helpers.ts +++ b/tests/core/caddy-import/import-page-helpers.ts @@ -4,6 +4,11 @@ import { readFileSync } from 'fs'; import { STORAGE_STATE } from '../../constants'; const 
IMPORT_PAGE_PATH = '/tasks/import/caddyfile'; + +export async function getStoredAuthHeader(page: Page): Promise> { + const token = await page.evaluate(() => localStorage.getItem('charon_auth_token')).catch(() => null); + return token ? { Authorization: `Bearer ${token}` } : {}; +} const SETUP_TEST_EMAIL = process.env.E2E_TEST_EMAIL || 'e2e-test@example.com'; const SETUP_TEST_PASSWORD = process.env.E2E_TEST_PASSWORD || 'TestPassword123!'; const IMPORT_BLOCKING_STATUS_CODES = new Set([401, 403, 302, 429]); @@ -252,7 +257,7 @@ export async function resetImportSession(page: Page): Promise { async function readImportStatus(page: Page): Promise<{ hasPending: boolean; sessionId: string }> { try { - const statusResponse = await page.request.get('/api/v1/import/status'); + const statusResponse = await page.request.get('/api/v1/import/status', { headers: await getStoredAuthHeader(page) }); if (!statusResponse.ok()) { return { hasPending: false, sessionId: '' }; } @@ -272,16 +277,17 @@ async function readImportStatus(page: Page): Promise<{ hasPending: boolean; sess } async function issuePendingSessionCancel(page: Page, sessionId: string): Promise { + const authHeader = await getStoredAuthHeader(page); if (sessionId) { await page .request - .delete(`/api/v1/import/cancel?session_uuid=${encodeURIComponent(sessionId)}`) + .delete(`/api/v1/import/cancel?session_uuid=${encodeURIComponent(sessionId)}`, { headers: authHeader }) .catch(() => null); } // Keep legacy endpoints for compatibility across backend variants. 
- await page.request.delete('/api/v1/import/cancel').catch(() => null); - await page.request.post('/api/v1/import/cancel').catch(() => null); + await page.request.delete('/api/v1/import/cancel', { headers: authHeader }).catch(() => null); + await page.request.post('/api/v1/import/cancel', { headers: authHeader }).catch(() => null); } async function clearPendingImportSession(page: Page): Promise { diff --git a/tests/fixtures/notifications.ts b/tests/fixtures/notifications.ts index 100e84fe..5dd9e994 100644 --- a/tests/fixtures/notifications.ts +++ b/tests/fixtures/notifications.ts @@ -21,7 +21,8 @@ export type NotificationProviderType = | 'telegram' | 'generic' | 'webhook' - | 'email'; + | 'email' + | 'pushover'; /** * Notification provider configuration interface @@ -171,6 +172,24 @@ export const telegramProvider: NotificationProviderConfig = { notify_uptime: true, }; +// ============================================================================ +// Pushover Provider Fixtures +// ============================================================================ + +/** + * Valid Pushover notification provider configuration + */ +export const pushoverProvider: NotificationProviderConfig = { + name: generateProviderName('pushover'), + type: 'pushover', + url: 'uQiRzpo4DXghDmr9QzzfQu27cmVRsG', + token: 'azGDORePK8gMaC0QOYAMyEEuzJnyUi', + enabled: true, + notify_proxy_hosts: true, + notify_certs: true, + notify_uptime: true, +}; + // ============================================================================ // Generic Webhook Provider Fixtures // ============================================================================ diff --git a/tests/monitoring/create-monitor.spec.ts b/tests/monitoring/create-monitor.spec.ts new file mode 100644 index 00000000..840a0efe --- /dev/null +++ b/tests/monitoring/create-monitor.spec.ts @@ -0,0 +1,198 @@ +import { test, expect, loginUser } from '../fixtures/auth-fixtures'; +import { waitForLoadingComplete } from '../utils/wait-helpers'; + 
+interface UptimeMonitor { + id: string; + name: string; + type: string; + url: string; + interval: number; + enabled: boolean; + status: string; + latency: number; + max_retries: number; + last_check?: string | null; +} + +const emptyMonitorsRoute = async (page: import('@playwright/test').Page) => { + await page.route('**/api/v1/uptime/monitors', async (route) => { + if (route.request().method() === 'GET') { + await route.fulfill({ status: 200, json: [] }); + } else { + await route.continue(); + } + }); + await page.route('**/api/v1/uptime/monitors/*/history*', async (route) => { + await route.fulfill({ status: 200, json: [] }); + }); +}; + +async function openCreateModal(page: import('@playwright/test').Page) { + await page.click('[data-testid="add-monitor-button"]'); + await expect(page.getByRole('heading', { name: /create monitor/i })).toBeVisible(); +} + +test.describe('Create Monitor Modal — TCP UX', () => { + test.beforeEach(async ({ page, authenticatedUser }) => { + await loginUser(page, authenticatedUser); + await emptyMonitorsRoute(page); + await page.goto('/uptime'); + await waitForLoadingComplete(page); + }); + + test('HTTP type shows URL placeholder', async ({ page }) => { + await openCreateModal(page); + + const urlInput = page.locator('#create-monitor-url'); + await expect(urlInput).toHaveAttribute('placeholder', 'https://example.com'); + }); + + test('TCP type shows bare host:port placeholder', async ({ page }) => { + await openCreateModal(page); + + const typeSelect = page.locator('#create-monitor-type'); + await typeSelect.selectOption('tcp'); + + const urlInput = page.locator('#create-monitor-url'); + await expect(urlInput).toHaveAttribute('placeholder', '192.168.1.1:8080'); + }); + + test('Type selector precedes URL input in DOM order', async ({ page }) => { + await openCreateModal(page); + + await expect(page.locator('#create-monitor-type')).toBeVisible(); + await expect(page.locator('#create-monitor-url')).toBeVisible(); + + const 
typeComesBeforeUrl = await page.evaluate(() => { + const typeEl = document.getElementById('create-monitor-type'); + const urlEl = document.getElementById('create-monitor-url'); + if (!typeEl || !urlEl) return false; + return !!(typeEl.compareDocumentPosition(urlEl) & Node.DOCUMENT_POSITION_FOLLOWING); + }); + + expect(typeComesBeforeUrl).toBe(true); + }); + + test('Helper text updates dynamically when type changes', async ({ page }) => { + await openCreateModal(page); + + const helperText = page.locator('#create-monitor-url-helper'); + + await expect(helperText).toContainText(/scheme/i); + + await page.locator('#create-monitor-type').selectOption('tcp'); + + await expect(helperText).toContainText(/host:port/i); + }); + + test('Inline error appears when tcp:// scheme entered for TCP type', async ({ page }) => { + await openCreateModal(page); + + await page.locator('#create-monitor-type').selectOption('tcp'); + await page.locator('#create-monitor-url').fill('tcp://192.168.1.1:8080'); + + const errorAlert = page.locator('[role="alert"]'); + await expect(errorAlert).toBeVisible(); + await expect(errorAlert).toContainText(/host:port format/i); + }); + + test('Inline error clears when scheme prefix removed', async ({ page }) => { + await openCreateModal(page); + + await page.locator('#create-monitor-type').selectOption('tcp'); + const urlInput = page.locator('#create-monitor-url'); + await urlInput.fill('tcp://192.168.1.1:8080'); + + await expect(page.locator('[role="alert"]')).toBeVisible(); + + await urlInput.fill('192.168.1.1:8080'); + + await expect(page.locator('[role="alert"]')).not.toBeVisible(); + }); + + test('Inline error clears when type changed from TCP to HTTP', async ({ page }) => { + await openCreateModal(page); + + const typeSelect = page.locator('#create-monitor-type'); + await typeSelect.selectOption('tcp'); + + const urlInput = page.locator('#create-monitor-url'); + await urlInput.fill('tcp://192.168.1.1:8080'); + + await 
expect(page.locator('[role="alert"]')).toBeVisible(); + + await typeSelect.selectOption('http'); + + await expect(page.locator('[role="alert"]')).not.toBeVisible(); + }); + + test('Submit with tcp:// prefix is prevented client-side', async ({ page }) => { + let createCalled = false; + + await page.route('**/api/v1/uptime/monitors', async (route) => { + if (route.request().method() === 'POST') { + createCalled = true; + await route.continue(); + } else { + await route.fulfill({ status: 200, json: [] }); + } + }); + + await openCreateModal(page); + + await page.locator('#create-monitor-type').selectOption('tcp'); + await page.locator('#create-monitor-name').fill('DB Server'); + await page.locator('#create-monitor-url').fill('tcp://192.168.1.1:8080'); + + await page.getByRole('button', { name: /create/i }).click(); + + // Inline error confirms client-side validation blocked the submit + await expect(page.locator('[role="alert"]')).toBeVisible(); + // Modal still open — form was not submitted + await expect(page.getByRole('heading', { name: /create monitor/i })).toBeVisible(); + + expect(createCalled).toBe(false); + }); + + test('TCP monitor created successfully with bare host:port', async ({ page }) => { + let capturedPayload: Record | null = null; + + const createdMonitor: UptimeMonitor = { + id: 'm-test', + name: 'DB Server', + type: 'tcp', + url: '192.168.1.1:5432', + interval: 60, + enabled: true, + status: 'pending', + latency: 0, + max_retries: 3, + }; + + await page.route('**/api/v1/uptime/monitors', async (route) => { + if (route.request().method() === 'POST') { + capturedPayload = route.request().postDataJSON() as Record; + await route.fulfill({ status: 201, json: createdMonitor }); + } else { + await route.fulfill({ status: 200, json: [] }); + } + }); + await page.route(`**/api/v1/uptime/monitors/${createdMonitor.id}/history*`, async (route) => { + await route.fulfill({ status: 200, json: [] }); + }); + + await openCreateModal(page); + + await 
page.locator('#create-monitor-type').selectOption('tcp'); + await page.locator('#create-monitor-name').fill('DB Server'); + await page.locator('#create-monitor-url').fill('192.168.1.1:5432'); + + await page.getByRole('button', { name: /create/i }).click(); + + await expect(page.getByRole('heading', { name: /create monitor/i })).not.toBeVisible({ timeout: 5000 }); + + expect(capturedPayload).not.toBeNull(); + expect(capturedPayload?.url).toBe('192.168.1.1:5432'); + expect(capturedPayload?.type).toBe('tcp'); + }); +}); diff --git a/tests/monitoring/uptime-monitoring.spec.ts b/tests/monitoring/uptime-monitoring.spec.ts index 34a26361..29210cf1 100644 --- a/tests/monitoring/uptime-monitoring.spec.ts +++ b/tests/monitoring/uptime-monitoring.spec.ts @@ -444,7 +444,7 @@ test.describe('Uptime Monitoring Page', () => { await page.click(SELECTORS.addMonitorButton); await page.fill('input#create-monitor-name', 'Redis Cache'); - await page.fill('input#create-monitor-url', 'tcp://redis.local:6379'); + await page.fill('input#create-monitor-url', 'redis.local:6379'); await page.selectOption('select#create-monitor-type', 'tcp'); await page.fill('input#create-monitor-interval', '30'); @@ -462,7 +462,7 @@ test.describe('Uptime Monitoring Page', () => { expect(createPayload).not.toBeNull(); expect(createPayload?.type).toBe('tcp'); - expect(createPayload?.url).toBe('tcp://redis.local:6379'); + expect(createPayload?.url).toBe('redis.local:6379'); }); test('should update existing monitor', async ({ page, authenticatedUser }) => { diff --git a/tests/security/crowdsec-first-enable.spec.ts b/tests/security/crowdsec-first-enable.spec.ts new file mode 100644 index 00000000..3efbc6b7 --- /dev/null +++ b/tests/security/crowdsec-first-enable.spec.ts @@ -0,0 +1,98 @@ +/** + * CrowdSec First-Enable UX E2E Tests + * + * Tests the UI behavior while the CrowdSec startup mutation is pending. + * Uses route interception to simulate the slow startup without a real CrowdSec install. 
+ * + * @see /projects/Charon/docs/plans/current_spec.md PR-4 + */ + +import { test, expect, loginUser } from '../fixtures/auth-fixtures'; +import { waitForLoadingComplete } from '../utils/wait-helpers'; + +test.describe('CrowdSec first-enable UX @security', () => { + test.beforeEach(async ({ page, adminUser }) => { + await loginUser(page, adminUser); + await waitForLoadingComplete(page); + await page.goto('/security'); + await waitForLoadingComplete(page); + }); + + test('CrowdSec toggle stays checked while starting', async ({ page }) => { + // Intercept start endpoint and hold the response for 2 seconds + await page.route('**/api/v1/admin/crowdsec/start', async (route) => { + await new Promise((resolve) => setTimeout(resolve, 2000)); + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ pid: 123, lapi_ready: false }), + }); + }); + + const toggle = page.getByTestId('toggle-crowdsec'); + await toggle.click(); + + // Immediately after click, the toggle should remain checked (user intent) + await expect(toggle).toBeChecked(); + }); + + test('CrowdSec card shows Starting badge while starting', async ({ page }) => { + await page.route('**/api/v1/admin/crowdsec/start', async (route) => { + await new Promise((resolve) => setTimeout(resolve, 2000)); + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ pid: 123, lapi_ready: false }), + }); + }); + + const toggle = page.getByTestId('toggle-crowdsec'); + await toggle.click(); + + // Badge should show "Starting..." 
text while mutation is pending + await expect(page.getByText('Starting...')).toBeVisible(); + }); + + test('CrowdSecKeyWarning absent while starting', async ({ page }) => { + await page.route('**/api/v1/admin/crowdsec/start', async (route) => { + await new Promise((resolve) => setTimeout(resolve, 2000)); + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ pid: 123, lapi_ready: false }), + }); + }); + + // Make key-status return a rejected key + await page.route('**/api/v1/admin/crowdsec/key-status', async (route) => { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ + env_key_rejected: true, + key_source: 'env', + full_key: 'key123', + current_key_preview: 'key...', + rejected_key_preview: 'old...', + message: 'Key rejected', + }), + }); + }); + + const toggle = page.getByTestId('toggle-crowdsec'); + await toggle.click(); + + // The key warning alert must not be present while mutation is pending + await expect(page.getByRole('alert', { name: /CrowdSec API Key/i })).not.toBeVisible({ timeout: 1500 }); + const keyWarning = page.locator('[role="alert"]').filter({ hasText: /CrowdSec API Key Updated/ }); + await expect(keyWarning).not.toBeVisible({ timeout: 500 }); + }); + + test('Backend accepts empty value for setting', async ({ page }) => { + // Confirm POST /settings with empty value returns 200 (not 400) + const response = await page.request.post('/api/v1/settings', { + data: { key: 'security.crowdsec.enabled', value: '' }, + }); + expect(response.status()).toBe(200); + }); +}); diff --git a/tests/settings/notifications-payload.spec.ts b/tests/settings/notifications-payload.spec.ts index 3f254789..bb050203 100644 --- a/tests/settings/notifications-payload.spec.ts +++ b/tests/settings/notifications-payload.spec.ts @@ -107,6 +107,16 @@ test.describe('Notifications Payload Matrix', () => { name: `telegram-matrix-${Date.now()}`, url: '987654321', }, + { + type: 'slack', + 
name: `slack-matrix-${Date.now()}`, + url: '#slack-alerts', + }, + { + type: 'ntfy', + name: `ntfy-matrix-${Date.now()}`, + url: 'https://ntfy.sh/my-topic', + }, ] as const; for (const scenario of scenarios) { @@ -125,12 +135,20 @@ test.describe('Notifications Payload Matrix', () => { await page.getByTestId('provider-gotify-token').fill('bot123456789:ABCdefGHI'); } + if (scenario.type === 'slack') { + await page.getByTestId('provider-gotify-token').fill('https://hooks.slack.com/services/T00000000/B00000000/xxxxxxxxxxxxxxxxxxxx'); + } + + if (scenario.type === 'ntfy') { + await page.getByTestId('provider-gotify-token').fill('tk_ntfy_matrix_token'); + } + await page.getByTestId('provider-save-btn').click(); }); } await test.step('Verify payload contract per provider type', async () => { - expect(capturedCreatePayloads).toHaveLength(4); + expect(capturedCreatePayloads).toHaveLength(6); const discordPayload = capturedCreatePayloads.find((payload) => payload.type === 'discord'); expect(discordPayload).toBeTruthy(); @@ -152,6 +170,18 @@ test.describe('Notifications Payload Matrix', () => { expect(telegramPayload?.token).toBe('bot123456789:ABCdefGHI'); expect(telegramPayload?.gotify_token).toBeUndefined(); expect(telegramPayload?.url).toBe('987654321'); + + const slackPayload = capturedCreatePayloads.find((payload) => payload.type === 'slack'); + expect(slackPayload).toBeTruthy(); + expect(slackPayload?.token).toBe('https://hooks.slack.com/services/T00000000/B00000000/xxxxxxxxxxxxxxxxxxxx'); + expect(slackPayload?.gotify_token).toBeUndefined(); + expect(slackPayload?.url).toBe('#slack-alerts'); + + const ntfyPayload = capturedCreatePayloads.find((payload) => payload.type === 'ntfy'); + expect(ntfyPayload).toBeTruthy(); + expect(ntfyPayload?.token).toBe('tk_ntfy_matrix_token'); + expect(ntfyPayload?.gotify_token).toBeUndefined(); + expect(ntfyPayload?.url).toBe('https://ntfy.sh/my-topic'); }); }); @@ -324,7 +354,15 @@ test.describe('Notifications Payload Matrix', () => { 
await page.getByTestId('provider-name').fill(gotifyName); await page.getByTestId('provider-url').fill('https://gotify.example.com/message'); await page.getByTestId('provider-gotify-token').fill('super-secret-token'); + + const previewResponsePromise = page.waitForResponse( + (response) => + /\/api\/v1\/notifications\/providers\/preview$/.test(response.url()) + && response.request().method() === 'POST' + ); await page.getByTestId('provider-preview-btn').click(); + const previewResponse = await previewResponsePromise; + capturedPreviewPayload = (await previewResponse.request().postDataJSON()) as Record; }); await test.step('Save provider', async () => { @@ -334,8 +372,16 @@ test.describe('Notifications Payload Matrix', () => { await test.step('Send test from saved provider row', async () => { const providerRow = page.getByTestId('provider-row-gotify-transform-id'); await expect(providerRow).toBeVisible({ timeout: 5000 }); + + const testResponsePromise = page.waitForResponse( + (response) => + /\/api\/v1\/notifications\/providers\/test$/.test(response.url()) + && response.request().method() === 'POST' + ); const sendTestButton = providerRow.getByRole('button', { name: /send test/i }); await sendTestButton.click(); + const testResponse = await testResponsePromise; + capturedTestPayload = (await testResponse.request().postDataJSON()) as Record; }); await test.step('Assert token is not sent on preview/test payloads', async () => { diff --git a/tests/settings/notifications.spec.ts b/tests/settings/notifications.spec.ts index 1224827c..65127a23 100644 --- a/tests/settings/notifications.spec.ts +++ b/tests/settings/notifications.spec.ts @@ -141,7 +141,7 @@ test.describe('Notification Providers', () => { contentType: 'application/json', body: JSON.stringify([ { id: '1', name: 'Discord Alert', type: 'discord', url: 'https://discord.com/api/webhooks/test', enabled: true }, - { id: '2', name: 'Slack Notify', type: 'slack', url: 'https://hooks.example.com/services/test', 
enabled: true }, + { id: '2', name: 'Pagerduty Notify', type: 'pagerduty', url: 'https://hooks.example.com/services/test', enabled: true }, { id: '3', name: 'Generic Hook', type: 'generic', url: 'https://webhook.test.local', enabled: false }, ]), }); @@ -188,7 +188,7 @@ test.describe('Notification Providers', () => { body: JSON.stringify([ { id: '1', name: 'Discord One', type: 'discord', url: 'https://discord.com/api/webhooks/1', enabled: true }, { id: '2', name: 'Discord Two', type: 'discord', url: 'https://discord.com/api/webhooks/2', enabled: true }, - { id: '3', name: 'Slack Notify', type: 'slack', url: 'https://hooks.example.com/test', enabled: true }, + { id: '3', name: 'Pagerduty Notify', type: 'pagerduty', url: 'https://hooks.example.com/test', enabled: true }, ]), }); } else { @@ -206,7 +206,7 @@ test.describe('Notification Providers', () => { // Check that providers are visible - look for provider names await expect(page.getByText('Discord One')).toBeVisible(); await expect(page.getByText('Discord Two')).toBeVisible(); - await expect(page.getByText('Slack Notify')).toBeVisible(); + await expect(page.getByText('Pagerduty Notify')).toBeVisible(); }); await test.step('Verify legacy provider row renders explicit deprecated messaging', async () => { @@ -294,8 +294,8 @@ test.describe('Notification Providers', () => { await test.step('Verify provider type select contains supported options', async () => { const providerTypeSelect = page.getByTestId('provider-type'); - await expect(providerTypeSelect.locator('option')).toHaveCount(5); - await expect(providerTypeSelect.locator('option')).toHaveText(['Discord', 'Gotify', 'Generic Webhook', 'Email', 'Telegram']); + await expect(providerTypeSelect.locator('option')).toHaveCount(8); + await expect(providerTypeSelect.locator('option')).toHaveText(['Discord', 'Gotify', 'Generic Webhook', 'Email', 'Telegram', 'Slack', 'Pushover', 'Ntfy']); await expect(providerTypeSelect).toBeEnabled(); }); }); diff --git 
a/tests/settings/ntfy-notification-provider.spec.ts b/tests/settings/ntfy-notification-provider.spec.ts new file mode 100644 index 00000000..97eaf73b --- /dev/null +++ b/tests/settings/ntfy-notification-provider.spec.ts @@ -0,0 +1,681 @@ +/** + * Ntfy Notification Provider E2E Tests + * + * Tests the Ntfy notification provider type. + * Covers form rendering, CRUD operations, payload contracts, + * token security, and validation behavior specific to the Ntfy provider type. + */ + +import { test, expect, loginUser } from '../fixtures/auth-fixtures'; +import { waitForLoadingComplete } from '../utils/wait-helpers'; + +function generateProviderName(prefix: string = 'ntfy-test'): string { + return `${prefix}-${Date.now()}`; +} + +test.describe('Ntfy Notification Provider', () => { + test.beforeEach(async ({ page, adminUser }) => { + await loginUser(page, adminUser); + await waitForLoadingComplete(page); + await page.goto('/settings/notifications'); + await waitForLoadingComplete(page); + }); + + test.describe('Form Rendering', () => { + test('should show token field and topic URL placeholder when ntfy type selected', async ({ page }) => { + await test.step('Open Add Provider form', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + }); + + await test.step('Select ntfy provider type', async () => { + await page.getByTestId('provider-type').selectOption('ntfy'); + }); + + await test.step('Verify token field is visible', async () => { + await expect(page.getByTestId('provider-gotify-token')).toBeVisible(); + }); + + await test.step('Verify token field label shows Access Token (optional)', async () => { + const tokenLabel = page.getByText(/access token.*optional/i); + await expect(tokenLabel.first()).toBeVisible(); + }); + + await test.step('Verify topic URL placeholder', async () => { + const urlInput = page.getByTestId('provider-url'); + await 
expect(urlInput).toHaveAttribute('placeholder', 'https://ntfy.sh/my-topic'); + }); + + await test.step('Verify JSON template section is shown for ntfy', async () => { + await expect(page.getByTestId('provider-config')).toBeVisible(); + }); + + await test.step('Verify save button is accessible', async () => { + const saveButton = page.getByTestId('provider-save-btn'); + await expect(saveButton).toBeVisible(); + await expect(saveButton).toBeEnabled(); + }); + }); + + test('should toggle form fields correctly when switching between ntfy and discord', async ({ page }) => { + await test.step('Open Add Provider form', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + }); + + await test.step('Verify discord is default without token field', async () => { + await expect(page.getByTestId('provider-type')).toHaveValue('discord'); + await expect(page.getByTestId('provider-gotify-token')).toHaveCount(0); + }); + + await test.step('Switch to ntfy and verify token field appears', async () => { + await page.getByTestId('provider-type').selectOption('ntfy'); + await expect(page.getByTestId('provider-gotify-token')).toBeVisible(); + }); + + await test.step('Switch back to discord and verify token field hidden', async () => { + await page.getByTestId('provider-type').selectOption('discord'); + await expect(page.getByTestId('provider-gotify-token')).toHaveCount(0); + }); + }); + + test('should show JSON template section for ntfy', async ({ page }) => { + await test.step('Open Add Provider form and select ntfy', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + await page.getByTestId('provider-type').selectOption('ntfy'); + }); + + await test.step('Verify JSON template config section is visible', async () => { + await 
expect(page.getByTestId('provider-config')).toBeVisible(); + }); + }); + }); + + test.describe('CRUD Operations', () => { + test('should create an ntfy notification provider with URL and token', async ({ page }) => { + const providerName = generateProviderName('ntfy-create'); + let capturedPayload: Record | null = null; + + await test.step('Mock create endpoint to capture payload', async () => { + const createdProviders: Array> = []; + + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'POST') { + const payload = (await request.postDataJSON()) as Record; + capturedPayload = payload; + const { token, gotify_token, ...rest } = payload; + const created: Record = { + id: 'ntfy-provider-1', + ...rest, + ...(token !== undefined || gotify_token !== undefined ? { has_token: true } : {}), + }; + createdProviders.push(created); + await route.fulfill({ + status: 201, + contentType: 'application/json', + body: JSON.stringify(created), + }); + return; + } + + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(createdProviders), + }); + return; + } + + await route.continue(); + }); + }); + + await test.step('Open form and select ntfy type', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + await page.getByTestId('provider-type').selectOption('ntfy'); + }); + + await test.step('Fill ntfy provider form with URL and token', async () => { + await page.getByTestId('provider-name').fill(providerName); + await page.getByTestId('provider-url').fill('https://ntfy.sh/my-topic'); + await page.getByTestId('provider-gotify-token').fill('tk_abc123xyz789'); + }); + + await test.step('Configure event notifications', async () => { + await page.getByTestId('notify-proxy-hosts').check(); + await page.getByTestId('notify-certs').check(); + }); + 
+ await test.step('Save provider', async () => { + await Promise.all([ + page.waitForResponse( + (resp) => + /\/api\/v1\/notifications\/providers/.test(resp.url()) && + resp.request().method() === 'POST' && + resp.status() === 201 + ), + page.getByTestId('provider-save-btn').click(), + ]); + }); + + await test.step('Verify provider appears in list', async () => { + const providerInList = page.getByText(providerName); + await expect(providerInList.first()).toBeVisible({ timeout: 10000 }); + }); + + await test.step('Verify outgoing payload contract', async () => { + expect(capturedPayload).toBeTruthy(); + expect(capturedPayload?.type).toBe('ntfy'); + expect(capturedPayload?.name).toBe(providerName); + expect(capturedPayload?.url).toBe('https://ntfy.sh/my-topic'); + expect(capturedPayload?.token).toBe('tk_abc123xyz789'); + expect(capturedPayload?.gotify_token).toBeUndefined(); + }); + }); + + test('should create an ntfy notification provider with URL only (no token)', async ({ page }) => { + const providerName = generateProviderName('ntfy-notoken'); + let capturedPayload: Record | null = null; + + await test.step('Mock create endpoint to capture payload', async () => { + const createdProviders: Array> = []; + + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'POST') { + const payload = (await request.postDataJSON()) as Record; + capturedPayload = payload; + const { token, gotify_token, ...rest } = payload; + const created: Record = { + id: 'ntfy-notoken-1', + ...rest, + ...(token !== undefined || gotify_token !== undefined ? 
{ has_token: true } : {}), + }; + createdProviders.push(created); + await route.fulfill({ + status: 201, + contentType: 'application/json', + body: JSON.stringify(created), + }); + return; + } + + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(createdProviders), + }); + return; + } + + await route.continue(); + }); + }); + + await test.step('Open form and select ntfy type', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + await page.getByTestId('provider-type').selectOption('ntfy'); + }); + + await test.step('Fill ntfy provider form with URL only', async () => { + await page.getByTestId('provider-name').fill(providerName); + await page.getByTestId('provider-url').fill('https://ntfy.sh/public-topic'); + }); + + await test.step('Configure event notifications', async () => { + await page.getByTestId('notify-proxy-hosts').check(); + }); + + await test.step('Save provider', async () => { + await Promise.all([ + page.waitForResponse( + (resp) => + /\/api\/v1\/notifications\/providers/.test(resp.url()) && + resp.request().method() === 'POST' && + resp.status() === 201 + ), + page.getByTestId('provider-save-btn').click(), + ]); + }); + + await test.step('Verify provider appears in list', async () => { + const providerInList = page.getByText(providerName); + await expect(providerInList.first()).toBeVisible({ timeout: 10000 }); + }); + + await test.step('Verify outgoing payload has no token', async () => { + expect(capturedPayload).toBeTruthy(); + expect(capturedPayload?.type).toBe('ntfy'); + expect(capturedPayload?.name).toBe(providerName); + expect(capturedPayload?.url).toBe('https://ntfy.sh/public-topic'); + expect(capturedPayload?.token).toBeUndefined(); + expect(capturedPayload?.gotify_token).toBeUndefined(); + }); + }); + + test('should edit ntfy provider and preserve 
token when token field left blank', async ({ page }) => { + let updatedPayload: Record | null = null; + + await test.step('Mock existing ntfy provider', async () => { + let providers = [ + { + id: 'ntfy-edit-id', + name: 'Ntfy Alerts', + type: 'ntfy', + url: 'https://ntfy.sh/my-topic', + has_token: true, + enabled: true, + notify_proxy_hosts: true, + notify_certs: true, + notify_uptime: false, + }, + ]; + + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(providers), + }); + } else { + await route.continue(); + } + }); + + await page.route('**/api/v1/notifications/providers/*', async (route, request) => { + if (request.method() === 'PUT') { + updatedPayload = (await request.postDataJSON()) as Record; + providers = providers.map((p) => + p.id === 'ntfy-edit-id' ? { ...p, ...updatedPayload } : p + ); + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ success: true }), + }); + } else { + await route.continue(); + } + }); + }); + + await test.step('Reload to get mocked provider', async () => { + await page.reload(); + await waitForLoadingComplete(page); + }); + + await test.step('Verify ntfy provider is displayed', async () => { + await expect(page.getByText('Ntfy Alerts')).toBeVisible({ timeout: 5000 }); + }); + + await test.step('Click edit on ntfy provider', async () => { + const providerRow = page.getByTestId('provider-row-ntfy-edit-id'); + const editButton = providerRow.getByRole('button', { name: /edit/i }); + await expect(editButton).toBeVisible({ timeout: 5000 }); + await editButton.click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + }); + + await test.step('Verify form loads with ntfy type', async () => { + await expect(page.getByTestId('provider-type')).toHaveValue('ntfy'); + }); + + await test.step('Verify 
stored token indicator is shown', async () => { + await expect(page.getByTestId('gotify-token-stored-indicator')).toBeVisible(); + }); + + await test.step('Update name without changing token', async () => { + const nameInput = page.getByTestId('provider-name'); + await nameInput.clear(); + await nameInput.fill('Ntfy Alerts v2'); + }); + + await test.step('Save changes', async () => { + await Promise.all([ + page.waitForResponse( + (resp) => + /\/api\/v1\/notifications\/providers\/ntfy-edit-id/.test(resp.url()) && + resp.request().method() === 'PUT' && + resp.status() === 200 + ), + page.waitForResponse( + (resp) => + /\/api\/v1\/notifications\/providers/.test(resp.url()) && + resp.request().method() === 'GET' && + resp.status() === 200 + ), + page.getByTestId('provider-save-btn').click(), + ]); + }); + + await test.step('Verify update payload preserves token omission', async () => { + expect(updatedPayload).toBeTruthy(); + expect(updatedPayload?.type).toBe('ntfy'); + expect(updatedPayload?.name).toBe('Ntfy Alerts v2'); + expect(updatedPayload?.token).toBeUndefined(); + expect(updatedPayload?.gotify_token).toBeUndefined(); + }); + }); + + test('should test an ntfy notification provider', async ({ page }) => { + let testCalled = false; + + await test.step('Mock existing ntfy provider and test endpoint', async () => { + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify([ + { + id: 'ntfy-test-id', + name: 'Ntfy Test Provider', + type: 'ntfy', + url: 'https://ntfy.sh/my-topic', + has_token: true, + enabled: true, + notify_proxy_hosts: true, + notify_certs: true, + notify_uptime: false, + }, + ]), + }); + } else { + await route.continue(); + } + }); + + await page.route('**/api/v1/notifications/providers/test', async (route, request) => { + if (request.method() === 'POST') { + testCalled = true; + await 
route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ success: true }), + }); + } else { + await route.continue(); + } + }); + }); + + await test.step('Reload to get mocked provider', async () => { + await page.reload(); + await waitForLoadingComplete(page); + }); + + await test.step('Click Send Test on the provider', async () => { + const providerRow = page.getByTestId('provider-row-ntfy-test-id'); + const sendTestButton = providerRow.getByRole('button', { name: /send test/i }); + await expect(sendTestButton).toBeVisible({ timeout: 5000 }); + await expect(sendTestButton).toBeEnabled(); + await Promise.all([ + page.waitForResponse( + (resp) => + resp.url().includes('/api/v1/notifications/providers/test') && + resp.status() === 200 + ), + sendTestButton.click(), + ]); + }); + + await test.step('Verify test was called', async () => { + expect(testCalled).toBe(true); + }); + }); + + test('should delete an ntfy notification provider', async ({ page }) => { + await test.step('Mock existing ntfy provider', async () => { + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify([ + { + id: 'ntfy-delete-id', + name: 'Ntfy To Delete', + type: 'ntfy', + url: 'https://ntfy.sh/my-topic', + enabled: true, + }, + ]), + }); + } else { + await route.continue(); + } + }); + + await page.route('**/api/v1/notifications/providers/*', async (route, request) => { + if (request.method() === 'DELETE') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ success: true }), + }); + } else { + await route.continue(); + } + }); + }); + + await test.step('Reload to get mocked provider', async () => { + await page.reload(); + await waitForLoadingComplete(page); + }); + + await test.step('Verify ntfy provider is displayed', async () => { + await 
expect(page.getByText('Ntfy To Delete')).toBeVisible({ timeout: 10000 }); + }); + + await test.step('Delete provider', async () => { + page.on('dialog', async (dialog) => { + expect(dialog.type()).toBe('confirm'); + await dialog.accept(); + }); + + const deleteButton = page.getByRole('button', { name: /delete/i }) + .or(page.locator('button').filter({ has: page.locator('svg.lucide-trash2, svg[class*="trash"]') })); + await Promise.all([ + page.waitForResponse( + (resp) => + resp.url().includes('/api/v1/notifications/providers/ntfy-delete-id') && + resp.status() === 200 + ), + deleteButton.first().click(), + ]); + }); + + await test.step('Verify deletion feedback', async () => { + const successIndicator = page.locator('[data-testid="toast-success"]') + .or(page.getByRole('status').filter({ hasText: /deleted|removed/i })) + .or(page.getByText(/no.*providers/i)); + await expect(successIndicator.first()).toBeVisible({ timeout: 5000 }); + }); + }); + }); + + test.describe('Security', () => { + test('GET response should NOT expose the access token value', async ({ page }) => { + let apiResponseBody: Array> | null = null; + + let resolveRouteBody: (data: Array>) => void; + const routeBodyPromise = new Promise>>((resolve) => { + resolveRouteBody = resolve; + }); + + await test.step('Mock provider list with has_token flag', async () => { + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + const body = [ + { + id: 'ntfy-sec-id', + name: 'Ntfy Secure', + type: 'ntfy', + url: 'https://ntfy.sh/my-topic', + has_token: true, + enabled: true, + notify_proxy_hosts: true, + notify_certs: true, + notify_uptime: false, + }, + ]; + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(body), + }); + resolveRouteBody!(body); + } else { + await route.continue(); + } + }); + }); + + await test.step('Navigate to trigger GET', async () => { + await page.reload(); + apiResponseBody 
= await Promise.race([ + routeBodyPromise, + new Promise>>((_resolve, reject) => + setTimeout( + () => reject(new Error('Timed out waiting for GET /api/v1/notifications/providers')), + 15000 + ) + ), + ]); + await waitForLoadingComplete(page); + }); + + await test.step('Verify access token is not in API response', async () => { + expect(apiResponseBody).toBeTruthy(); + const provider = apiResponseBody![0]; + expect(provider.token).toBeUndefined(); + expect(provider.gotify_token).toBeUndefined(); + const responseStr = JSON.stringify(provider); + expect(responseStr).not.toContain('tk_abc123xyz789'); + }); + }); + + test('access token should not appear in the url field or any visible field', async ({ page }) => { + await test.step('Mock provider with clean URL field', async () => { + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify([ + { + id: 'ntfy-url-sec-id', + name: 'Ntfy URL Check', + type: 'ntfy', + url: 'https://ntfy.sh/my-topic', + has_token: true, + enabled: true, + }, + ]), + }); + } else { + await route.continue(); + } + }); + }); + + await test.step('Reload and verify access token does not appear in provider row', async () => { + await page.reload(); + await waitForLoadingComplete(page); + await expect(page.getByText('Ntfy URL Check')).toBeVisible({ timeout: 5000 }); + + const providerRow = page.getByTestId('provider-row-ntfy-url-sec-id'); + const urlText = await providerRow.textContent(); + expect(urlText).not.toContain('tk_abc123xyz789'); + }); + }); + }); + + test.describe('Payload Contract', () => { + test('POST body should include type=ntfy, url field = topic URL, token field is write-only', async ({ page }) => { + const providerName = generateProviderName('ntfy-contract'); + let capturedPayload: Record | null = null; + let capturedGetResponse: Array> | null = null; + + await 
test.step('Mock create and list endpoints', async () => { + const createdProviders: Array> = []; + + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'POST') { + const payload = (await request.postDataJSON()) as Record; + capturedPayload = payload; + const { token, gotify_token, ...rest } = payload; + const created: Record = { + id: 'ntfy-contract-1', + ...rest, + has_token: !!(token || gotify_token), + }; + createdProviders.push(created); + await route.fulfill({ + status: 201, + contentType: 'application/json', + body: JSON.stringify(created), + }); + return; + } + + if (request.method() === 'GET') { + capturedGetResponse = [...createdProviders]; + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(createdProviders), + }); + return; + } + + await route.continue(); + }); + }); + + await test.step('Create an ntfy provider via the UI', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + await page.getByTestId('provider-type').selectOption('ntfy'); + await page.getByTestId('provider-name').fill(providerName); + await page.getByTestId('provider-url').fill('https://ntfy.sh/my-topic'); + await page.getByTestId('provider-gotify-token').fill('tk_abc123xyz789'); + + await Promise.all([ + page.waitForResponse( + (resp) => + /\/api\/v1\/notifications\/providers/.test(resp.url()) && + resp.request().method() === 'POST' && + resp.status() === 201 + ), + page.getByTestId('provider-save-btn').click(), + ]); + }); + + await test.step('Verify POST payload: type=ntfy, url=topic URL, token=access token', async () => { + expect(capturedPayload).toBeTruthy(); + expect(capturedPayload?.type).toBe('ntfy'); + expect(capturedPayload?.url).toBe('https://ntfy.sh/my-topic'); + expect(capturedPayload?.token).toBe('tk_abc123xyz789'); + 
expect(capturedPayload?.gotify_token).toBeUndefined(); + }); + + await test.step('Verify GET response: has_token=true, token value absent', async () => { + await expect(page.getByText(providerName).first()).toBeVisible({ timeout: 10000 }); + expect(capturedGetResponse).toBeTruthy(); + const provider = capturedGetResponse![0]; + expect(provider.has_token).toBe(true); + expect(provider.token).toBeUndefined(); + expect(provider.gotify_token).toBeUndefined(); + const responseStr = JSON.stringify(provider); + expect(responseStr).not.toContain('tk_abc123xyz789'); + }); + }); + }); +}); diff --git a/tests/settings/pushover-notification-provider.spec.ts b/tests/settings/pushover-notification-provider.spec.ts new file mode 100644 index 00000000..8cedad40 --- /dev/null +++ b/tests/settings/pushover-notification-provider.spec.ts @@ -0,0 +1,606 @@ +/** + * Pushover Notification Provider E2E Tests + * + * Tests the Pushover notification provider type. + * Covers form rendering, CRUD operations, payload contracts, + * token security, and validation behavior specific to the Pushover provider type. 
+ */ + +import { test, expect, loginUser } from '../fixtures/auth-fixtures'; +import { waitForLoadingComplete } from '../utils/wait-helpers'; + +function generateProviderName(prefix: string = 'pushover-test'): string { + return `${prefix}-${Date.now()}`; +} + +test.describe('Pushover Notification Provider', () => { + test.beforeEach(async ({ page, adminUser }) => { + await loginUser(page, adminUser); + await waitForLoadingComplete(page); + await page.goto('/settings/notifications'); + await waitForLoadingComplete(page); + }); + + test.describe('Form Rendering', () => { + test('should show API token field and user key placeholder when pushover type selected', async ({ page }) => { + await test.step('Open Add Provider form', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + }); + + await test.step('Select pushover provider type', async () => { + await page.getByTestId('provider-type').selectOption('pushover'); + }); + + await test.step('Verify API token field is visible', async () => { + await expect(page.getByTestId('provider-gotify-token')).toBeVisible(); + }); + + await test.step('Verify token field label shows API Token (Application)', async () => { + const tokenLabel = page.getByText(/api token.*application/i); + await expect(tokenLabel.first()).toBeVisible(); + }); + + await test.step('Verify user key placeholder', async () => { + const urlInput = page.getByTestId('provider-url'); + await expect(urlInput).toHaveAttribute('placeholder', 'uQiRzpo4DXghDmr9QzzfQu27cmVRsG'); + }); + + await test.step('Verify User Key label replaces URL label', async () => { + const userKeyLabel = page.getByText(/user key/i); + await expect(userKeyLabel.first()).toBeVisible(); + }); + + await test.step('Verify JSON template section is shown for pushover', async () => { + await expect(page.getByTestId('provider-config')).toBeVisible(); + }); + + await test.step('Verify 
save button is accessible', async () => { + const saveButton = page.getByTestId('provider-save-btn'); + await expect(saveButton).toBeVisible(); + await expect(saveButton).toBeEnabled(); + }); + }); + + test('should toggle form fields correctly when switching between pushover and discord', async ({ page }) => { + await test.step('Open Add Provider form', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + }); + + await test.step('Verify discord is default without token field', async () => { + await expect(page.getByTestId('provider-type')).toHaveValue('discord'); + await expect(page.getByTestId('provider-gotify-token')).toHaveCount(0); + }); + + await test.step('Switch to pushover and verify token field appears', async () => { + await page.getByTestId('provider-type').selectOption('pushover'); + await expect(page.getByTestId('provider-gotify-token')).toBeVisible(); + }); + + await test.step('Switch back to discord and verify token field hidden', async () => { + await page.getByTestId('provider-type').selectOption('discord'); + await expect(page.getByTestId('provider-gotify-token')).toHaveCount(0); + }); + }); + + test('should show JSON template section for pushover', async ({ page }) => { + await test.step('Open Add Provider form and select pushover', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + await page.getByTestId('provider-type').selectOption('pushover'); + }); + + await test.step('Verify JSON template config section is visible', async () => { + await expect(page.getByTestId('provider-config')).toBeVisible(); + }); + }); + }); + + test.describe('CRUD Operations', () => { + test('should create a pushover notification provider', async ({ page }) => { + const providerName = generateProviderName('po-create'); + let 
capturedPayload: Record | null = null; + + await test.step('Mock create endpoint to capture payload', async () => { + const createdProviders: Array> = []; + + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'POST') { + const payload = (await request.postDataJSON()) as Record; + capturedPayload = payload; + const { token, gotify_token, ...rest } = payload; + const created: Record = { + id: 'po-provider-1', + ...rest, + ...(token !== undefined || gotify_token !== undefined ? { has_token: true } : {}), + }; + createdProviders.push(created); + await route.fulfill({ + status: 201, + contentType: 'application/json', + body: JSON.stringify(created), + }); + return; + } + + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(createdProviders), + }); + return; + } + + await route.continue(); + }); + }); + + await test.step('Open form and select pushover type', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + await page.getByTestId('provider-type').selectOption('pushover'); + }); + + await test.step('Fill pushover provider form', async () => { + await page.getByTestId('provider-name').fill(providerName); + await page.getByTestId('provider-url').fill('uQiRzpo4DXghDmr9QzzfQu27cmVRsG'); + await page.getByTestId('provider-gotify-token').fill('azGDORePK8gMaC0QOYAMyEEuzJnyUi'); + }); + + await test.step('Configure event notifications', async () => { + await page.getByTestId('notify-proxy-hosts').check(); + await page.getByTestId('notify-certs').check(); + }); + + await test.step('Save provider', async () => { + await Promise.all([ + page.waitForResponse( + (resp) => + /\/api\/v1\/notifications\/providers/.test(resp.url()) && + resp.request().method() === 'POST' && + resp.status() === 201 + ), + 
page.getByTestId('provider-save-btn').click(), + ]); + }); + + await test.step('Verify provider appears in list', async () => { + const providerInList = page.getByText(providerName); + await expect(providerInList.first()).toBeVisible({ timeout: 10000 }); + }); + + await test.step('Verify outgoing payload contract', async () => { + expect(capturedPayload).toBeTruthy(); + expect(capturedPayload?.type).toBe('pushover'); + expect(capturedPayload?.name).toBe(providerName); + expect(capturedPayload?.url).toBe('uQiRzpo4DXghDmr9QzzfQu27cmVRsG'); + expect(capturedPayload?.token).toBe('azGDORePK8gMaC0QOYAMyEEuzJnyUi'); + expect(capturedPayload?.gotify_token).toBeUndefined(); + }); + }); + + test('should edit provider and preserve token when token field left blank', async ({ page }) => { + let updatedPayload: Record | null = null; + + await test.step('Mock existing pushover provider', async () => { + let providers = [ + { + id: 'po-edit-id', + name: 'Pushover Alerts', + type: 'pushover', + url: 'uQiRzpo4DXghDmr9QzzfQu27cmVRsG', + has_token: true, + enabled: true, + notify_proxy_hosts: true, + notify_certs: true, + notify_uptime: false, + }, + ]; + + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(providers), + }); + } else { + await route.continue(); + } + }); + + await page.route('**/api/v1/notifications/providers/*', async (route, request) => { + if (request.method() === 'PUT') { + updatedPayload = (await request.postDataJSON()) as Record; + providers = providers.map((p) => + p.id === 'po-edit-id' ? 
{ ...p, ...updatedPayload } : p + ); + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ success: true }), + }); + } else { + await route.continue(); + } + }); + }); + + await test.step('Reload to get mocked provider', async () => { + await page.reload(); + await waitForLoadingComplete(page); + }); + + await test.step('Verify pushover provider is displayed', async () => { + await expect(page.getByText('Pushover Alerts')).toBeVisible({ timeout: 5000 }); + }); + + await test.step('Click edit on pushover provider', async () => { + const providerRow = page.getByTestId('provider-row-po-edit-id'); + const editButton = providerRow.getByRole('button', { name: /edit/i }); + await expect(editButton).toBeVisible({ timeout: 5000 }); + await editButton.click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + }); + + await test.step('Verify form loads with pushover type', async () => { + await expect(page.getByTestId('provider-type')).toHaveValue('pushover'); + }); + + await test.step('Verify stored token indicator is shown', async () => { + await expect(page.getByTestId('gotify-token-stored-indicator')).toBeVisible(); + }); + + await test.step('Update name without changing token', async () => { + const nameInput = page.getByTestId('provider-name'); + await nameInput.clear(); + await nameInput.fill('Pushover Alerts v2'); + }); + + await test.step('Save changes', async () => { + await Promise.all([ + page.waitForResponse( + (resp) => + /\/api\/v1\/notifications\/providers\/po-edit-id/.test(resp.url()) && + resp.request().method() === 'PUT' && + resp.status() === 200 + ), + page.waitForResponse( + (resp) => + /\/api\/v1\/notifications\/providers/.test(resp.url()) && + resp.request().method() === 'GET' && + resp.status() === 200 + ), + page.getByTestId('provider-save-btn').click(), + ]); + }); + + await test.step('Verify update payload preserves token omission', async () => { + 
expect(updatedPayload).toBeTruthy(); + expect(updatedPayload?.type).toBe('pushover'); + expect(updatedPayload?.name).toBe('Pushover Alerts v2'); + expect(updatedPayload?.token).toBeUndefined(); + expect(updatedPayload?.gotify_token).toBeUndefined(); + }); + }); + + test('should test a pushover notification provider', async ({ page }) => { + let testCalled = false; + + await test.step('Mock existing pushover provider and test endpoint', async () => { + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify([ + { + id: 'po-test-id', + name: 'Pushover Test Provider', + type: 'pushover', + url: 'uQiRzpo4DXghDmr9QzzfQu27cmVRsG', + has_token: true, + enabled: true, + notify_proxy_hosts: true, + notify_certs: true, + notify_uptime: false, + }, + ]), + }); + } else { + await route.continue(); + } + }); + + await page.route('**/api/v1/notifications/providers/test', async (route, request) => { + if (request.method() === 'POST') { + testCalled = true; + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ success: true }), + }); + } else { + await route.continue(); + } + }); + }); + + await test.step('Reload to get mocked provider', async () => { + await page.reload(); + await waitForLoadingComplete(page); + }); + + await test.step('Click Send Test on the provider', async () => { + const providerRow = page.getByTestId('provider-row-po-test-id'); + const sendTestButton = providerRow.getByRole('button', { name: /send test/i }); + await expect(sendTestButton).toBeVisible({ timeout: 5000 }); + await expect(sendTestButton).toBeEnabled(); + await Promise.all([ + page.waitForResponse( + (resp) => + resp.url().includes('/api/v1/notifications/providers/test') && + resp.status() === 200 + ), + sendTestButton.click(), + ]); + }); + + await test.step('Verify test was called', async () => 
{ + expect(testCalled).toBe(true); + }); + }); + + test('should delete a pushover notification provider', async ({ page }) => { + await test.step('Mock existing pushover provider', async () => { + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify([ + { + id: 'po-delete-id', + name: 'Pushover To Delete', + type: 'pushover', + url: 'uQiRzpo4DXghDmr9QzzfQu27cmVRsG', + enabled: true, + }, + ]), + }); + } else { + await route.continue(); + } + }); + + await page.route('**/api/v1/notifications/providers/*', async (route, request) => { + if (request.method() === 'DELETE') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ success: true }), + }); + } else { + await route.continue(); + } + }); + }); + + await test.step('Reload to get mocked provider', async () => { + await page.reload(); + await waitForLoadingComplete(page); + }); + + await test.step('Verify pushover provider is displayed', async () => { + await expect(page.getByText('Pushover To Delete')).toBeVisible({ timeout: 10000 }); + }); + + await test.step('Delete provider', async () => { + page.on('dialog', async (dialog) => { + expect(dialog.type()).toBe('confirm'); + await dialog.accept(); + }); + + const deleteButton = page.getByRole('button', { name: /delete/i }) + .or(page.locator('button').filter({ has: page.locator('svg.lucide-trash2, svg[class*="trash"]') })); + await Promise.all([ + page.waitForResponse( + (resp) => + resp.url().includes('/api/v1/notifications/providers/po-delete-id') && + resp.status() === 200 + ), + deleteButton.first().click(), + ]); + }); + + await test.step('Verify deletion feedback', async () => { + const successIndicator = page.locator('[data-testid="toast-success"]') + .or(page.getByRole('status').filter({ hasText: /deleted|removed/i })) + 
.or(page.getByText(/no.*providers/i)); + await expect(successIndicator.first()).toBeVisible({ timeout: 5000 }); + }); + }); + }); + + test.describe('Security', () => { + test('GET response should NOT expose the API token value', async ({ page }) => { + let apiResponseBody: Array> | null = null; + + let resolveRouteBody: (data: Array>) => void; + const routeBodyPromise = new Promise>>((resolve) => { + resolveRouteBody = resolve; + }); + + await test.step('Mock provider list with has_token flag', async () => { + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + const body = [ + { + id: 'po-sec-id', + name: 'Pushover Secure', + type: 'pushover', + url: 'uQiRzpo4DXghDmr9QzzfQu27cmVRsG', + has_token: true, + enabled: true, + notify_proxy_hosts: true, + notify_certs: true, + notify_uptime: false, + }, + ]; + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(body), + }); + resolveRouteBody!(body); + } else { + await route.continue(); + } + }); + }); + + await test.step('Navigate to trigger GET', async () => { + await page.reload(); + apiResponseBody = await Promise.race([ + routeBodyPromise, + new Promise>>((_resolve, reject) => + setTimeout( + () => reject(new Error('Timed out waiting for GET /api/v1/notifications/providers')), + 15000 + ) + ), + ]); + await waitForLoadingComplete(page); + }); + + await test.step('Verify API token is not in API response', async () => { + expect(apiResponseBody).toBeTruthy(); + const provider = apiResponseBody![0]; + expect(provider.token).toBeUndefined(); + expect(provider.gotify_token).toBeUndefined(); + const responseStr = JSON.stringify(provider); + expect(responseStr).not.toContain('azGDORePK8gMaC0QOYAMyEEuzJnyUi'); + }); + }); + + test('API token should not appear in the url field or any visible field', async ({ page }) => { + await test.step('Mock provider with clean URL field', async () => { + await 
page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify([ + { + id: 'po-url-sec-id', + name: 'Pushover URL Check', + type: 'pushover', + url: 'uQiRzpo4DXghDmr9QzzfQu27cmVRsG', + has_token: true, + enabled: true, + }, + ]), + }); + } else { + await route.continue(); + } + }); + }); + + await test.step('Reload and verify API token does not appear in provider row', async () => { + await page.reload(); + await waitForLoadingComplete(page); + await expect(page.getByText('Pushover URL Check')).toBeVisible({ timeout: 5000 }); + + const providerRow = page.getByTestId('provider-row-po-url-sec-id'); + const urlText = await providerRow.textContent(); + expect(urlText).not.toContain('azGDORePK8gMaC0QOYAMyEEuzJnyUi'); + expect(urlText).not.toContain('api.pushover.net'); + }); + }); + }); + + test.describe('Payload Contract', () => { + test('POST body should include type=pushover, url field = user key, token field is write-only', async ({ page }) => { + const providerName = generateProviderName('po-contract'); + let capturedPayload: Record | null = null; + let capturedGetResponse: Array> | null = null; + + await test.step('Mock create and list endpoints', async () => { + const createdProviders: Array> = []; + + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'POST') { + const payload = (await request.postDataJSON()) as Record; + capturedPayload = payload; + const { token, gotify_token, ...rest } = payload; + const created: Record = { + id: 'po-contract-1', + ...rest, + has_token: !!(token || gotify_token), + }; + createdProviders.push(created); + await route.fulfill({ + status: 201, + contentType: 'application/json', + body: JSON.stringify(created), + }); + return; + } + + if (request.method() === 'GET') { + capturedGetResponse = [...createdProviders]; + await 
route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(createdProviders), + }); + return; + } + + await route.continue(); + }); + }); + + await test.step('Create a pushover provider via the UI', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + await page.getByTestId('provider-type').selectOption('pushover'); + await page.getByTestId('provider-name').fill(providerName); + await page.getByTestId('provider-url').fill('uQiRzpo4DXghDmr9QzzfQu27cmVRsG'); + await page.getByTestId('provider-gotify-token').fill('azGDORePK8gMaC0QOYAMyEEuzJnyUi'); + + await Promise.all([ + page.waitForResponse( + (resp) => + /\/api\/v1\/notifications\/providers/.test(resp.url()) && + resp.request().method() === 'POST' && + resp.status() === 201 + ), + page.getByTestId('provider-save-btn').click(), + ]); + }); + + await test.step('Verify POST payload: type=pushover, url=user key, token=api token', async () => { + expect(capturedPayload).toBeTruthy(); + expect(capturedPayload?.type).toBe('pushover'); + expect(capturedPayload?.url).toBe('uQiRzpo4DXghDmr9QzzfQu27cmVRsG'); + expect(capturedPayload?.token).toBe('azGDORePK8gMaC0QOYAMyEEuzJnyUi'); + expect(capturedPayload?.gotify_token).toBeUndefined(); + }); + + await test.step('Verify GET response: has_token=true, token value absent', async () => { + await expect(page.getByText(providerName).first()).toBeVisible({ timeout: 10000 }); + expect(capturedGetResponse).toBeTruthy(); + const provider = capturedGetResponse![0]; + expect(provider.has_token).toBe(true); + expect(provider.token).toBeUndefined(); + expect(provider.gotify_token).toBeUndefined(); + const responseStr = JSON.stringify(provider); + expect(responseStr).not.toContain('azGDORePK8gMaC0QOYAMyEEuzJnyUi'); + }); + }); + }); +}); diff --git a/tests/settings/slack-notification-provider.spec.ts 
b/tests/settings/slack-notification-provider.spec.ts new file mode 100644 index 00000000..abdd276f --- /dev/null +++ b/tests/settings/slack-notification-provider.spec.ts @@ -0,0 +1,521 @@ +/** + * Slack Notification Provider E2E Tests + * + * Tests the Slack notification provider type. + * Covers form rendering, CRUD operations, payload contracts, + * webhook URL security, and validation behavior specific to the Slack provider type. + */ + +import { test, expect, loginUser } from '../fixtures/auth-fixtures'; +import { waitForLoadingComplete } from '../utils/wait-helpers'; + +function generateProviderName(prefix: string = 'slack-test'): string { + return `${prefix}-${Date.now()}`; +} + +test.describe('Slack Notification Provider', () => { + test.beforeEach(async ({ page, adminUser }) => { + await loginUser(page, adminUser); + await waitForLoadingComplete(page); + await page.goto('/settings/notifications'); + await waitForLoadingComplete(page); + }); + + test.describe('Form Rendering', () => { + test('should show webhook URL field and channel name when slack type selected', async ({ page }) => { + await test.step('Open Add Provider form', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + }); + + await test.step('Select slack provider type', async () => { + await page.getByTestId('provider-type').selectOption('slack'); + }); + + await test.step('Verify webhook URL (token) field is visible', async () => { + await expect(page.getByTestId('provider-gotify-token')).toBeVisible(); + }); + + await test.step('Verify webhook URL field label shows Webhook URL', async () => { + const tokenLabel = page.getByText(/webhook url/i); + await expect(tokenLabel.first()).toBeVisible(); + }); + + await test.step('Verify channel name placeholder', async () => { + const urlInput = page.getByTestId('provider-url'); + await expect(urlInput).toHaveAttribute('placeholder', 
'#general'); + }); + + await test.step('Verify Channel Name label replaces URL label', async () => { + const channelLabel = page.getByText(/channel name/i); + await expect(channelLabel.first()).toBeVisible(); + }); + + await test.step('Verify JSON template section is shown for slack', async () => { + await expect(page.getByTestId('provider-config')).toBeVisible(); + }); + + await test.step('Verify save button is accessible', async () => { + const saveButton = page.getByTestId('provider-save-btn'); + await expect(saveButton).toBeVisible(); + await expect(saveButton).toBeEnabled(); + }); + }); + + test('should toggle form fields when switching between slack and discord types', async ({ page }) => { + await test.step('Open Add Provider form', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + }); + + await test.step('Verify discord is default without token field', async () => { + await expect(page.getByTestId('provider-type')).toHaveValue('discord'); + await expect(page.getByTestId('provider-gotify-token')).toHaveCount(0); + }); + + await test.step('Switch to slack and verify token field appears', async () => { + await page.getByTestId('provider-type').selectOption('slack'); + await expect(page.getByTestId('provider-gotify-token')).toBeVisible(); + }); + + await test.step('Switch back to discord and verify token field hidden', async () => { + await page.getByTestId('provider-type').selectOption('discord'); + await expect(page.getByTestId('provider-gotify-token')).toHaveCount(0); + }); + }); + + test('should show JSON template section for slack', async ({ page }) => { + await test.step('Open Add Provider form and select slack', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + await page.getByTestId('provider-type').selectOption('slack'); 
+ }); + + await test.step('Verify JSON template config section is visible', async () => { + await expect(page.getByTestId('provider-config')).toBeVisible(); + }); + }); + }); + + test.describe('CRUD Operations', () => { + test('should create slack notification provider', async ({ page }) => { + const providerName = generateProviderName('slack-create'); + let capturedPayload: Record | null = null; + + await test.step('Mock create endpoint to capture payload', async () => { + const createdProviders: Array> = []; + + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'POST') { + const payload = (await request.postDataJSON()) as Record; + capturedPayload = payload; + const { token, gotify_token, ...rest } = payload; + const created: Record = { + id: 'slack-provider-1', + ...rest, + ...(token !== undefined || gotify_token !== undefined ? { has_token: true } : {}), + }; + createdProviders.push(created); + await route.fulfill({ + status: 201, + contentType: 'application/json', + body: JSON.stringify(created), + }); + return; + } + + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(createdProviders), + }); + return; + } + + await route.continue(); + }); + }); + + await test.step('Open form and select slack type', async () => { + await page.getByRole('button', { name: /add.*provider/i }).click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + await page.getByTestId('provider-type').selectOption('slack'); + }); + + await test.step('Fill slack provider form', async () => { + await page.getByTestId('provider-name').fill(providerName); + await page.getByTestId('provider-url').fill('#alerts'); + await page.getByTestId('provider-gotify-token').fill( + 'https://hooks.slack.com/services/T00000000/B00000000/xxxxxxxxxxxxxxxxxxxx' + ); + }); + + await test.step('Configure event notifications', async () => { + 
await page.getByTestId('notify-proxy-hosts').check(); + await page.getByTestId('notify-certs').check(); + }); + + await test.step('Save provider', async () => { + await Promise.all([ + page.waitForResponse( + (resp) => + /\/api\/v1\/notifications\/providers/.test(resp.url()) && + resp.request().method() === 'POST' && + resp.status() === 201 + ), + page.getByTestId('provider-save-btn').click(), + ]); + }); + + await test.step('Verify provider appears in list', async () => { + const providerInList = page.getByText(providerName); + await expect(providerInList.first()).toBeVisible({ timeout: 10000 }); + }); + + await test.step('Verify outgoing payload contract', async () => { + expect(capturedPayload).toBeTruthy(); + expect(capturedPayload?.type).toBe('slack'); + expect(capturedPayload?.name).toBe(providerName); + expect(capturedPayload?.url).toBe('#alerts'); + expect(capturedPayload?.token).toBe( + 'https://hooks.slack.com/services/T00000000/B00000000/xxxxxxxxxxxxxxxxxxxx' + ); + expect(capturedPayload?.gotify_token).toBeUndefined(); + }); + }); + + test('should edit slack notification provider and preserve webhook URL', async ({ page }) => { + let updatedPayload: Record | null = null; + + await test.step('Mock existing slack provider', async () => { + let providers = [ + { + id: 'slack-edit-id', + name: 'Slack Alerts', + type: 'slack', + url: '#alerts', + has_token: true, + enabled: true, + notify_proxy_hosts: true, + notify_certs: true, + notify_uptime: false, + }, + ]; + + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(providers), + }); + } else { + await route.continue(); + } + }); + + await page.route('**/api/v1/notifications/providers/*', async (route, request) => { + if (request.method() === 'PUT') { + updatedPayload = (await request.postDataJSON()) as Record; + providers = providers.map((p) => + 
p.id === 'slack-edit-id' ? { ...p, ...updatedPayload } : p + ); + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ success: true }), + }); + } else { + await route.continue(); + } + }); + }); + + await test.step('Reload to get mocked provider', async () => { + await page.reload(); + await waitForLoadingComplete(page); + }); + + await test.step('Verify slack provider is displayed', async () => { + await expect(page.getByText('Slack Alerts')).toBeVisible({ timeout: 5000 }); + }); + + await test.step('Click edit on slack provider', async () => { + const providerRow = page.getByTestId('provider-row-slack-edit-id'); + const editButton = providerRow.getByRole('button', { name: /edit/i }); + await expect(editButton).toBeVisible({ timeout: 5000 }); + await editButton.click(); + await expect(page.getByTestId('provider-name')).toBeVisible({ timeout: 5000 }); + }); + + await test.step('Verify form loads with slack type', async () => { + await expect(page.getByTestId('provider-type')).toHaveValue('slack'); + }); + + await test.step('Verify stored token indicator is shown', async () => { + await expect(page.getByTestId('gotify-token-stored-indicator')).toBeVisible(); + }); + + await test.step('Update name without changing webhook URL', async () => { + const nameInput = page.getByTestId('provider-name'); + await nameInput.clear(); + await nameInput.fill('Slack Alerts v2'); + }); + + await test.step('Save changes', async () => { + await Promise.all([ + page.waitForResponse( + (resp) => + /\/api\/v1\/notifications\/providers\/slack-edit-id/.test(resp.url()) && + resp.request().method() === 'PUT' && + resp.status() === 200 + ), + page.waitForResponse( + (resp) => + /\/api\/v1\/notifications\/providers/.test(resp.url()) && + resp.request().method() === 'GET' && + resp.status() === 200 + ), + page.getByTestId('provider-save-btn').click(), + ]); + }); + + await test.step('Verify update payload preserves webhook URL omission', async () => 
{ + expect(updatedPayload).toBeTruthy(); + expect(updatedPayload?.type).toBe('slack'); + expect(updatedPayload?.name).toBe('Slack Alerts v2'); + expect(updatedPayload?.token).toBeUndefined(); + expect(updatedPayload?.gotify_token).toBeUndefined(); + }); + }); + + test('should test slack notification provider', async ({ page }) => { + let testCalled = false; + + await test.step('Mock existing slack provider and test endpoint', async () => { + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify([ + { + id: 'slack-test-id', + name: 'Slack Test Provider', + type: 'slack', + url: '#alerts', + has_token: true, + enabled: true, + notify_proxy_hosts: true, + notify_certs: true, + notify_uptime: false, + }, + ]), + }); + } else { + await route.continue(); + } + }); + + await page.route('**/api/v1/notifications/providers/test', async (route, request) => { + if (request.method() === 'POST') { + testCalled = true; + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ success: true }), + }); + } else { + await route.continue(); + } + }); + }); + + await test.step('Reload to get mocked provider', async () => { + await page.reload(); + await waitForLoadingComplete(page); + }); + + await test.step('Click Send Test on the provider', async () => { + const providerRow = page.getByTestId('provider-row-slack-test-id'); + const sendTestButton = providerRow.getByRole('button', { name: /send test/i }); + await expect(sendTestButton).toBeVisible({ timeout: 5000 }); + await expect(sendTestButton).toBeEnabled(); + await Promise.all([ + page.waitForResponse( + (resp) => + resp.url().includes('/api/v1/notifications/providers/test') && + resp.status() === 200 + ), + sendTestButton.click(), + ]); + }); + + await test.step('Verify test was called', async () => { + 
expect(testCalled).toBe(true); + }); + }); + + test('should delete slack notification provider', async ({ page }) => { + await test.step('Mock existing slack provider', async () => { + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify([ + { + id: 'slack-delete-id', + name: 'Slack To Delete', + type: 'slack', + url: '#alerts', + enabled: true, + }, + ]), + }); + } else { + await route.continue(); + } + }); + + await page.route('**/api/v1/notifications/providers/*', async (route, request) => { + if (request.method() === 'DELETE') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify({ success: true }), + }); + } else { + await route.continue(); + } + }); + }); + + await test.step('Reload to get mocked provider', async () => { + await page.reload(); + await waitForLoadingComplete(page); + }); + + await test.step('Verify slack provider is displayed', async () => { + await expect(page.getByText('Slack To Delete')).toBeVisible({ timeout: 10000 }); + }); + + await test.step('Delete provider', async () => { + page.on('dialog', async (dialog) => { + expect(dialog.type()).toBe('confirm'); + await dialog.accept(); + }); + + const deleteButton = page.getByRole('button', { name: /delete/i }) + .or(page.locator('button').filter({ has: page.locator('svg.lucide-trash2, svg[class*="trash"]') })); + await Promise.all([ + page.waitForResponse( + (resp) => + resp.url().includes('/api/v1/notifications/providers/slack-delete-id') && + resp.status() === 200 + ), + deleteButton.first().click(), + ]); + }); + + await test.step('Verify deletion feedback', async () => { + const successIndicator = page.locator('[data-testid="toast-success"]') + .or(page.getByRole('status').filter({ hasText: /deleted|removed/i })) + .or(page.getByText(/no.*providers/i)); + await 
expect(successIndicator.first()).toBeVisible({ timeout: 5000 }); + }); + }); + }); + + test.describe('Security', () => { + test('GET response should NOT expose webhook URL', async ({ page }) => { + let apiResponseBody: Array> | null = null; + + let resolveRouteBody: (data: Array>) => void; + const routeBodyPromise = new Promise>>((resolve) => { + resolveRouteBody = resolve; + }); + + await test.step('Mock provider list with has_token flag', async () => { + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + const body = [ + { + id: 'slack-sec-id', + name: 'Slack Secure', + type: 'slack', + url: '#alerts', + has_token: true, + enabled: true, + notify_proxy_hosts: true, + notify_certs: true, + notify_uptime: false, + }, + ]; + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(body), + }); + resolveRouteBody!(body); + } else { + await route.continue(); + } + }); + }); + + await test.step('Navigate to trigger GET', async () => { + await page.reload(); + apiResponseBody = await routeBodyPromise; + await waitForLoadingComplete(page); + }); + + await test.step('Verify webhook URL is not in API response', async () => { + expect(apiResponseBody).toBeTruthy(); + const provider = apiResponseBody![0]; + expect(provider.token).toBeUndefined(); + expect(provider.gotify_token).toBeUndefined(); + const responseStr = JSON.stringify(provider); + expect(responseStr).not.toContain('hooks.slack.com'); + expect(responseStr).not.toContain('/services/'); + }); + }); + + test('webhook URL should NOT be present in URL field', async ({ page }) => { + await test.step('Mock provider with clean URL field', async () => { + await page.route('**/api/v1/notifications/providers', async (route, request) => { + if (request.method() === 'GET') { + await route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify([ + { + id: 'slack-url-sec-id', + name: 'Slack URL 
Check', + type: 'slack', + url: '#alerts', + has_token: true, + enabled: true, + }, + ]), + }); + } else { + await route.continue(); + } + }); + }); + + await test.step('Reload and verify URL field does not contain webhook URL', async () => { + await page.reload(); + await waitForLoadingComplete(page); + await expect(page.getByText('Slack URL Check')).toBeVisible({ timeout: 5000 }); + + const providerRow = page.getByTestId('provider-row-slack-url-sec-id'); + const urlText = await providerRow.textContent(); + expect(urlText).not.toContain('hooks.slack.com'); + expect(urlText).not.toContain('/services/'); + }); + }); + }); +}); diff --git a/tests/settings/telegram-notification-provider.spec.ts b/tests/settings/telegram-notification-provider.spec.ts index 7cfdb07e..eb1237e4 100644 --- a/tests/settings/telegram-notification-provider.spec.ts +++ b/tests/settings/telegram-notification-provider.spec.ts @@ -409,6 +409,11 @@ test.describe('Telegram Notification Provider', () => { test('GET response should NOT expose bot token', async ({ page }) => { let apiResponseBody: Array> | null = null; + let resolveRouteBody: (data: Array>) => void; + const routeBodyPromise = new Promise>>((resolve) => { + resolveRouteBody = resolve; + }); + await test.step('Mock provider list with has_token flag', async () => { await page.route('**/api/v1/notifications/providers', async (route, request) => { if (request.method() === 'GET') { @@ -430,6 +435,7 @@ test.describe('Telegram Notification Provider', () => { contentType: 'application/json', body: JSON.stringify(body), }); + resolveRouteBody!(body); } else { await route.continue(); } @@ -437,21 +443,16 @@ test.describe('Telegram Notification Provider', () => { }); await test.step('Navigate to trigger GET', async () => { - // Register the response listener BEFORE reload to eliminate the race - // condition where Firefox processes the network response before the - // route callback assignment becomes visible to the test assertion. 
- // waitForLoadingComplete alone is insufficient because the spinner can - // disappear before the providers API response has been intercepted. - const responsePromise = page.waitForResponse( - (resp) => - resp.url().includes('/api/v1/notifications/providers') && - resp.request().method() === 'GET' && - resp.status() === 200, - { timeout: 15000 } - ); await page.reload(); - const response = await responsePromise; - apiResponseBody = (await response.json()) as Array>; + apiResponseBody = await Promise.race([ + routeBodyPromise, + new Promise>>((_resolve, reject) => + setTimeout( + () => reject(new Error('Timed out waiting for GET /api/v1/notifications/providers')), + 15000 + ) + ), + ]); await waitForLoadingComplete(page); });