Compare commits

...

527 Commits

Author SHA1 Message Date
GitHub Actions
3fe592926d chore: update electron-to-chromium version to 1.5.342 in package-lock.json 2026-04-22 00:23:17 +00:00
GitHub Actions
5bcf3069c6 chore: ensure both coverage output directories are created in frontend test coverage script 2026-04-22 00:21:53 +00:00
GitHub Actions
6546130518 chore: update QA report with detailed gate status and revalidation results 2026-04-22 00:13:35 +00:00
GitHub Actions
07108cfa8d chore: refactor frontend test coverage script to improve directory handling and cleanup 2026-04-22 00:13:35 +00:00
GitHub Actions
de945c358b chore: update coverage reports directory configuration in vitest 2026-04-22 00:13:35 +00:00
GitHub Actions
e5c7b85f82 chore: enhance accessibility tests by adding route readiness checks 2026-04-22 00:13:35 +00:00
GitHub Actions
6e06cc3396 chore: update security test paths in Playwright configuration 2026-04-22 00:13:35 +00:00
GitHub Actions
7e3b5b13b4 chore: update @tailwindcss packages to version 4.2.4 and tapable to version 2.3.3 2026-04-22 00:13:35 +00:00
GitHub Actions
91ba53476c chore: update QA/Security DoD Audit Report with latest findings and gate statuses 2026-04-22 00:13:35 +00:00
GitHub Actions
442425a4a5 chore: update version to v0.27.0 2026-04-22 00:13:35 +00:00
GitHub Actions
71fe278e33 chore: update Docker client initialization and container listing logic 2026-04-22 00:13:35 +00:00
GitHub Actions
468af25887 chore: add lefthook and backend test output files to .gitignore 2026-04-22 00:13:35 +00:00
GitHub Actions
d437de1ccf chore: add new output files to .gitignore for scan and coverage results 2026-04-22 00:13:35 +00:00
GitHub Actions
8c56f40131 chore: remove unused libc entries and clean up dependencies in package-lock.json 2026-04-22 00:13:35 +00:00
GitHub Actions
2bf4f869ab chore: update vulnerability suppression and documentation for CVE-2026-34040 in .grype.yaml, .trivyignore, and SECURITY.md 2026-04-22 00:13:35 +00:00
GitHub Actions
dd698afa7e chore: update go.mod and go.sum to remove unused dependencies and add new ones 2026-04-22 00:13:35 +00:00
GitHub Actions
5db3f7046c chore: add accessibility test suite documentation and baseline expiration dates 2026-04-22 00:13:35 +00:00
GitHub Actions
b59a788101 chore: include accessibility scans in non-security CI shards
Add automated accessibility suite execution to the standard non-security
end-to-end browser shards so regressions are caught during routine CI runs.

This change is necessary to enforce accessibility checks consistently across
Chromium, Firefox, and WebKit without creating a separate pipeline path.

Behavior impact:
- Non-security shard jobs now run accessibility tests alongside existing suites
- Security-specific job behavior remains unchanged
- Sharding logic remains unchanged, with only test scope expanded

Operational consideration:
- Monitor shard runtime balance after rollout; if sustained skew appears,
  split accessibility coverage into its own sharded workflow stage.
2026-04-22 00:13:35 +00:00
GitHub Actions
e7460f7e50 chore: update accessibility baseline and enhance loading waits for a11y tests 2026-04-22 00:13:35 +00:00
GitHub Actions
1e1727faa1 chore: add accessibility tests for domains, notifications, setup, and tasks pages 2026-04-22 00:13:35 +00:00
GitHub Actions
0c87c350e5 chore: add accessibility tests for security and uptime pages 2026-04-22 00:13:35 +00:00
GitHub Actions
03101012b9 chore: add accessibility tests for various pages including certificates, dashboard, dns providers, login, proxy hosts, and settings 2026-04-22 00:13:35 +00:00
GitHub Actions
5f855ea779 chore: add accessibility testing support with @axe-core/playwright and related utilities 2026-04-22 00:13:35 +00:00
GitHub Actions
a74d10d138 doc: Integrate @axe-core/playwright for Automated Accessibility Testing
Co-authored-by: Copilot <copilot@github.com>
2026-04-22 00:13:35 +00:00
Jeremy
515a95aaf1 Merge pull request #968 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-21 20:08:35 -04:00
renovate[bot]
1bcb4de6f8 fix(deps): update non-major-updates 2026-04-21 22:49:48 +00:00
Jeremy
07764db43e Merge pull request #966 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update non-major-updates (feature/beta-release)
2026-04-21 09:12:51 -04:00
renovate[bot]
54f32c03d0 chore(deps): update non-major-updates 2026-04-21 12:38:30 +00:00
Jeremy
c983250327 Merge pull request #965 from Wikid82/development
Propagate changes from development into feature/beta-release
2026-04-20 20:57:07 -04:00
Jeremy
2308f372d7 Merge pull request #964 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-20 17:56:55 -04:00
Jeremy
d68001b949 Merge pull request #963 from Wikid82/main
Propagate changes from main into development
2026-04-20 17:56:25 -04:00
Jeremy
a599623ea9 Merge branch 'development' into main 2026-04-20 17:55:51 -04:00
renovate[bot]
96f0be19a4 fix(deps): update non-major-updates 2026-04-20 21:45:50 +00:00
Jeremy
0f0a442d74 Merge pull request #962 from Wikid82/hotfix/ci
fix(ci): shift GeoLite2 update to Sunday targeting development branch
2026-04-20 12:56:13 -04:00
Jeremy
c1470eaac0 Merge pull request #961 from Wikid82/development
Propagate changes from development into feature/beta-release
2026-04-20 12:37:40 -04:00
GitHub Actions
2123fbca77 fix(ci): shift GeoLite2 update to Sunday targeting development branch
Co-authored-by: Copilot <copilot@github.com>
2026-04-20 16:35:02 +00:00
Jeremy
a8cd4bf34c Merge branch 'feature/beta-release' into development 2026-04-20 12:17:15 -04:00
Jeremy
02911109ef Merge pull request #960 from Wikid82/main
Propagate changes from main into development
2026-04-20 08:50:29 -04:00
GitHub Actions
2bad9fec53 fix: make URL preview invite modal test deterministic 2026-04-20 12:48:33 +00:00
Jeremy
54ce6f677c Merge pull request #959 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-20 08:34:32 -04:00
Jeremy
26a75f5fe3 Merge branch 'development' into main 2026-04-20 08:26:40 -04:00
Jeremy
ad7704c1df Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-04-20 08:02:55 -04:00
Jeremy
877fee487b Merge pull request #958 from Wikid82/bot/update-geolite2-checksum
chore(docker): update GeoLite2-Country.mmdb checksum
2026-04-20 07:57:00 -04:00
GitHub Actions
330ccae82f fix: update vulnerability suppression for buger/jsonparser to reflect upstream fix availability 2026-04-20 11:56:26 +00:00
renovate[bot]
0a5bb296a9 fix(deps): update non-major-updates 2026-04-20 11:56:08 +00:00
GitHub Actions
437a35bd47 fix: replace div with button for close action in whitelist delete modal
Co-authored-by: Copilot <copilot@github.com>
2026-04-20 11:29:10 +00:00
GitHub Actions
612d3655fa fix: improve IP normalization in normalizeIPOrCIDR function
Co-authored-by: Copilot <copilot@github.com>
2026-04-20 11:27:56 +00:00
GitHub Actions
38cdc5d9d0 fix(deps): update @oxc-project/types and @rolldown dependencies to version 0.126.0 and 1.0.0-rc.16 respectively 2026-04-20 11:16:56 +00:00
GitHub Actions
816124634b fix(deps): update @oxc-parser dependencies to version 0.126.0 and remove unused packages 2026-04-20 11:16:20 +00:00
GitHub Actions
2b2f3c876b chore: fix Renovate lookup failure for google/uuid dependency 2026-04-20 11:02:31 +00:00
Jeremy
20f2624653 Merge pull request #957 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-20 06:51:03 -04:00
Wikid82
6509bb5d1b chore(docker): update GeoLite2-Country.mmdb checksum
Automated checksum update for GeoLite2-Country.mmdb database.

Old: b018842033872f19ed9ccefb863ec954f8024db2ae913d0d4ea14e35ace4eba1
New: 62049119bd084e19fff4689bebe258f18a5f27a386e6d26ba5180941b613fc2b

Auto-generated by: .github/workflows/update-geolite2.yml
2026-04-20 02:58:45 +00:00
Jeremy
e8724c5edc Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-04-19 17:13:04 -04:00
GitHub Actions
2c284bdd49 test: add tests for handling empty UUID in DeleteWhitelist and invalid CIDR in Add method 2026-04-19 21:11:14 +00:00
GitHub Actions
db1e77ceb3 test(coverage): cover all modified lines for 100% patch coverage vs origin/main
- Add domains field to certificate mock to exercise per-domain loop
  in Dashboard component, covering the previously untested branch
- Extend CrowdSec whitelist test suite with backdrop-click close test
  to cover the dialog dismissal handler
- Remove duplicate describe blocks introduced when whitelist API tests
  were appended to crowdsec.test.ts, resolving ESLint vitest/no-identical-title
  errors that were blocking pre-commit hooks
2026-04-19 21:08:26 +00:00
GitHub Actions
df5e69236a fix(deps): update dependencies for improved stability and performance 2026-04-19 21:03:48 +00:00
renovate[bot]
a3259b042d fix(deps): update non-major-updates 2026-04-19 17:10:33 +00:00
GitHub Actions
f5e7c2bdfc fix(test): resolve CrowdSec card title lookup in Security test mock
The Security component renders the CrowdSec card title using the nested
translation key 'security.crowdsec.title', but the test mock only had the
flat key 'security.crowdsec'. The mock fallback returns the key string
itself when a lookup misses, causing getByText('CrowdSec') to find nothing.

Added 'security.crowdsec.title' to the securityTranslations map so the
mock resolves to the expected 'CrowdSec' string, matching the component's
actual t() call and allowing the title assertion to pass.
2026-04-18 01:39:06 +00:00
GitHub Actions
0859ab31ab fix(deps): update modernc.org/sqlite to version 1.49.1 for improved functionality 2026-04-18 01:36:58 +00:00
GitHub Actions
c02219cc92 fix(deps): update @asamuzakjp/dom-selector, @humanfs/core, @humanfs/node, and hasown to latest versions; add @humanfs/types dependency 2026-04-18 01:35:43 +00:00
GitHub Actions
d73b3aee5c fix(deps): update @humanfs/core and @humanfs/node to latest versions and add @humanfs/types dependency 2026-04-18 01:34:43 +00:00
Jeremy
80eb91e9a1 Merge pull request #956 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-17 21:33:31 -04:00
renovate[bot]
aa6c751007 fix(deps): update non-major-updates 2026-04-17 20:39:46 +00:00
GitHub Actions
1af786e7c8 fix: update eslint-plugin-react-hooks and typescript to latest versions for improved compatibility 2026-04-16 23:53:11 +00:00
GitHub Actions
c46c1976a2 fix: update typescript to version 6.0.3 for improved functionality and security 2026-04-16 23:52:39 +00:00
GitHub Actions
3b3ea83ecd chore: add database error handling tests for whitelist service and handler 2026-04-16 23:51:01 +00:00
GitHub Actions
5980a8081c fix: improve regex for delete button name matching in CrowdSec IP Whitelist tests 2026-04-16 14:12:07 +00:00
GitHub Actions
55f64f8050 fix: update translation keys for CrowdSec security titles and badges 2026-04-16 14:07:36 +00:00
GitHub Actions
983ae34147 fix(docker): persist CrowdSec LAPI database across container rebuilds 2026-04-16 14:04:15 +00:00
GitHub Actions
4232c0a8ee fix: update benchmark-action/github-action-benchmark to v1.22.0 and mlugg/setup-zig to v2.2.1 for improved security and functionality 2026-04-16 13:34:36 +00:00
GitHub Actions
402a8b3105 fix: update electron-to-chromium, eslint-plugin-sonarjs, minimatch, and ts-api-utils to latest versions 2026-04-16 13:34:36 +00:00
GitHub Actions
f46bb838ca feat: add QA audit report for CrowdSec IP Whitelist Management 2026-04-16 13:34:36 +00:00
GitHub Actions
3d0179a119 fix: update @asamuzakjp/css-color and @asamuzakjp/dom-selector to latest versions and add @asamuzakjp/generational-cache dependency 2026-04-16 13:34:36 +00:00
GitHub Actions
557b33dc73 fix: update docker/go-connections dependency to v0.7.0 2026-04-16 13:34:36 +00:00
GitHub Actions
2a1652d0b1 feat: add IP whitelist management details to architecture documentation 2026-04-16 13:34:36 +00:00
GitHub Actions
f0fdf9b752 test: update response key for whitelist entries and add validation test for missing fields 2026-04-16 13:34:36 +00:00
GitHub Actions
973efd6412 fix: initialize WhitelistSvc only if db is not nil and update error message in AddWhitelist handler 2026-04-16 13:34:36 +00:00
GitHub Actions
028342c63a fix: update JSON response key for whitelist entries in ListWhitelists handler 2026-04-16 13:34:36 +00:00
GitHub Actions
eb9b907ba3 feat: add end-to-end tests for CrowdSec IP whitelist management 2026-04-16 13:34:36 +00:00
GitHub Actions
aee0eeef82 feat: add unit tests for useCrowdSecWhitelist hooks 2026-04-16 13:34:36 +00:00
GitHub Actions
c977cf6190 feat: add whitelist management functionality to CrowdSecConfig 2026-04-16 13:34:36 +00:00
GitHub Actions
28bc73bb1a feat: add whitelist management hooks for querying and mutating whitelist entries 2026-04-16 13:34:36 +00:00
GitHub Actions
19719693b0 feat: add unit tests for CrowdSecWhitelistService and CrowdsecHandler 2026-04-16 13:34:36 +00:00
GitHub Actions
a243066691 feat: regenerate whitelist YAML on CrowdSec startup 2026-04-16 13:34:36 +00:00
GitHub Actions
741a59c333 feat: add whitelist management endpoints to CrowdsecHandler 2026-04-16 13:34:36 +00:00
GitHub Actions
5642a37c44 feat: implement CrowdSecWhitelistService for managing IP/CIDR whitelists 2026-04-16 13:34:36 +00:00
GitHub Actions
1726a19cb6 feat: add CrowdSecWhitelist model and integrate into API route registration 2026-04-16 13:34:36 +00:00
GitHub Actions
40090cda23 feat: add installation of crowdsecurity/whitelists parser 2026-04-16 13:34:36 +00:00
Jeremy
9945fac150 Merge branch 'development' into feature/beta-release 2026-04-16 09:33:49 -04:00
Jeremy
9c416599f8 Merge pull request #955 from Wikid82/renovate/development-non-major-updates
chore(deps): update node.js to 8510330 (development)
2026-04-16 09:33:22 -04:00
Jeremy
abf88ab4cb Merge pull request #954 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update non-major-updates (feature/beta-release)
2026-04-16 09:33:04 -04:00
renovate[bot]
34903cdd49 chore(deps): update node.js to 8510330 2026-04-16 13:26:43 +00:00
renovate[bot]
98c720987d chore(deps): update non-major-updates 2026-04-16 13:26:37 +00:00
Jeremy
1bd7eab223 Merge pull request #953 from Wikid82/development
Propagate changes from development into feature/beta-release
2026-04-16 09:25:43 -04:00
Jeremy
080e17d85a Merge pull request #951 from Wikid82/main
chore(config): migrate config .github/renovate.json
2026-04-15 13:23:05 -04:00
Jeremy
a059edf60d Merge pull request #950 from Wikid82/main
chore(config): migrate config .github/renovate.json
2026-04-15 13:22:15 -04:00
GitHub Actions
0a3b64ba5c fix: correct misplaced env block in propagate-changes workflow 2026-04-15 17:19:19 +00:00
Jeremy
8ee0d0403a Merge pull request #949 from Wikid82/renovate/migrate-config
chore(config): migrate Renovate config
2026-04-15 13:07:10 -04:00
renovate[bot]
9dab9186e5 chore(config): migrate config .github/renovate.json 2026-04-15 17:05:08 +00:00
Jeremy
c63e4a3d6b Merge pull request #928 from Wikid82/feature/beta-release
feat: Custom Certificate Upload & Management
2026-04-15 12:54:04 -04:00
GitHub Actions
0e8ff1bc2a fix(deps): update @napi-rs/wasm-runtime and postcss to latest versions 2026-04-15 16:09:12 +00:00
Jeremy
683967bbfc Merge pull request #948 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-15 12:05:15 -04:00
renovate[bot]
15947616a9 fix(deps): update non-major-updates 2026-04-15 16:02:03 +00:00
GitHub Actions
813985a903 fix(dependencies): update mongo-driver to v2.5.1 2026-04-15 11:38:35 +00:00
GitHub Actions
bd48c17aab chore: update dependencies for prettier and std-env in package-lock.json 2026-04-15 11:37:28 +00:00
GitHub Actions
8239a94938 chore: Add tests for CertificateList and CertificateUploadDialog components
- Implement test to deselect a row checkbox in CertificateList by clicking it a second time.
- Add test to close detail dialog via the close button in CertificateList.
- Add test to close export dialog via the cancel button in CertificateList.
- Add test to show KEY format badge when a .key file is uploaded in CertificateUploadDialog.
- Add test to ensure no format badge is shown for unknown file extensions in CertificateUploadDialog.
2026-04-15 11:35:10 +00:00
GitHub Actions
fb8d80f6a3 fix: correct CertificateUploadDialog tests to provide required key file 2026-04-14 20:40:26 +00:00
GitHub Actions
8090c12556 feat(proxy-host): enhance certificate handling and update form integration 2026-04-14 20:35:11 +00:00
GitHub Actions
0e0d42c9fd fix(certificates): mark key file as aria-required for PEM/DER cert uploads 2026-04-14 19:10:57 +00:00
GitHub Actions
14b48f23b6 fix: add key file requirement message for PEM/DER certificates in CertificateUploadDialog 2026-04-14 16:35:37 +00:00
GitHub Actions
0c0adf0e5a fix: refactor context handling in Register tests for improved cleanup 2026-04-14 16:33:54 +00:00
GitHub Actions
135edd208c fix: update caniuse-lite to version 1.0.30001788 for improved compatibility 2026-04-14 12:58:15 +00:00
GitHub Actions
81a083a634 fix: resolve CI test failures and close patch coverage gaps 2026-04-14 12:42:22 +00:00
GitHub Actions
149a2071c3 fix: update electron-to-chromium to version 1.5.336 for improved compatibility 2026-04-14 02:35:05 +00:00
GitHub Actions
027a1b1f18 fix: replace fireEvent with userEvent for file uploads in CertificateUploadDialog tests 2026-04-14 02:33:25 +00:00
GitHub Actions
7adf39a6a0 fix: update axe-core to version 4.11.3 for improved functionality and security 2026-04-14 02:33:25 +00:00
Jeremy
5408ebc95b Merge pull request #947 from Wikid82/renovate/feature/beta-release-actions-upload-pages-artifact-5.x
chore(deps): update actions/upload-pages-artifact action to v5 (feature/beta-release)
2026-04-13 22:32:42 -04:00
Jeremy
92a90bb8a1 Merge pull request #946 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-13 22:32:26 -04:00
renovate[bot]
6391532b2d fix(deps): update non-major-updates 2026-04-14 01:08:04 +00:00
renovate[bot]
a161163508 chore(deps): update actions/upload-pages-artifact action to v5 2026-04-13 20:32:41 +00:00
GitHub Actions
5b6bf945d9 fix: add key_file validation for PEM/DER uploads and resolve CI test failures 2026-04-13 19:56:35 +00:00
GitHub Actions
877a32f180 fix: enhance form validation for certificate upload by adding required attributes and adjusting test logic 2026-04-13 17:31:05 +00:00
GitHub Actions
1fe8a79ea3 fix: update @typescript-eslint packages to version 8.58.2 and undici to version 7.25.0 2026-04-13 17:29:26 +00:00
GitHub Actions
7c8e8c001c fix: enhance error handling in ConvertPEMToPFX for empty certificate cases 2026-04-13 14:12:47 +00:00
GitHub Actions
29c56ab283 fix: add context parameter to route registration functions for improved lifecycle management 2026-04-13 14:12:47 +00:00
GitHub Actions
0391f2b3e3 fix: add PFX password parameter to ExportCertificate method and update tests 2026-04-13 14:12:47 +00:00
GitHub Actions
942f585dd1 fix: improve error response format in certificate validation 2026-04-13 14:12:47 +00:00
GitHub Actions
3005db6943 fix: remove unnecessary string checks for key file in Upload method 2026-04-13 14:12:47 +00:00
GitHub Actions
f3c33dc81b fix: update golang.org/x/term to v0.42.0 for compatibility improvements 2026-04-13 14:12:47 +00:00
Jeremy
44e2bdec95 Merge branch 'development' into feature/beta-release 2026-04-13 09:25:51 -04:00
Jeremy
d71fc0b95f Merge pull request #945 from Wikid82/renovate/development-pin-dependencies
chore(deps): pin dependencies (development)
2026-04-13 09:18:48 -04:00
renovate[bot]
f295788ac1 chore(deps): pin dependencies 2026-04-13 13:17:54 +00:00
GitHub Actions
c19aa55fd7 chore: update package-lock.json to upgrade dependencies for improved stability 2026-04-13 13:10:40 +00:00
GitHub Actions
ea3d93253f fix: update CADDY_SECURITY_VERSION to 1.1.62 for improved security 2026-04-13 13:10:40 +00:00
Jeremy
114dca89c6 Merge pull request #944 from Wikid82/renovate/feature/beta-release-major-7-github-artifact-actions
chore(deps): update actions/upload-artifact action to v7 (feature/beta-release)
2026-04-13 09:05:00 -04:00
Jeremy
c7932fa1d9 Merge pull request #942 from Wikid82/renovate/feature/beta-release-actions-setup-go-6.x
chore(deps): update actions/setup-go action to v6 (feature/beta-release)
2026-04-13 09:03:23 -04:00
renovate[bot]
f0ffc27ca7 chore(deps): update actions/upload-artifact action to v7 2026-04-13 13:02:54 +00:00
Jeremy
4dfcf70c08 Merge pull request #941 from Wikid82/renovate/feature/beta-release-actions-github-script-9.x
chore(deps): update actions/github-script action to v9 (feature/beta-release)
2026-04-13 09:02:37 -04:00
Jeremy
71b34061d9 Merge pull request #940 from Wikid82/renovate/feature/beta-release-actions-checkout-6.x
chore(deps): update actions/checkout action to v6 (feature/beta-release)
2026-04-13 09:02:14 -04:00
renovate[bot]
368130b07a chore(deps): update actions/setup-go action to v6 2026-04-13 13:01:36 +00:00
renovate[bot]
85216ba6e0 chore(deps): update actions/github-script action to v9 2026-04-13 13:01:30 +00:00
renovate[bot]
06aacdee98 chore(deps): update actions/checkout action to v6 2026-04-13 13:01:24 +00:00
Jeremy
ef44ae40ec Merge branch 'development' into feature/beta-release 2026-04-13 08:49:52 -04:00
Jeremy
26ea2e9da1 Merge pull request #937 from Wikid82/main
Propagate changes from main into development
2026-04-13 08:49:17 -04:00
Jeremy
b90da3740c Merge pull request #936 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update renovatebot/github-action action to v46.1.9 (feature/beta-release)
2026-04-13 08:48:48 -04:00
Jeremy
83b361ae57 Merge pull request #938 from Wikid82/nightly
Weekly: Promote nightly to main (2026-04-13)
2026-04-13 08:48:06 -04:00
GitHub Actions
0ae1dc998a test: update certificate deletion tests to use string UUIDs instead of integers 2026-04-13 12:04:47 +00:00
Jeremy
44f475778f Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-04-13 00:42:41 -04:00
Jeremy
7bd3a73bcf Merge pull request #935 from Wikid82/bot/update-geolite2-checksum
chore(docker): update GeoLite2-Country.mmdb checksum
2026-04-13 00:42:15 -04:00
GitHub Actions
48f6b7a12b fix: update Dockerfile to include musl and musl-utils in apk upgrade for improved compatibility 2026-04-13 04:40:02 +00:00
renovate[bot]
122e1fc20b chore(deps): update renovatebot/github-action action to v46.1.9 2026-04-13 04:38:53 +00:00
GitHub Actions
850550c5da test: update common name display test to match exact text 2026-04-13 04:38:26 +00:00
GitHub Actions
3b4fa064d6 test: add end-to-end tests for certificate export dialog functionality 2026-04-13 04:32:26 +00:00
GitHub Actions
78a9231c8a chore: add test_output.txt to .gitignore to exclude test output files from version control 2026-04-13 04:24:16 +00:00
GitHub Actions
e88a4c7982 chore: update package-lock.json to remove unused dependencies and improve overall package management 2026-04-13 04:10:16 +00:00
GitHub Actions
9c056faec7 fix: downgrade versions of css-color, brace-expansion, baseline-browser-mapping, and electron-to-chromium for compatibility 2026-04-13 04:07:49 +00:00
GitHub Actions
e865fa2b8b chore: update package.json and package-lock.json to include vitest and coverage dependencies 2026-04-13 04:03:30 +00:00
GitHub Actions
e1bc648dfc test: add certificate feature unit tests and null-safety fix
Add comprehensive unit tests for the certificate upload, export,
and detail management feature:

- CertificateExportDialog: 21 tests covering format selection,
  blob download, error handling, and password-protected exports
- CertificateUploadDialog: 23 tests covering file validation,
  format detection, drag-and-drop, and upload flow
- CertificateDetailDialog: 19 tests covering detail display,
  loading state, missing fields, and branch coverage
- CertificateChainViewer: 8 tests covering chain visualization
- CertificateValidationPreview: 16 tests covering validation display
- FileDropZone: 18 tests covering drag-and-drop interactions
- useCertificates hooks: 10 tests covering all React Query hooks
- certificates API: 7 new tests for previously uncovered endpoints

Fix null-safety issue in ProxyHosts where cert.domains could be
undefined, causing a runtime error on split().

Frontend patch coverage: 90.6%, overall lines: 89.09%
2026-04-13 04:02:31 +00:00
GitHub Actions
9d8d97e556 fix: update @csstools/css-calc, @csstools/css-color-parser, @tanstack/query-core, globals, builtin-modules, knip, and undici to latest versions for improved functionality and security 2026-04-13 04:02:31 +00:00
GitHub Actions
9dc55675ca fix: update Coraza Caddy version to 2.5.0 for compatibility 2026-04-13 04:01:31 +00:00
GitHub Actions
30c9d735aa feat: add certificate export and upload dialogs
- Implemented CertificateExportDialog for exporting certificates in various formats (PEM, PFX, DER) with options to include private keys and set passwords.
- Created CertificateUploadDialog for uploading certificates, including validation and support for multiple file types (certificates, private keys, chain files).
- Updated DeleteCertificateDialog to use 'domains' instead of 'domain' for consistency.
- Refactored BulkDeleteCertificateDialog and DeleteCertificateDialog tests to accommodate changes in certificate structure.
- Added FileDropZone component for improved file upload experience.
- Enhanced translation files with new keys for certificate management features.
- Updated Certificates page to utilize the new CertificateUploadDialog and clean up the upload logic.
- Adjusted Dashboard and ProxyHosts pages to reflect changes in certificate data structure.
2026-04-13 04:01:31 +00:00
GitHub Actions
e49ea7061a fix: add go-pkcs12 v0.7.1 for PKCS#12 support 2026-04-13 04:01:31 +00:00
GitHub Actions
5c50d8b314 fix: update brace-expansion version to 1.1.14 for improved compatibility 2026-04-13 04:01:30 +00:00
Wikid82
00ba5b3650 chore(docker): update GeoLite2-Country.mmdb checksum
Automated checksum update for GeoLite2-Country.mmdb database.

Old: f5e80a9a3129d46e75c8cccd66bfac725b0449a6c89ba5093a16561d58f20bda
New: b018842033872f19ed9ccefb863ec954f8024db2ae913d0d4ea14e35ace4eba1

Auto-generated by: .github/workflows/update-geolite2.yml
2026-04-13 02:59:03 +00:00
Jeremy
af95c1bdb3 Merge pull request #934 from Wikid82/renovate/feature/beta-release-softprops-action-gh-release-3.x
chore(deps): update softprops/action-gh-release action to v3 (feature/beta-release)
2026-04-12 21:14:11 -04:00
renovate[bot]
01e3d910f1 chore(deps): update softprops/action-gh-release action to v3 2026-04-13 01:12:42 +00:00
Jeremy
1230694f55 Merge pull request #933 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-12 21:06:36 -04:00
renovate[bot]
77f15a225f fix(deps): update non-major-updates 2026-04-12 16:50:55 +00:00
Jeremy
d75abb80d1 Merge pull request #932 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-11 16:19:08 -04:00
GitHub Actions
42bc897610 fix: enhance certificate deletion handling with UUID validation and logging improvements 2026-04-11 17:54:42 +00:00
renovate[bot]
b15f7c3fbc fix(deps): update non-major-updates 2026-04-11 17:47:55 +00:00
GitHub Actions
bb99dacecd fix: update zlib and add libcrypto3 and libssl3 for improved security 2026-04-11 17:33:44 +00:00
GitHub Actions
4b925418f2 feat: Add certificate validation service with parsing and metadata extraction
- Implemented certificate parsing for PEM, DER, and PFX formats.
- Added functions to validate key matches and certificate chains.
- Introduced metadata extraction for certificates including common name, domains, and issuer organization.
- Created unit tests for all new functionalities to ensure reliability and correctness.
2026-04-11 07:17:45 +00:00
GitHub Actions
9e82efd23a fix: downgrade delve version from 1.26.2 to 1.26.1 for compatibility 2026-04-11 00:11:25 +00:00
GitHub Actions
8f7c10440c chore: align agent and instruction files with single-PR commit-slicing model
- Rewrote commit slicing guidance in Management, Planning, and subagent
  instruction files to enforce one-feature-one-PR with ordered logical commits
- Removed multi-PR branching logic from the execution workflow
- Prevents partial feature merges that cause user confusion on self-hosted tools
- All cross-references now use "Commit N" instead of "PR-N"
2026-04-10 23:41:05 +00:00
GitHub Actions
a439e1d467 fix: add git to Dockerfile dependencies for improved build capabilities 2026-04-10 21:03:54 +00:00
Jeremy
718a957ad9 Merge branch 'development' into feature/beta-release 2026-04-10 16:53:27 -04:00
GitHub Actions
059ff9c6b4 fix: update Go version from 1.26.1 to 1.26.2 in Dockerfile and documentation for security improvements 2026-04-10 20:48:46 +00:00
Jeremy
062b86642d Merge pull request #927 from Wikid82/feature/beta-release
fix: dependency updates, CVE suppression management, and Renovate version constraints
2026-04-10 16:32:51 -04:00
GitHub Actions
a5724aecf9 fix: update indirect dependencies for golang.org/x/arch, modernc.org/libc, and modernc.org/sqlite to latest versions 2026-04-10 19:22:04 +00:00
GitHub Actions
53dccbe82b fix: update baseline-browser-mapping and call-bind versions for security and compatibility 2026-04-10 19:22:04 +00:00
Jeremy
8d6645415a Merge pull request #926 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update non-major-updates (feature/beta-release)
2026-04-10 15:21:01 -04:00
renovate[bot]
4cfcc9aa02 chore(deps): update non-major-updates 2026-04-10 19:18:28 +00:00
Jeremy
5d384e4afa Merge pull request #925 from Wikid82/renovate/feature/beta-release-actions-github-script-9.x
chore(deps): update actions/github-script action to v9 (feature/beta-release)
2026-04-10 15:17:21 -04:00
Jeremy
5bf25fdebc Merge pull request #924 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-10 15:17:05 -04:00
Jeremy
253d1ddd29 Merge branch 'feature/beta-release' into renovate/feature/beta-release-actions-github-script-9.x 2026-04-10 11:58:09 -04:00
Jeremy
5eab41b559 Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-04-10 11:57:50 -04:00
GitHub Actions
a076bb3265 chore(security): re-evaluate overdue CVE suppressions 2026-04-10 15:56:43 +00:00
Jeremy
9c85d9e737 Merge branch 'feature/beta-release' into renovate/feature/beta-release-actions-github-script-9.x 2026-04-10 11:41:55 -04:00
Jeremy
1de4ce6729 Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-04-10 11:41:36 -04:00
GitHub Actions
8e0f88e8bd fix: add suppression for CVE-2026-32286 due to pgproto3/v2 buffer overflow vulnerability 2026-04-10 15:39:52 +00:00
GitHub Actions
36460a884e fix: enforce version constraints for pgx and go-jose modules to maintain compatibility 2026-04-10 15:26:36 +00:00
renovate[bot]
585ae9494d chore(deps): update actions/github-script action to v9 2026-04-10 15:11:56 +00:00
renovate[bot]
ed9d6fe5d8 fix(deps): update non-major-updates 2026-04-10 15:11:47 +00:00
Jeremy
f0147b1315 Merge pull request #921 from Wikid82/feature/beta-release
fix: resolve 5 HIGH-severity CVEs blocking nightly container image scan
2026-04-09 21:13:01 -04:00
GitHub Actions
615e5a95f5 fix: downgrade pgx/v4 to v4.18.3 to address buffer overflow vulnerability 2026-04-09 19:09:25 +00:00
Jeremy
5b85d18217 Merge pull request #923 from Wikid82/renovate/feature/beta-release-github.com-jackc-pgx-v4-5.x
chore(deps): update module github.com/jackc/pgx/v4 to v5 (feature/beta-release)
2026-04-09 14:31:42 -04:00
renovate[bot]
f05c24dd66 chore(deps): update module github.com/jackc/pgx/v4 to v5 2026-04-09 18:24:32 +00:00
GitHub Actions
fd11279aa3 fix: update security policy for CVE-2026-31790 and CVE-2026-2673 vulnerabilities 2026-04-09 17:59:56 +00:00
GitHub Actions
59282952b0 fix(ci): provide Go 1.26.2 toolchain for Renovate dependency lookups 2026-04-09 17:55:51 +00:00
GitHub Actions
8742c76d52 fix: add Grype ignore for unfixed Alpine OpenSSL CVE-2026-31790
No upstream fix available for libcrypto3/libssl3 in Alpine 3.23.3.
Accepted risk documented in SECURITY.md. Monitoring Alpine security
advisories for patch availability.
2026-04-09 17:52:04 +00:00
Jeremy
9c0193e812 Merge pull request #922 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-09 13:42:52 -04:00
renovate[bot]
64465e1cd9 fix(deps): update non-major-updates 2026-04-09 17:27:46 +00:00
GitHub Actions
580e20d573 fix: resolve 5 HIGH-severity CVEs blocking nightly container image scan
Patch vulnerable transitive dependencies across all three compiled
binaries in the Docker image (backend, Caddy, CrowdSec):

- go-jose/v3 and v4: JOSE/JWT validation bypass (CVE-2026-34986)
- otel/sdk: resource leak in OpenTelemetry SDK (CVE-2026-39883)
- pgproto3/v2: buffer overflow via pgx/v4 bump (CVE-2026-32286)
- AWS SDK v2: event stream injection in CrowdSec deps (GHSA-xmrv-pmrh-hhx2)
- OTel HTTP exporters: request smuggling (CVE-2026-39882)
- gRPC: bumped to v1.80.0 for transitive go-jose/v4 resolution

All Dockerfile patches include Renovate annotations for automated
future tracking. Renovate config extended to cover Go version and
GitHub Action refs in skill example workflows, preventing version
drift in non-CI files. SECURITY.md updated with pre-existing Alpine
base image CVE (no upstream fix available).

Nightly Go stdlib CVEs (1.26.1) self-heal on next development sync;
example workflow pinned to 1.26.2 for correctness.
2026-04-09 17:24:25 +00:00
GitHub Actions
bb496daae3 fix(ci): improve health check for Charon container in nightly build 2026-04-09 14:08:19 +00:00
GitHub Actions
4cd568b0e5 fix(deps): update multiple dependencies in package-lock.json 2026-04-09 14:04:00 +00:00
GitHub Actions
efd70cd651 fix(deps): update golang.org/x/text to v0.36.0 and other dependencies 2026-04-09 14:01:05 +00:00
GitHub Actions
3d4a63b515 fix(go): update Go version to 1.26.2 2026-04-09 13:58:24 +00:00
Jeremy
42cec9e8c3 Merge pull request #919 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-09 09:41:24 -04:00
renovate[bot]
73565e0e0d fix(deps): update non-major-updates 2026-04-09 09:20:57 +00:00
Jeremy
6dddc5db43 Merge pull request #918 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-06 20:30:42 -04:00
renovate[bot]
ef90d1c0d7 fix(deps): update non-major-updates 2026-04-06 21:48:29 +00:00
Jeremy
0354f5cecf Merge pull request #917 from Wikid82/nightly
Weekly: Promote nightly to main (2026-04-06)
2026-04-06 12:14:47 -04:00
Jeremy
2d923246a9 Merge pull request #916 from Wikid82/development
Propagate changes from development into feature/beta-release
2026-04-06 01:24:35 -04:00
Jeremy
241c0d1b35 Merge pull request #914 from Wikid82/renovate/development-non-major-updates
chore(deps): update non-major-updates (development)
2026-04-06 01:08:26 -04:00
Jeremy
a9767baa69 Merge branch 'development' into renovate/development-non-major-updates 2026-04-06 01:08:07 -04:00
Jeremy
79f0080c80 Merge pull request #915 from Wikid82/main
Propagate changes from main into development
2026-04-06 01:07:49 -04:00
renovate[bot]
bfa6fc0920 chore(deps): update non-major-updates 2026-04-06 04:42:28 +00:00
Jeremy
c70c87386e Merge pull request #913 from Wikid82/bot/update-geolite2-checksum
chore(docker): update GeoLite2-Country.mmdb checksum
2026-04-06 00:38:12 -04:00
Jeremy
a5c6eb95c6 Merge pull request #887 from Wikid82/feature/beta-release
Feature: CrowdSec Dashboard Integration with Observable Metrics
2026-04-06 00:37:46 -04:00
Wikid82
f5ab2cddd8 chore(docker): update GeoLite2-Country.mmdb checksum
Automated checksum update for GeoLite2-Country.mmdb database.

Old: 7840f4b8891e7c866f948d4b020cdc12aeea51b09450b44ad96d1f14f6e32879
New: f5e80a9a3129d46e75c8cccd66bfac725b0449a6c89ba5093a16561d58f20bda

Auto-generated by: .github/workflows/update-geolite2.yml
2026-04-06 02:58:45 +00:00
GitHub Actions
47d306b44b fix(docker): ensure CrowdSec hub index and collections bootstrap on every startup 2026-04-05 05:16:26 +00:00
GitHub Actions
5e73ba7bd0 fix(security): add temporary ignore rules for transitive HIGH vulnerabilities 2026-04-05 04:18:54 +00:00
GitHub Actions
32a30434b1 fix(security): prevent client injection of enrichment fields on decisions 2026-04-05 02:51:54 +00:00
GitHub Actions
138426311f fix(models): prevent zero-date serialization for optional ExpiresAt 2026-04-05 02:51:54 +00:00
GitHub Actions
a8ef9dd6ce fix(crowdsec): use read lock for non-mutating cache lookups 2026-04-05 02:51:54 +00:00
GitHub Actions
b48794df14 fix(deps): update smol-toml version constraint to ensure compatibility 2026-04-05 02:51:54 +00:00
GitHub Actions
85a80568b2 fix(ci): load Grype ignore config in supply chain verification 2026-04-05 02:51:54 +00:00
GitHub Actions
fc0e31df56 fix(deps): update tldts and tldts-core to version 7.0.28 for compatibility improvements 2026-04-05 02:51:54 +00:00
Jeremy
cb4ae8367c Merge pull request #910 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update dependency tldts to ^7.0.28 (feature/beta-release)
2026-04-04 22:32:58 -04:00
Jeremy
de020d9901 Merge pull request #909 from Wikid82/renovate/feature/beta-release-react-i18next-17.x
fix(deps): update dependency react-i18next to v17 (feature/beta-release)
2026-04-04 22:24:07 -04:00
renovate[bot]
0634357ee9 fix(deps): update dependency tldts to ^7.0.28 2026-04-05 02:04:41 +00:00
renovate[bot]
9753a13001 fix(deps): update dependency react-i18next to v17 2026-04-04 01:09:32 +00:00
Jeremy
d0deef1537 Merge branch 'development' into feature/beta-release 2026-04-03 21:08:07 -04:00
Jeremy
4603b57224 Merge pull request #908 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-03 21:07:43 -04:00
Jeremy
bb64ca64e2 Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-04-03 21:07:31 -04:00
Jeremy
ce4a9c5626 Merge pull request #896 from Wikid82/renovate/feature/beta-release-react-i18next-17.x
fix(deps): update dependency react-i18next to v17 (feature/beta-release)
2026-04-03 21:07:00 -04:00
renovate[bot]
b45861090d fix(deps): update non-major-updates 2026-04-04 00:58:06 +00:00
Jeremy
4a3f655a49 Merge pull request #907 from Wikid82/main
Propagate changes from main into development
2026-04-03 20:58:02 -04:00
Jeremy
29e069ac94 Merge branch 'feature/beta-release' into renovate/feature/beta-release-react-i18next-17.x 2026-04-03 20:56:19 -04:00
GitHub Actions
625fcf8e5c fix: update Trivy action version and extend vulnerability review dates in configuration files 2026-04-04 00:54:55 +00:00
GitHub Actions
2b8ed06c3c fix: remediate axios supply chain compromise and harden CI workflow permissions 2026-04-04 00:05:27 +00:00
GitHub Actions
34d73ad6ed fix: update dependencies for @emnapi/core, @emnapi/runtime, @emnapi/wasi-threads, @playwright/test, and dotenv for compatibility improvements 2026-04-03 23:20:41 +00:00
GitHub Actions
e06a8cb676 fix: update go-sqlite3 and other dependencies for compatibility and improvements 2026-04-03 22:57:25 +00:00
GitHub Actions
5ba8cd60c8 fix: add npmDedupe to postUpdateOptions for improved dependency management 2026-04-03 22:55:15 +00:00
GitHub Actions
29985714a3 fix: update CORAZA_CADDY_VERSION to 2.4.0 for compatibility improvements 2026-04-03 22:39:40 +00:00
GitHub Actions
64c9d7adbe fix: update CADDY_SECURITY_VERSION to 1.1.61 for security improvements 2026-04-03 22:38:28 +00:00
Jeremy
8d56760c64 Merge branch 'feature/beta-release' into renovate/feature/beta-release-react-i18next-17.x 2026-03-30 21:19:47 -04:00
Jeremy
087ae9cc0d Merge pull request #890 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-30 21:19:28 -04:00
Jeremy
35b003ae5e Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-03-30 21:19:14 -04:00
Jeremy
cab3c68508 Merge pull request #895 from Wikid82/renovate/feature/beta-release-i18next-26.x
fix(deps): update dependency i18next to v26 (feature/beta-release)
2026-03-30 21:17:42 -04:00
Jeremy
b6558d4165 Merge pull request #894 from Wikid82/renovate/feature/beta-release-eslint-plugin-unicorn-64.x
chore(deps): update dependency eslint-plugin-unicorn to v64 (feature/beta-release)
2026-03-30 21:17:25 -04:00
Jeremy
64cbe5a74d Merge pull request #893 from Wikid82/renovate/feature/beta-release-eslint-markdown-8.x
chore(deps): update dependency @eslint/markdown to v8 (feature/beta-release)
2026-03-30 21:17:10 -04:00
Jeremy
1d3e60b4f8 Merge pull request #892 from Wikid82/renovate/feature/beta-release-codecov-codecov-action-6.x
chore(deps): update codecov/codecov-action action to v6 (feature/beta-release)
2026-03-30 21:16:50 -04:00
Jeremy
07e6ad2d09 Merge pull request #891 from Wikid82/renovate/feature/beta-release-actions-deploy-pages-5.x
chore(deps): update actions/deploy-pages action to v5 (feature/beta-release)
2026-03-30 21:16:33 -04:00
Jeremy
1911003db5 Merge pull request #888 from Wikid82/bot/update-geolite2-checksum
chore(docker): update GeoLite2-Country.mmdb checksum
2026-03-30 21:16:01 -04:00
renovate[bot]
543388b5a4 fix(deps): update non-major-updates 2026-03-31 01:08:59 +00:00
Jeremy
e2774cccf7 Merge branch 'feature/beta-release' into renovate/feature/beta-release-react-i18next-17.x 2026-03-30 18:42:13 -04:00
Jeremy
bf4dd17792 Merge branch 'feature/beta-release' into renovate/feature/beta-release-i18next-26.x 2026-03-30 18:41:56 -04:00
Jeremy
4abc29406f Merge branch 'feature/beta-release' into renovate/feature/beta-release-eslint-plugin-unicorn-64.x 2026-03-30 18:41:41 -04:00
Jeremy
b75f92a88b Merge branch 'feature/beta-release' into renovate/feature/beta-release-eslint-markdown-8.x 2026-03-30 18:41:22 -04:00
Jeremy
237a3a4d80 Merge branch 'feature/beta-release' into renovate/feature/beta-release-codecov-codecov-action-6.x 2026-03-30 18:40:59 -04:00
Jeremy
3e926298f2 Merge branch 'feature/beta-release' into renovate/feature/beta-release-actions-deploy-pages-5.x 2026-03-30 18:40:36 -04:00
GitHub Actions
e84df69cb6 fix: add vulnerability suppressions for Docker AuthZ plugin bypass and Moby privilege validation issues 2026-03-30 22:38:33 +00:00
GitHub Actions
0a43a76a4a fix: update CROWDSEC_VERSION to 1.7.7 for compatibility improvements 2026-03-30 22:20:37 +00:00
GitHub Actions
c852838644 fix: update CORAZA_CADDY_VERSION to 2.3.0 for compatibility improvements 2026-03-30 22:19:49 +00:00
GitHub Actions
9740ddb813 fix: update CADDY_SECURITY_VERSION to 1.1.57 for security improvements 2026-03-30 22:19:07 +00:00
renovate[bot]
5abd01f61c fix(deps): update dependency react-i18next to v17 2026-03-30 22:01:12 +00:00
renovate[bot]
e40a241d62 fix(deps): update dependency i18next to v26 2026-03-30 22:01:05 +00:00
renovate[bot]
a72e587d29 chore(deps): update dependency eslint-plugin-unicorn to v64 2026-03-30 22:00:58 +00:00
renovate[bot]
976ae0272b chore(deps): update dependency @eslint/markdown to v8 2026-03-30 22:00:51 +00:00
renovate[bot]
ccd3081d09 chore(deps): update codecov/codecov-action action to v6 2026-03-30 22:00:43 +00:00
renovate[bot]
844c800cd9 chore(deps): update actions/deploy-pages action to v5 2026-03-30 22:00:38 +00:00
Jeremy
ecf314b2e5 Merge branch 'main' into bot/update-geolite2-checksum 2026-03-30 17:56:36 -04:00
Jeremy
a78529e218 Merge pull request #889 from Wikid82/nightly
Weekly: Promote nightly to main (2026-03-30)
2026-03-30 17:56:21 -04:00
Wikid82
e32f3dfb57 chore(docker): update GeoLite2-Country.mmdb checksum
Automated checksum update for GeoLite2-Country.mmdb database.

Old: c6549807950f93f609d6433fa295fa517fbdec0ad975a4aafba69c136d5d2347
New: 7840f4b8891e7c866f948d4b020cdc12aeea51b09450b44ad96d1f14f6e32879

Auto-generated by: .github/workflows/update-geolite2.yml
2026-03-30 02:58:26 +00:00
GitHub Actions
e6c4e46dd8 chore: Refactor test setup for Gin framework
- Removed redundant `gin.SetMode(gin.TestMode)` calls from individual test files.
- Introduced a centralized `TestMain` function in `testmain_test.go` to set the Gin mode for all tests.
- Ensured consistent test environment setup across various handler test files.
2026-03-25 22:00:07 +00:00
GitHub Actions
f40fca844f fix: update CADDY_SECURITY_VERSION to 1.1.53 for security improvements 2026-03-25 20:47:46 +00:00
GitHub Actions
c7daa4ac46 chore(deps): update electron-to-chromium, lucide-react, and undici to latest versions 2026-03-25 19:36:52 +00:00
GitHub Actions
0a4ac41242 fix: update CADDY_SECURITY_VERSION to 1.1.52 for security improvements 2026-03-25 19:34:48 +00:00
GitHub Actions
3336aae2a0 chore: enforce local patch coverage as a blocking DoD gate
- Added ~40 backend tests covering uncovered branches in CrowdSec
  dashboard handlers (error paths, validation, export edge cases)
- Patch coverage improved from 81.5% to 98.3%, exceeding 90% threshold
- Fixed DoD ordering: coverage tests now run before the patch report
  (the report requires coverage artifacts as input)
- Rewrote the local patch coverage DoD step in both the Management agent
  and testing instructions to clarify purpose, prerequisites, required
  action on findings, and blocking gate semantics
- Eliminated ambiguous "advisory" language that allowed agents to skip
  acting on uncovered lines
2026-03-25 19:33:19 +00:00
GitHub Actions
1fe69c2a15 feat: add Top Attacking IPs chart component and integrate into CrowdSec configuration page
- Implemented TopAttackingIPsChart component for visualizing top attacking IPs.
- Created hooks for fetching CrowdSec dashboard data including summary, timeline, top IPs, scenarios, and alerts.
- Added tests for the new hooks to ensure data fetching works as expected.
- Updated translation files for new dashboard terms in multiple languages.
- Refactored CrowdSecConfig page to include a tabbed interface for configuration and dashboard views.
- Added end-to-end tests for CrowdSec dashboard functionality including tab navigation, data display, and interaction with time range and refresh features.
2026-03-25 17:19:15 +00:00
Jeremy
846eedeab0 Merge pull request #885 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update dependency knip to ^6.0.5 (feature/beta-release)
2026-03-25 08:33:19 -04:00
renovate[bot]
37c7c4aeb8 chore(deps): update dependency knip to ^6.0.5 2026-03-25 11:35:17 +00:00
Jeremy
548a2b6851 Merge pull request #883 from Wikid82/feature/beta-release
feat: add support for Ntfy notification provider
2026-03-25 07:32:51 -04:00
GitHub Actions
c64890b5a0 fix: update TRIGGER_PR_NUMBER formatting for consistency in workflow 2026-03-25 10:00:34 +00:00
GitHub Actions
664b440d70 fix: update Ntfy setup instructions for clarity and security token terminology 2026-03-25 09:58:38 +00:00
Jeremy
c929dfbe4a Merge branch 'development' into feature/beta-release 2026-03-25 05:14:17 -04:00
GitHub Actions
20e724f19c fix: update docker-build.yml to include 'development' branch in push triggers 2026-03-25 09:13:15 +00:00
GitHub Actions
a6deff77a7 fix(deps): update electron-to-chromium to version 1.5.323 for improved stability 2026-03-25 08:48:35 +00:00
GitHub Actions
8702d7b76d fix(deps): update CADDY_SECURITY_VERSION to 1.1.51 for security improvements 2026-03-25 04:10:05 +00:00
GitHub Actions
c9f4e42735 fix: update SECURITY.md with new vulnerability details and remediation plans 2026-03-25 04:05:15 +00:00
GitHub Actions
86023788aa feat: add support for Ntfy notification provider
- Updated the list of supported notification provider types to include 'ntfy'.
- Modified the notification settings UI to accommodate the Ntfy provider, including form fields for topic URL and access token.
- Enhanced localization files to include translations for Ntfy-related fields in German, English, Spanish, French, and Chinese.
- Implemented tests for the Ntfy notification provider, covering form rendering, CRUD operations, payload contracts, and security measures.
- Updated existing tests to account for the new Ntfy provider in various scenarios.
2026-03-24 21:04:54 +00:00
GitHub Actions
5a2b6fec9d fix(deps): update katex to v0.16.42 for improved functionality 2026-03-24 20:25:38 +00:00
GitHub Actions
d90dc5af98 fix(deps): update go-toml to v2.3.0 for improved compatibility 2026-03-24 20:10:02 +00:00
Jeremy
1d62a3da5f Merge pull request #882 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-24 13:45:56 -04:00
Jeremy
f237fa595a Merge pull request #873 from Wikid82/feature/beta-release
fix(certificates): allow deletion of expired and unused certificates
2026-03-24 13:45:08 -04:00
renovate[bot]
07ce79b439 fix(deps): update non-major-updates 2026-03-24 17:37:02 +00:00
Jeremy
77511b0994 Merge pull request #881 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-24 08:54:12 -04:00
GitHub Actions
246b83c72d chore: update package-lock.json for dependency version consistency 2026-03-24 12:08:22 +00:00
renovate[bot]
a7e4e12f32 fix(deps): update non-major-updates 2026-03-24 11:59:32 +00:00
Jeremy
91c1fa9d0f Merge pull request #879 from Wikid82/renovate/feature/beta-release-major-1-lucide-monorepo
fix(deps): update dependency lucide-react to v1 (feature/beta-release)
2026-03-24 07:57:18 -04:00
Jeremy
5a2698123e Merge pull request #878 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-24 07:53:22 -04:00
Jeremy
752e4dbd66 Merge branch 'feature/beta-release' into renovate/feature/beta-release-major-1-lucide-monorepo 2026-03-24 02:42:23 -04:00
Jeremy
f2769eca1a Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-03-24 02:42:04 -04:00
Jeremy
e779041039 Merge branch 'development' into feature/beta-release 2026-03-24 02:41:29 -04:00
Jeremy
6c6c3f3373 Merge pull request #880 from Wikid82/main
Propagate changes from main into development
2026-03-24 02:41:00 -04:00
GitHub Actions
59adf32861 fix(deps): resolve Renovate lookup failure for geoip2-golang v2 module
Renovate could not resolve the Go module path
github.com/oschwald/geoip2-golang/v2 because the /v2 suffix is a Go
module convention, not a separate GitHub repository. Added a packageRules
entry with an explicit sourceUrl pointing to the actual upstream repo so
Renovate can correctly look up available versions.

No changes to application code, go.mod, or go.sum — the dependency was
already declared correctly.
2026-03-24 06:32:00 +00:00
renovate[bot]
55204289ec fix(deps): update dependency lucide-react to v1 2026-03-24 06:22:11 +00:00
renovate[bot]
95bf0b496d fix(deps): update non-major-updates 2026-03-24 06:20:22 +00:00
Jeremy
583633c74b Merge pull request #876 from Wikid82/bot/update-geolite2-checksum
chore(docker): update GeoLite2-Country.mmdb checksum
2026-03-24 02:18:43 -04:00
GitHub Actions
c822ba7582 chore: downgrade vitest and related packages to version 4.0.18 2026-03-24 01:52:48 +00:00
GitHub Actions
a5daaa5e8c fix: add missing name field in package-lock.json 2026-03-24 01:51:42 +00:00
GitHub Actions
6967c73eaf chore: update dependencies to latest versions
- Upgraded @tanstack/query-core and @tanstack/react-query from 5.95.0 to 5.95.2
- Updated @typescript-eslint packages from 8.57.1 to 8.57.2
- Bumped @vitest packages from 4.1.0 to 4.1.1
- Updated knip from 6.0.3 to 6.0.4
- Upgraded picomatch from 4.0.3 to 4.0.4 and from 2.3.1 to 2.3.2
- Updated react-router and react-router-dom from 7.13.1 to 7.13.2
- Bumped typescript from 6.0.1-rc to 6.0.2
2026-03-24 01:50:32 +00:00
GitHub Actions
602b0b0e2e chore: update package versions in package-lock.json for consistency 2026-03-24 01:50:02 +00:00
GitHub Actions
49b3e4e537 fix(tests): resolve i18n mock issues in BulkDeleteCertificateDialog tests
Removed local i18n mock to allow global mock to function correctly, updated assertions to use resolved English translations for better consistency in test outcomes.
2026-03-24 01:47:43 +00:00
GitHub Actions
ca477c48d4 chore: Enhance documentation for E2E testing:
- Added clarity and structure to README files, including recent updates and getting started sections.
- Improved manual verification documentation for CrowdSec authentication, emphasizing expected outputs and success criteria.
- Updated debugging guide with detailed output examples and automatic trace capture information.
- Refined best practices for E2E tests, focusing on efficient polling, locator strategies, and state management.
- Documented triage report for DNS Provider feature tests, highlighting issues fixed and test results before and after improvements.
- Revised E2E test writing guide to include when to use specific helper functions and patterns for better test reliability.
- Enhanced troubleshooting documentation with clear resolutions for common issues, including timeout and token configuration problems.
- Updated tests README to provide quick links and best practices for writing robust tests.
2026-03-24 01:47:22 +00:00
GitHub Actions
7d986f2821 chore: update package versions in package-lock.json for consistency 2026-03-23 13:14:48 +00:00
GitHub Actions
849c3513bb feat(i18n): add aria-label for bulk delete certificates in multiple languages 2026-03-23 05:46:49 +00:00
GitHub Actions
a707d8e67e feat(i18n): add localized provider labels for certificate management 2026-03-23 05:45:23 +00:00
GitHub Actions
3cacecde5a fix: replace getAuthToken function with getStorageStateAuthHeaders for improved auth handling 2026-03-23 05:42:02 +00:00
GitHub Actions
4bdc771cd4 feat: synchronize selected certificate IDs with available certificates on update 2026-03-23 05:39:37 +00:00
GitHub Actions
f13d95df0f fix: specify gotestsum version in workflows for consistency 2026-03-23 05:32:52 +00:00
GitHub Actions
73aecc60e8 fix(i18n): restore localized noteText in all non-English certificate locales
- The certificate section's noteText had previously been translated into
  Chinese, German, Spanish, and French but was inadvertently overwritten
  with an English string when the individual certificate delete feature
  was introduced.
- All four locales now carry properly translated text that also reflects
  the updated policy: expired or expiring production certificates that
  are not attached to a proxy host are now eligible for deletion.
- Newly introduced keys (deleteConfirmExpiring and other delete-related
  keys) remain as English placeholders pending professional translation,
  which is the established pattern for this project.
2026-03-23 05:24:58 +00:00
Wikid82
6fc4409513 chore(docker): update GeoLite2-Country.mmdb checksum
Automated checksum update for GeoLite2-Country.mmdb database.

Old: aa154fc6bcd712644de232a4abcdd07dac1f801308c0b6f93dbc2b375443da7b
New: c6549807950f93f609d6433fa295fa517fbdec0ad975a4aafba69c136d5d2347

Auto-generated by: .github/workflows/update-geolite2.yml
2026-03-23 02:57:35 +00:00
GitHub Actions
9ed698b236 feat: enhance certificate management with expiring status
- Update isInUse function to handle certificates without an ID.
- Modify isDeletable function to include 'expiring' status as deletable.
- Adjust CertificateList component to reflect changes in deletable logic.
- Update BulkDeleteCertificateDialog and DeleteCertificateDialog to handle expiring certificates.
- Add tests for expiring certificates in CertificateList and BulkDeleteCertificateDialog.
- Update translations for expiring certificates in multiple languages.
2026-03-23 02:23:08 +00:00
GitHub Actions
69736503ac feat: add BulkDeleteCertificateDialog component for bulk certificate deletion
- Implemented BulkDeleteCertificateDialog with confirmation and listing of certificates to be deleted.
- Added translations for bulk delete functionality in English, German, Spanish, French, and Chinese.
- Created unit tests for BulkDeleteCertificateDialog to ensure proper rendering and functionality.
- Developed end-to-end tests for bulk certificate deletion, covering selection, confirmation, and cancellation scenarios.
2026-03-23 00:07:59 +00:00
Jeremy
5b8941554b Merge pull request #875 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-22 18:00:57 -04:00
renovate[bot]
0bb7826ad5 fix(deps): update non-major-updates 2026-03-22 20:26:16 +00:00
GitHub Actions
bae55fb876 chore(ci): prevent test log truncation in backend coverage workflows
- Install gotestsum in CI so the coverage script uses compact
  pkgname-formatted output instead of go test -v, which produces
  massive verbose logs that exceed GitHub Actions' step log buffer
- Upload the full test output as a downloadable artifact on every
  run (including failures) so truncated logs never block debugging
- Aligns upload-artifact pin to v7.0.0 matching the rest of the repo
2026-03-22 18:49:02 +00:00
GitHub Actions
97255f84e6 fix: add tests for delete certificate functionality and error handling in CertificateList 2026-03-22 17:33:11 +00:00
Jeremy
174f1fe511 Merge pull request #874 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-22 12:00:19 -04:00
GitHub Actions
53fc2f1e78 fix: remove unused waitForToast import from certificate-delete.spec.ts 2026-03-22 14:29:31 +00:00
GitHub Actions
ef5e2e2ea2 fix: enhance setupAuditTestDB for proper database connection handling and documentation 2026-03-22 14:29:31 +00:00
renovate[bot]
b2c40345f8 fix(deps): update non-major-updates 2026-03-22 14:24:03 +00:00
Jeremy
a38de8518f Merge branch 'development' into feature/beta-release 2026-03-22 09:52:02 -04:00
GitHub Actions
a98e37b8b4 fix: update @vitest/eslint-plugin, i18next, and react-i18next versions for compatibility 2026-03-22 13:30:41 +00:00
GitHub Actions
441864be95 fix: add DeleteCertificateDialog component with confirmation dialog for certificate deletion
- Implement DeleteCertificateDialog component to handle certificate deletion confirmation.
- Add tests for DeleteCertificateDialog covering various scenarios including rendering, confirmation, and cancellation.
- Update translation files for multiple languages to include new strings related to certificate deletion.
- Create end-to-end tests for certificate deletion UX, including button visibility, confirmation dialog, and success/failure scenarios.
2026-03-22 13:30:41 +00:00
GitHub Actions
2c9c791ae5 fix: update package versions in package-lock.json for compatibility 2026-03-22 13:30:41 +00:00
GitHub Actions
ea3e8e8371 docs: track CVE-2026-27171 zlib CPU exhaustion as a known medium vulnerability 2026-03-22 13:30:41 +00:00
Jeremy
c5dc4a9d71 Merge pull request #872 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update dependency i18next to ^25.10.3 (feature/beta-release)
2026-03-21 21:59:28 -04:00
renovate[bot]
3b3ae29414 fix(deps): update dependency i18next to ^25.10.3 2026-03-22 01:11:06 +00:00
Jeremy
551532d41b Merge pull request #870 from Wikid82/fix/cwe-614-secure-cookie-attribute
fix(security): harden auth cookie to always set Secure attribute (CWE-614)
2026-03-21 15:14:46 -04:00
GitHub Actions
20537d7bd9 fix(e2e): add Authorization header to API calls in gaps and webkit specs 2026-03-21 16:21:58 +00:00
Jeremy
66b37b5a98 Merge branch 'development' into fix/cwe-614-secure-cookie-attribute 2026-03-21 12:18:38 -04:00
Jeremy
9d4b6e5b43 Merge pull request #871 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-21 12:17:46 -04:00
renovate[bot]
f335b3f03f fix(deps): update non-major-updates 2026-03-21 16:17:20 +00:00
GitHub Actions
52f759cc00 fix(e2e): pass Authorization header in import session cleanup helpers
- Add getStoredAuthHeader helper that reads charon_auth_token from
  localStorage and constructs an Authorization: Bearer header
- Apply the header to all page.request.* API calls in readImportStatus
  and issuePendingSessionCancel
- The previous code relied on the browser cookie jar for these cleanup
  API calls; with Secure=true on auth cookies, browsers refuse to send
  cookies over HTTP to 127.0.0.1 (IP address, not localhost hostname)
  causing silent 401s that left pending ImportSession rows in the DB
- Unreleased sessions caused all subsequent caddy-import tests to show
  the pending-session banner instead of the Caddyfile textarea, failing
  every test after the first
- The fix mirrors how the React app authenticates: via Authorization
  header, which is transport-independent and works on both HTTP and HTTPS
2026-03-21 14:21:55 +00:00
GitHub Actions
cc3cb1da4b fix(security): harden auth cookie to always set Secure attribute
- Remove the conditional secure=false branch from setSecureCookie that
  allowed cookies to be issued without the Secure flag when requests
  arrived over HTTP from localhost or RFC 1918 private addresses
- Pass the literal true to c.SetCookie directly, eliminating the
  dataflow path that triggered CodeQL go/cookie-secure-not-set (CWE-614)
- Remove the now-dead codeql suppression comment; the root cause is
  gone, not merely silenced
- Update setSecureCookie doc comment to reflect that Secure is always
  true: all major browsers (Chrome 66+, Firefox 75+, Safari 14+) honour
  the Secure attribute on localhost HTTP connections, and direct
  HTTP-on-private-IP access without TLS is an unsupported deployment
  model for Charon which is designed to sit behind Caddy TLS termination
- Update the five TestSetSecureCookie HTTP/local tests that previously
  asserted Secure=false to now assert Secure=true, reflecting the
  elimination of the insecure code path
- Add Secure=true assertion to TestClearSecureCookie to provide explicit
  coverage of the clear-cookie path
2026-03-21 13:17:45 +00:00
GitHub Actions
2c608bf684 docs: track CVE-2026-27171 zlib CPU exhaustion as a known medium vulnerability 2026-03-21 12:30:20 +00:00
Jeremy
a855ed0cf6 Merge pull request #869 from Wikid82/feature/beta-release
fix: resolve security header profile preset slugs when assigning via UUID string
2026-03-21 01:46:32 -04:00
GitHub Actions
ad7e97e7df fix: align test expectations with updated proxy host handler behavior 2026-03-21 03:05:10 +00:00
GitHub Actions
a2fea2b368 fix: update tools list in agent markdown files for consistency 2026-03-21 02:35:28 +00:00
GitHub Actions
c428a5be57 fix: propagate pipeline exit codes in CI quality-checks workflow 2026-03-21 02:23:16 +00:00
GitHub Actions
22769977e3 fix: clarify that advanced_config requires Caddy JSON, not Caddyfile syntax 2026-03-21 02:12:24 +00:00
Jeremy
50fb6659da Merge pull request #863 from Wikid82/feature/beta-release
fix(uptime): fix TCP monitor UX — correct format guidance and add client-side validation
2026-03-20 22:03:08 -04:00
GitHub Actions
e4f2606ea2 fix: resolve security header profile preset slugs when assigning via UUID string 2026-03-21 01:59:34 +00:00
GitHub Actions
af5cdf48cf fix: suppress pgproto3/v2 CVE-2026-4427 alias in vulnerability ignore files 2026-03-21 01:42:18 +00:00
GitHub Actions
1940f7f55d fix(tests): improve DOM order validation for type selector and URL input in CreateMonitorModal 2026-03-21 00:47:03 +00:00
GitHub Actions
c785c5165d fix: validate TCP format and update aria attributes in CreateMonitorModal 2026-03-21 00:47:03 +00:00
GitHub Actions
eaf981f635 fix(deps): update katex to version 0.16.40 and tldts to version 7.0.27 in package-lock.json 2026-03-21 00:47:03 +00:00
GitHub Actions
4284bcf0b6 fix(security): update known vulnerabilities section in SECURITY.md to reflect critical CVE-2025-68121 and additional high-severity issues 2026-03-21 00:47:03 +00:00
GitHub Actions
586f7cfc98 fix(security): enhance vulnerability reporting and documentation in SECURITY.md 2026-03-21 00:47:03 +00:00
GitHub Actions
15e9efeeae fix(security): add security review instructions to Management and QA Security agents 2026-03-21 00:47:03 +00:00
Jeremy
cd8bb2f501 Merge pull request #868 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-20 20:14:19 -04:00
renovate[bot]
fa42e79af3 fix(deps): update non-major-updates 2026-03-21 00:12:20 +00:00
Jeremy
859ddaef1f Merge pull request #867 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-20 14:10:06 -04:00
renovate[bot]
3b247cdd73 fix(deps): update non-major-updates 2026-03-20 18:09:46 +00:00
Jeremy
00aab022f5 Merge pull request #866 from Wikid82/renovate/feature/beta-release-knip-6.x
chore(deps): update dependency knip to v6 (feature/beta-release)
2026-03-20 14:08:29 -04:00
renovate[bot]
a40764d7da chore(deps): update dependency knip to v6 2026-03-20 12:00:39 +00:00
Jeremy
87b3db7019 Merge branch 'development' into feature/beta-release 2026-03-20 02:14:04 -04:00
Jeremy
ded533d690 Merge pull request #865 from Wikid82/renovate/feature/beta-release-nick-fields-retry-4.x
chore(deps): update nick-fields/retry action to v4 (feature/beta-release)
2026-03-20 02:13:46 -04:00
Jeremy
fc4ceafa20 Merge pull request #864 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update non-major-updates (feature/beta-release)
2026-03-20 02:13:31 -04:00
renovate[bot]
5b02eebfe5 chore(deps): update nick-fields/retry action to v4 2026-03-20 05:30:43 +00:00
renovate[bot]
338c9a3eef chore(deps): update non-major-updates 2026-03-20 05:30:39 +00:00
GitHub Actions
68d21fc20b fix: patch CVE-2026-30836 in Caddy build by pinning smallstep/certificates to v0.30.0 2026-03-20 04:15:29 +00:00
GitHub Actions
ea9ebdfdf2 fix(tools): update tools list in agent markdown files for consistency 2026-03-20 04:14:56 +00:00
GitHub Actions
1d09c793f6 fix(uptime): remove 'tcp://' prefix from Redis monitor URL in create and payload validation 2026-03-20 02:57:00 +00:00
GitHub Actions
856fd4097b fix(deps): update undici and tar to latest versions for improved stability 2026-03-20 02:47:00 +00:00
GitHub Actions
bb14ae73cc fix(uptime): fix TCP monitor UX — correct format guidance and add client-side validation
The TCP monitor creation form showed a placeholder that instructed users to enter a URL with the tcp:// scheme prefix (e.g., tcp://192.168.1.1:8080). Following this guidance caused a silent HTTP 500 error because Go's net.SplitHostPort rejects any input containing a scheme prefix, expecting bare host:port format only.

- Corrected the urlPlaceholder translation key to remove the tcp:// prefix
- Added per-type dynamic placeholder (urlPlaceholderHttp / urlPlaceholderTcp) so the URL input shows the correct example format as soon as the user selects a monitor type
- Added per-type helper text below the URL input explaining the required format, updated in real time when the type selector changes
- Added client-side validation: typing a scheme prefix (://) in TCP mode shows an inline error and blocks form submission before the request reaches the backend
- Reordered the Create Monitor form so the type selector appears before the URL input, giving users the correct format context before they type
- Type selector onChange now clears any stale urlError to prevent incorrect error messages persisting after switching from TCP back to HTTP
- Added 5 new i18n keys across all 5 supported locales (en, de, fr, es, zh)
- Added 10 RTL unit tests covering all new validation paths including the type-change error-clear scenario
- Added 9 Playwright E2E tests covering placeholder variants, helper text, inline error lifecycle, submission blocking, and successful TCP creation

Closes #issue-5 (TCP monitor UI cannot add monitor when following placeholder)
2026-03-20 01:19:43 +00:00
Jeremy
44450ff88a Merge pull request #862 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update dependency anchore/grype to v0.110.0 (feature/beta-release)
2026-03-19 19:46:25 -04:00
renovate[bot]
3a80e032f4 chore(deps): update dependency anchore/grype to v0.110.0 2026-03-19 21:09:01 +00:00
Jeremy
6e2d89372f Merge pull request #859 from Wikid82/feature/beta-release
fix(frontend): stabilize CrowdSec first-enable UX and guard empty-value regression
2026-03-19 16:56:50 -04:00
GitHub Actions
5bf7b54496 chore: proactively pin grpc and goxmldsig in Docker builder stages to patch embedded binary CVEs 2026-03-19 18:18:28 +00:00
GitHub Actions
0bdcb2a091 chore: suppress third-party binary CVEs with documented justification and expiry dates 2026-03-19 18:18:28 +00:00
GitHub Actions
b988179685 fix: update @emnapi/core, @emnapi/runtime, baseline-browser-mapping, and i18next to latest versions for improved stability 2026-03-19 18:18:28 +00:00
GitHub Actions
cbfe80809e fix: update @emnapi/core, @emnapi/runtime, and katex to latest versions for improved stability 2026-03-19 18:18:28 +00:00
GitHub Actions
9f826f764c fix: update dependencies in go.work.sum for improved compatibility and performance 2026-03-19 18:18:28 +00:00
Jeremy
262a805317 Merge pull request #861 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-19 14:15:42 -04:00
renovate[bot]
ec25165e54 fix(deps): update non-major-updates 2026-03-19 18:02:03 +00:00
GitHub Actions
7b34e2ecea fix: update google.golang.org/grpc to version 1.79.3 for improved compatibility 2026-03-19 13:10:18 +00:00
GitHub Actions
ec9b8ac925 fix: update @types/debug to version 4.1.13 for improved stability 2026-03-19 12:59:23 +00:00
GitHub Actions
431d88c47c fix: update @tanstack/query-core, @tanstack/react-query, @types/debug, eslint-plugin-testing-library, i18next, and knip to latest versions for improved stability and performance 2026-03-19 12:58:46 +00:00
GitHub Actions
e08e1861d6 fix: update @oxc-project and @rolldown packages to version 1.0.0-rc.10 for improved compatibility 2026-03-19 05:17:14 +00:00
GitHub Actions
64d2d4d423 fix: update ts-api-utils to version 2.5.0 for improved functionality 2026-03-19 05:16:32 +00:00
Jeremy
9f233a0128 Merge pull request #860 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update non-major-updates (feature/beta-release)
2026-03-18 20:30:26 -04:00
renovate[bot]
6939c792bd chore(deps): update non-major-updates 2026-03-18 23:07:56 +00:00
GitHub Actions
853940b74a fix: update mockResolvedValue calls for getSecurityStatus to improve test clarity 2026-03-18 23:06:24 +00:00
GitHub Actions
5aa8940af2 fix: update tools list in agent markdown files for consistency and clarity 2026-03-18 23:04:52 +00:00
GitHub Actions
cd3f2a90b4 fix: seed lapi-status in renderWithSeed to prevent loading gaps 2026-03-18 22:19:22 +00:00
GitHub Actions
bf89c2603d fix: enhance invite token validation for hex format and case sensitivity 2026-03-18 22:15:39 +00:00
GitHub Actions
19b388d865 fix: update Caddy security version to 1.1.50 in Dockerfile 2026-03-18 22:11:50 +00:00
GitHub Actions
25e40f164d fix: replace userEvent.click with user.click for consistency in CrowdSec tests 2026-03-18 22:08:05 +00:00
GitHub Actions
5505f66c41 fix: clarify comments on optimistic updates and server state handling in Security component 2026-03-18 22:06:40 +00:00
GitHub Actions
9a07619b89 fix: assert cloud-metadata error and no raw IPv6 leak for mapped metadata IP 2026-03-18 19:08:55 +00:00
GitHub Actions
faf2041a82 fix: sanitize IPv4-mapped IPv6 address in SSRF error message 2026-03-18 19:06:31 +00:00
GitHub Actions
460834f8f3 fix: use correct checkbox assertion for CrowdSec toggle test 2026-03-18 19:05:16 +00:00
GitHub Actions
75ae77a6bf fix: assert all db.Create calls in uptime service tests 2026-03-18 19:03:53 +00:00
GitHub Actions
73f2134caf fix(tests): improve server readiness check in UptimeService test to prevent misleading failures 2026-03-18 18:45:59 +00:00
GitHub Actions
c5efc30f43 fix: eliminate bcrypt DefaultCost from test setup to prevent CI flakiness 2026-03-18 18:13:18 +00:00
GitHub Actions
3099d74b28 fix: ensure cloud metadata SSRF error is consistent for IPv4-mapped addresses 2026-03-18 17:23:53 +00:00
GitHub Actions
fcc9309f2e chore(deps): update indirect dependencies for improved compatibility and performance 2026-03-18 17:12:01 +00:00
Jeremy
e581a9e7e7 Merge branch 'development' into feature/beta-release 2026-03-18 13:11:50 -04:00
Jeremy
ac72e6c3ac Merge pull request #858 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-18 13:11:20 -04:00
renovate[bot]
db824152ef fix(deps): update non-major-updates 2026-03-18 17:00:26 +00:00
GitHub Actions
1de29fe6fc fix(frontend): stabilize CrowdSec first-enable UX and guard empty-value regression
When CrowdSec is first enabled, the 10-60 second startup window caused
the toggle to immediately flicker back to unchecked, the card badge to
show 'Disabled' throughout startup, CrowdSecKeyWarning to flash before
bouncer registration completed, and CrowdSecConfig to show alarming
LAPI-not-ready banners to the user.

Root cause: the toggle, badge, and warning conditions all read from
stale sources (crowdsecStatus local state and status.crowdsec.enabled
server data), neither of which reflects user intent during a pending mutation.

- Derive crowdsecChecked from crowdsecPowerMutation.variables during
  the pending window so the UI reflects intent immediately on click,
  not the lagging server state
- Show a 'Starting...' badge in warning variant throughout the startup
  window so the user knows the operation is in progress
- Suppress CrowdSecKeyWarning unconditionally while the mutation is
  pending, preventing the bouncer key alert from flashing before
  registration completes on the backend
- Broadcast the mutation's running state to the QueryClient cache via
  a synthetic crowdsec-starting key so CrowdSecConfig.tsx can read it
  without prop drilling
- In CrowdSecConfig, suppress the LAPI 'not running' (red) and
  'initializing' (yellow) banners while the startup broadcast is active,
  with a 90-second safety cap to prevent stale state from persisting
  if the tab is closed mid-mutation
- Add security.crowdsec.starting translation key to all five locales
- Add two backend regression tests confirming that empty-string setting
  values are accepted (not rejected by binding validation), preventing
  silent re-introduction of the Issue 4 bug
- Add nine RTL tests covering toggle stabilization, badge text, warning
  suppression, and LAPI banner suppression/expiry
- Add four Playwright E2E tests using route interception to simulate
  the startup delay in a real browser context

Fixes Issues 3 and 4 from the fresh-install bug report.
2026-03-18 16:57:23 +00:00
GitHub Actions
ac2026159e chore: update tailwindcss to version 4.2.2 in package.json 2026-03-18 16:46:50 +00:00
GitHub Actions
cfb28055cf fix: add vulnerability suppressions for CVE-2026-2673 in libcrypto3 and libssl3 with justification and review timeline 2026-03-18 11:08:58 +00:00
GitHub Actions
a2d8970b22 chore: Refactor agent tools for improved organization and efficiency across documentation, frontend development, planning, Playwright testing, QA security, and supervisor roles. 2026-03-18 10:36:14 +00:00
GitHub Actions
abadf9878a chore(deps): update electron-to-chromium to version 1.5.321 2026-03-18 10:27:06 +00:00
GitHub Actions
87590ac4e8 fix: simplify error handling and improve readability in URL validation and uptime service tests 2026-03-18 10:25:25 +00:00
Jeremy
999a81dce7 Merge pull request #857 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update dependency knip to ^5.88.0 (feature/beta-release)
2026-03-18 06:24:40 -04:00
Jeremy
031457406a Merge pull request #855 from Wikid82/feature/beta-release
fix(uptime): allow RFC 1918 IPs for admin-configured monitors
2026-03-18 06:09:51 -04:00
renovate[bot]
3d9d183b77 chore(deps): update dependency knip to ^5.88.0 2026-03-18 10:07:26 +00:00
GitHub Actions
379c664b5c fix(test): align cloud-metadata SSRF handler test with updated error message
The settings handler SSRF test table expected the generic "private ip"
error string for the cloud-metadata case (169.254.169.254). After the
url_validator was updated to return a distinct "cloud metadata" error for
that address, the handler test's errorContains check failed on every CI run.

Updated the test case expectation from "private" to "cloud metadata" to
match the more precise error message now produced by the validator.
2026-03-18 03:38:29 +00:00
GitHub Actions
4d8f09e279 fix: improve readiness checks and error handling in uptime service tests 2026-03-18 03:22:32 +00:00
GitHub Actions
8a0e91ac3b chore: strengthen AllowRFC1918 permit tests to assert success and URL correctness 2026-03-18 03:22:32 +00:00
GitHub Actions
3bc798bc9d fix: normalize IPv4-mapped cloud-metadata address to its IPv4 form before error reporting
- IPv4-mapped cloud metadata (::ffff:169.254.169.254) previously fell through
  the IPv4-mapped IPv6 detection block and returned the generic private-IP error
  instead of the cloud-metadata error, making the two cases inconsistent
- The IPv4-mapped error path used ip.String() (the raw ::ffff:… form) directly
  rather than sanitizeIPForError, potentially leaking the unsanitized IPv6
  address in error messages visible to callers
- Now extracts the IPv4 from the mapped address before both the cloud-metadata
  comparison and the sanitization call, so ::ffff:169.254.169.254 produces the
  same "access to cloud metadata endpoints is blocked" error as 169.254.169.254
  and the error message is always sanitized through the shared helper
- Updated the corresponding test to assert the cloud-metadata message and the
  absence of the raw IPv6 representation in the error text
2026-03-18 03:22:32 +00:00
GitHub Actions
8b4e0afd43 fix: format SeedDefaultSecurityConfig for improved readability 2026-03-18 03:22:32 +00:00
GitHub Actions
c7c4fc8915 fix(deps): update flatted to version 3.4.2 for improved stability 2026-03-18 03:22:32 +00:00
Jeremy
41c0252cf1 Merge pull request #856 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update module github.com/greenpau/caddy-security to v1.1.49 (feature/beta-release)
2026-03-17 23:15:17 -04:00
renovate[bot]
4c375ad86f chore(deps): update module github.com/greenpau/caddy-security to v1.1.49 2026-03-18 02:33:53 +00:00
Jeremy
459a8fef42 Merge branch 'development' into feature/beta-release 2026-03-17 22:32:24 -04:00
GitHub Actions
00a18704e8 fix(uptime): allow RFC 1918 IPs for admin-configured monitors
HTTP/HTTPS uptime monitors targeting LAN addresses (192.168.x.x,
10.x.x.x, 172.16.x.x) permanently reported 'down' on fresh installs
because SSRF protection rejects RFC 1918 ranges at two independent
checkpoints: the URL validator (DNS-resolution layer) and the safe
dialer (TCP-connect layer). Fixing only one layer leaves the monitor
broken in practice.

- Add IsRFC1918() predicate to the network package covering only the
  three RFC 1918 CIDRs; 169.254.x.x (link-local / cloud metadata)
  and loopback are intentionally excluded
- Add WithAllowRFC1918() functional option to both SafeHTTPClient and
  ValidationConfig; option defaults to false so existing behaviour is
  unchanged for every call site except uptime monitors
- In uptime_service.go, pass WithAllowRFC1918() to both
  ValidateExternalURL and NewSafeHTTPClient together; a coordinating
  comment documents that both layers must be relaxed as a unit
- 169.254.169.254 and the full 169.254.0.0/16 link-local range remain
  unconditionally blocked; the cloud-metadata error path is preserved
- 21 new tests across three packages, including an explicit regression
  guard that confirms RFC 1918 blocks are still applied without the
  option set (TestValidateExternalURL_RFC1918BlockedByDefault)

Fixes issues 6 and 7 from the fresh-install bug report.
2026-03-17 21:22:56 +00:00
Jeremy
dc9bbacc27 Merge pull request #854 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update release-drafter/release-drafter digest to 44a942e (feature/beta-release)
2026-03-17 16:41:13 -04:00
Jeremy
4da4e1a0d4 Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-03-17 14:37:17 -04:00
Jeremy
3318b4af80 Merge pull request #852 from Wikid82/feature/beta-release
feat(security): seed default SecurityConfig row on application startup
2026-03-17 14:36:45 -04:00
GitHub Actions
c1aaa48ecb chore: cover error path in SeedDefaultSecurityConfig and letsencrypt cert cleanup loop
- The DB error return branch in SeedDefaultSecurityConfig was never
  exercised because all seed tests only ran against a healthy in-memory
  database; added a test that closes the underlying connection before
  calling the function so the FirstOrCreate error path is reached
- The letsencrypt certificate cleanup loop in Register was unreachable
  in all existing tests because no test pre-seeded a ProxyHost with
  a letsencrypt cert association; added a test that creates that
  precondition so the log and Update lines inside the loop execute
- These were the last two files blocking patch coverage on PR #852
2026-03-17 17:45:39 +00:00
renovate[bot]
f82a892405 chore(deps): update release-drafter/release-drafter digest to 44a942e 2026-03-17 17:17:04 +00:00
GitHub Actions
287e85d232 fix(ci): quote shell variables to prevent word splitting in integration test
- All unquoted $i loop counter comparisons and ${TMP_COOKIE} curl
  option arguments in the rate limit integration script were flagged
  by shellcheck SC2086
- Unquoted variables in [ ] test expressions and curl -b/-c options
  can cause subtle failures if the value ever contains whitespace or
  glob characters, and are a shellcheck hard warning that blocks CI
  linting gates
- Quoted all affected variables in place with no logic changes
2026-03-17 17:15:19 +00:00
Jeremy
fa6fbc8ce9 Merge pull request #853 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update paulhatch/semantic-version action to v6.0.2 (feature/beta-release)
2026-03-17 13:14:55 -04:00
GitHub Actions
61418fa9dd fix(security): persist RateLimitMode in Upsert and harden integration test payload
- The security config Upsert update path copied all rate limit fields
  from the incoming request onto the existing database record except
  RateLimitMode, so the seeded default value of "disabled" always
  survived a POST regardless of what the caller sent
- This silently prevented the Caddy rate_limit handler from being
  injected on any container with a pre-existing config record (i.e.,
  every real deployment and every CI run after migration)
- Added the missing field assignment so RateLimitMode is correctly
  persisted on update alongside all other rate limit settings
- Integration test payload now also sends rate_limit_enable alongside
  rate_limit_mode so the handler sync logic fires via its explicit
  first branch, providing belt-and-suspenders correctness independent
  of which path the caller uses to express intent
2026-03-17 17:06:02 +00:00
GitHub Actions
0df1126aa9 fix(deps): update modernc.org/sqlite to version 1.47.0 for improved functionality 2026-03-17 14:31:42 +00:00
renovate[bot]
1c72469ad6 chore(deps): update paulhatch/semantic-version action to v6.0.2 2026-03-17 14:30:44 +00:00
GitHub Actions
338f864f60 fix(ci): set correct rate_limit_mode field in integration test security config
- The rate-limit integration test was sending rate_limit_enable:true in the
  security config POST, but the backend injects the Caddy rate_limit handler
  only when rate_limit_mode is the string "enabled"
- Because rate_limit_mode was absent from the payload, the database default
  of "disabled" persisted and the guard condition always evaluated false,
  leaving the handler uninjected across all 10 verify attempts
- Replaced the boolean rate_limit_enable with the string field
  rate_limit_mode:"enabled" to match the exact contract the backend enforces
2026-03-17 14:29:35 +00:00
GitHub Actions
8b0011f6c6 fix(ci): enhance rate limit integration test reliability
- Added HTTP status checks for login and security config POST requests to ensure proper error handling.
- Implemented a readiness gate for the Caddy admin API before applying security configurations.
- Increased sleep duration before verifying rate limit handler to accommodate Caddy's configuration propagation.
- Changed verification failure from a warning to a hard exit to prevent misleading test results.
- Updated Caddy admin API URL to use the canonical trailing slash in multiple locations.
- Adjusted retry parameters for rate limit verification to reduce polling noise.
- Removed stale GeoIP checksum validation from the Dockerfile's non-CI path to simplify the build process.
2026-03-17 14:05:25 +00:00
GitHub Actions
e6a044c532 fix(deps): update caniuse-lite to version 1.0.30001780 for improved compatibility 2026-03-17 12:40:55 +00:00
GitHub Actions
bb1e59ea93 fix(deps): update bytedance/gopkg to version 0.1.4 for improved functionality 2026-03-17 12:38:43 +00:00
GitHub Actions
b761d7d4f7 feat(security): seed default SecurityConfig row on application startup
On a fresh install the security_configs table is auto-migrated but
contains no rows. Any code path reading SecurityConfig by name received
an empty Go struct with zero values, producing an all-disabled UI state
that offered no guidance to the user and made the security status
endpoint appear broken.

Adds a SeedDefaultSecurityConfig function that uses FirstOrCreate to
guarantee a default row exists with safe, disabled-by-default values on
every startup. The call is idempotent — existing rows are never modified,
so upgrades are unaffected. If the seed fails the application logs a
warning and continues rather than crashing.

Zero-valued rate-limit fields are intentional and safe: the Cerberus
rate-limit middleware applies hardcoded fallback thresholds when the
stored values are zero, so enabling rate limiting without configuring
thresholds results in sensible defaults rather than a divide-by-zero or
traffic block.

Adds three unit tests covering the empty-database, idempotent, and
do-not-overwrite-existing paths.
2026-03-17 12:33:40 +00:00
Jeremy
418fb7d17c Merge pull request #851 from Wikid82/feature/beta-release
fix(settings): allow empty string as a valid setting value
2026-03-16 23:24:37 -04:00
Jeremy
5084483984 Merge branch 'development' into feature/beta-release 2026-03-16 22:05:55 -04:00
GitHub Actions
3c96810aa1 fix(deps): update @babel/helpers, @babel/parser, @babel/runtime, and enhanced-resolve to latest versions for improved stability 2026-03-17 02:05:00 +00:00
GitHub Actions
dcd1ec7e95 fix: improve error handling in TestSettingsHandler_UpdateSetting_EmptyValueAccepted 2026-03-17 02:01:48 +00:00
GitHub Actions
4f222b6308 fix: make 'value' field optional in UpdateSettingRequest struct 2026-03-17 01:40:35 +00:00
Jeremy
071ae38d35 Merge pull request #850 from Wikid82/feature/beta-release
Feature: Pushover Notification Provider
2026-03-16 20:09:08 -04:00
GitHub Actions
3385800f41 fix(deps): update core-js-compat to version 3.49.0 for improved compatibility 2026-03-16 21:48:19 +00:00
GitHub Actions
4fe538b37e chore: add unit tests for Slack and Pushover service flags, and validate Pushover dispatch behavior 2026-03-16 21:38:40 +00:00
Jeremy
2bdf4f8286 Merge branch 'development' into feature/beta-release 2026-03-16 14:26:07 -04:00
Jeremy
a96366957e Merge pull request #849 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update non-major-updates (feature/beta-release)
2026-03-16 14:24:11 -04:00
renovate[bot]
c44642241c chore(deps): update non-major-updates 2026-03-16 18:22:12 +00:00
GitHub Actions
b5bf505ab9 fix: update go-sqlite3 to version 1.14.37 and modernc.org/sqlite to version 1.46.2 for improved stability 2026-03-16 18:20:35 +00:00
GitHub Actions
51f59e5972 fix: update @typescript-eslint packages to version 8.57.1 for improved compatibility and stability 2026-03-16 18:19:36 +00:00
GitHub Actions
65d02e754e feat: add support for Pushover notification provider
- Updated the list of supported notification provider types to include 'pushover'.
- Enhanced the notifications API tests to validate Pushover integration.
- Modified the notifications form to include fields specific to Pushover, such as API Token and User Key.
- Implemented CRUD operations for Pushover providers in the settings.
- Added end-to-end tests for Pushover provider functionality, including form rendering, payload validation, and security checks.
- Updated translations to include Pushover-specific labels and placeholders.
2026-03-16 18:16:14 +00:00
Jeremy
816c0595e1 Merge pull request #834 from Wikid82/feature/beta-release
Feature: Slack Notification Provider
2026-03-16 11:15:29 -04:00
GitHub Actions
9496001811 fix: update undici to version 7.24.4 for improved stability and security 2026-03-16 12:33:58 +00:00
Jeremy
ec1b79c2b7 Merge branch 'development' into feature/beta-release 2026-03-16 08:30:45 -04:00
Jeremy
bab79f2349 Merge pull request #846 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update non-major-updates (feature/beta-release)
2026-03-16 08:28:36 -04:00
renovate[bot]
edd7405313 chore(deps): update non-major-updates 2026-03-16 12:28:25 +00:00
GitHub Actions
79800871fa fix: harden frontend-builder with npm upgrade to mitigate bundled CVEs 2026-03-16 12:26:55 +00:00
Jeremy
67dd87d3a9 Merge pull request #845 from Wikid82/main
Propagate changes from main into development
2026-03-16 08:24:38 -04:00
GitHub Actions
5e5eae7422 fix: ensure Semgrep hook triggers on Dockerfile-only commits 2026-03-16 11:44:27 +00:00
GitHub Actions
78f216eaef fix: enhance payload handling in Slack provider creation to track token presence 2026-03-16 11:41:06 +00:00
GitHub Actions
95a65069c0 fix: handle existing PR outputs in promotion job 2026-03-16 11:17:37 +00:00
Jeremy
1e4b2d1d03 Merge pull request #843 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-16 07:15:40 -04:00
renovate[bot]
81f1dce887 fix(deps): update non-major-updates 2026-03-16 11:06:23 +00:00
GitHub Actions
b66cc34e1c fix: update Caddy security version to 1.1.48 in Dockerfile 2026-03-15 20:49:53 +00:00
GitHub Actions
5bafd92edf fix: supply slack webhook token in handler create sub-tests
The slack sub-tests in TestDiscordOnly_CreateRejectsNonDiscord and
TestBlocker3_CreateProviderRejectsNonDiscordWithSecurityEvents were
omitting the required token field from their request payloads.
CreateProvider enforces that Slack providers must have a non-empty
token (the webhook URL) at creation time. Without it the service
returns "slack webhook URL is required", which the handler does not
classify as a 400 validation error, so it falls through to 500.

Add a token field to each test struct, populate it for the slack
case with a valid-format Slack webhook URL, and use
WithSlackURLValidator to bypass the real format check in unit tests —
matching the pattern used in all existing service-level Slack tests.
2026-03-15 15:17:23 +00:00
GitHub Actions
6e4294dce1 fix: validate Slack webhook URL at provider create/update time 2026-03-15 12:23:27 +00:00
GitHub Actions
82b1c85b7c fix: clarify feature flag behavior for Slack notifications in documentation 2026-03-15 12:14:48 +00:00
GitHub Actions
41ecb7122f fix: update baseline-browser-mapping and caniuse-lite to latest versions 2026-03-15 11:58:48 +00:00
GitHub Actions
2fa7608b9b fix: guard routeBodyPromise against indefinite hang in security test 2026-03-15 11:51:16 +00:00
GitHub Actions
285ee2cdda fix: expand Semgrep ruleset to cover TypeScript, Dockerfile, and shell security 2026-03-15 11:45:18 +00:00
GitHub Actions
72598ed2ce fix: inject Slack URL validator via constructor option instead of field mutation 2026-03-15 11:27:51 +00:00
GitHub Actions
8670cdfd2b fix: format notification services table for better readability 2026-03-15 11:17:34 +00:00
GitHub Actions
f8e8440388 fix: correct GeoIP CI detection to require truthy value 2026-03-15 11:15:56 +00:00
GitHub Actions
ab4dee5fcd fix: make Slack webhook URL validator injectable on NotificationService 2026-03-15 11:15:10 +00:00
Jeremy
04e87e87d5 Merge pull request #841 from Wikid82/renovate/feature/beta-release-jsdom-29.x
chore(deps): update dependency jsdom to v29 (feature/beta-release)
2026-03-15 07:00:19 -04:00
Jeremy
cc96435db1 Merge pull request #840 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update softprops/action-gh-release digest to b25b93d (feature/beta-release)
2026-03-15 06:59:51 -04:00
renovate[bot]
53af0a6866 chore(deps): update dependency jsdom to v29 2026-03-15 10:56:03 +00:00
renovate[bot]
3577ce6c56 chore(deps): update softprops/action-gh-release digest to b25b93d 2026-03-15 10:55:54 +00:00
Jeremy
0ce35f2d64 Merge branch 'development' into feature/beta-release 2026-03-14 23:47:43 -04:00
GitHub Actions
4b170b69e0 fix: update Caddy security version to 1.1.47 in Dockerfile 2026-03-15 03:25:41 +00:00
GitHub Actions
1096b00b94 fix: set PORT environment variable for httpbin backend in integration scripts 2026-03-14 16:44:35 +00:00
GitHub Actions
6180d53a93 fix: update undici to version 7.24.2 in package-lock.json 2026-03-14 16:44:35 +00:00
Jeremy
fca1139c81 Merge pull request #838 from Wikid82/renovate/feature/beta-release-release-drafter-release-drafter-7.x
chore(deps): update release-drafter/release-drafter action to v7 (feature/beta-release)
2026-03-14 12:30:46 -04:00
Jeremy
847b10322a Merge pull request #837 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update non-major-updates (feature/beta-release)
2026-03-14 12:30:29 -04:00
Jeremy
59251c8f27 Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-03-14 12:30:02 -04:00
GitHub Actions
58b087bc63 fix: replace curl with wget for backend readiness checks in integration scripts 2026-03-14 13:17:06 +00:00
renovate[bot]
8ab926dc8b chore(deps): update release-drafter/release-drafter action to v7 2026-03-14 13:16:45 +00:00
renovate[bot]
85f258d9f6 chore(deps): update non-major-updates 2026-03-14 13:15:37 +00:00
GitHub Actions
042c5ec6e5 fix(ci): replace abandoned httpbin image with maintained Go alternative 2026-03-13 22:44:19 +00:00
GitHub Actions
05d19c0471 fix: update lru-cache and other dependencies to latest versions 2026-03-13 20:07:30 +00:00
GitHub Actions
48af524313 chore(security): expand Semgrep coverage to include frontend and secrets scanning 2026-03-13 20:07:30 +00:00
GitHub Actions
bad97102e1 fix: repair GeoIP CI detection and harden httpbin startup in integration tests 2026-03-13 20:07:30 +00:00
GitHub Actions
98a4efcd82 fix: handle errors gracefully when commenting on PRs in supply chain verification workflow 2026-03-13 20:07:30 +00:00
Jeremy
f631dfc628 Merge pull request #836 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update non-major-updates (feature/beta-release)
2026-03-13 15:58:41 -04:00
renovate[bot]
eb5b74cbe3 chore(deps): update non-major-updates 2026-03-13 19:08:11 +00:00
GitHub Actions
1785ccc39f fix: remove zlib vulnerability suppression and update review dates for Nebula ECDSA signature malleability 2026-03-13 14:14:22 +00:00
GitHub Actions
4b896c2e3c fix: replace curl with wget for healthcheck commands in Docker configurations 2026-03-13 14:13:37 +00:00
GitHub Actions
88a9cdb0ff fix(deps): update @vitejs/plugin-react to version 6.0.1 and adjust peer dependency for @rolldown/plugin-babel 2026-03-13 12:33:00 +00:00
GitHub Actions
354ff0068a fix: upgrade zlib package in Dockerfile to ensure latest security patches 2026-03-13 12:10:38 +00:00
GitHub Actions
0c419d8f85 chore: add Slack provider validation tests for payload and webhook URL 2026-03-13 12:09:35 +00:00
GitHub Actions
26be592f4d feat: add Slack notification provider support
- Updated the notification provider types to include 'slack'.
- Modified API tests to handle 'slack' as a valid provider type.
- Enhanced frontend forms to display Slack-specific fields (webhook URL and channel name).
- Implemented CRUD operations for Slack providers, ensuring proper payload structure.
- Added E2E tests for Slack notification provider, covering form rendering, validation, and security checks.
- Updated translations to include Slack-related text.
- Ensured that sensitive information (like tokens) is not exposed in API responses.
2026-03-13 03:40:02 +00:00
GitHub Actions
fb9b6cae76 fix(deps): update caddy-security version to 1.1.46 2026-03-13 01:37:09 +00:00
Jeremy
5bb9b2a6fb Merge branch 'development' into feature/beta-release 2026-03-12 13:52:54 -04:00
GitHub Actions
593694a4b4 fix(deps): update goccy/go-json to version 0.10.6 2026-03-12 17:49:05 +00:00
GitHub Actions
b207993299 fix(deps): update baseline-browser-mapping to version 2.10.7 and undici to version 7.23.0 2026-03-12 17:48:14 +00:00
Jeremy
a807288052 Merge pull request #833 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update non-major-updates (feature/beta-release)
2026-03-12 13:45:33 -04:00
renovate[bot]
49b956f916 chore(deps): update non-major-updates 2026-03-12 17:38:44 +00:00
GitHub Actions
53227de55c chore: Refactor code structure for improved readability and maintainability 2026-03-12 10:10:25 +00:00
GitHub Actions
58921556a1 fix(deps): update golang.org/x/term to version 0.41.0 2026-03-12 10:06:34 +00:00
GitHub Actions
442164cc5c fix(deps): update golang.org/x/crypto and golang.org/x/net dependencies to latest versions 2026-03-12 10:05:51 +00:00
Jeremy
8414004d8f Merge pull request #832 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-12 05:53:18 -04:00
renovate[bot]
7932188dae fix(deps): update non-major-updates 2026-03-12 09:30:08 +00:00
GitHub Actions
d4081d954f chore: update dependencies and configuration for Vite and Vitest
- Bump versions of @vitejs/plugin-react, @vitest/coverage-istanbul, @vitest/coverage-v8, and @vitest/ui to their beta releases.
- Upgrade Vite and Vitest to their respective beta versions.
- Adjust Vite configuration to disable code splitting for improved React initialization stability.
2026-03-12 04:31:31 +00:00
GitHub Actions
2e85a341c8 chore: upgrade ESLint and related plugins to version 10.x
- Updated @eslint/js and eslint to version 10.0.0 in package.json.
- Adjusted overrides for eslint-plugin-react-hooks, eslint-plugin-jsx-a11y, and eslint-plugin-promise to ensure compatibility with ESLint v10.
- Modified lefthook.yml to reflect the upgrade and noted the need for plugin support for ESLint v10.
2026-03-12 00:00:01 +00:00
GitHub Actions
2969eb58e4 chore: update TypeScript to 6.0.1-rc and adjust package dependencies
- Removed duplicate @typescript-eslint/utils dependency in frontend/package.json
- Updated TypeScript version from 5.9.3 to 6.0.1-rc in frontend/package.json and package.json
- Adjusted ResizeObserver mock to use globalThis in tests
- Modified tsconfig.json and tsconfig.node.json to include empty types array
- Cleaned up package-lock.json to reflect TypeScript version change and updated dev dependencies
2026-03-11 22:19:35 +00:00
456 changed files with 44869 additions and 13050 deletions

View File

@@ -47,7 +47,7 @@ services:
# - <PATH_TO_YOUR_CADDYFILE>:/import/Caddyfile:ro
# - <PATH_TO_YOUR_SITES_DIR>:/import/sites:ro # If your Caddyfile imports other files
healthcheck:
test: ["CMD-SHELL", "curl -fsS http://localhost:8080/api/v1/health || exit 1"]
test: ["CMD-SHELL", "wget -qO /dev/null http://localhost:8080/api/v1/health || exit 1"]
interval: 30s
timeout: 10s
retries: 3

View File

@@ -87,7 +87,7 @@ services:
- playwright_caddy_config:/config
- /var/run/docker.sock:/var/run/docker.sock:ro # For container discovery in tests
healthcheck:
test: ["CMD", "curl", "-sf", "http://localhost:8080/api/v1/health"]
test: ["CMD-SHELL", "wget -qO /dev/null http://localhost:8080/api/v1/health || exit 1"]
interval: 5s
timeout: 3s
retries: 12

View File

@@ -48,11 +48,12 @@ services:
tmpfs:
# True tmpfs for E2E test data - fresh on every run, in-memory only
# mode=1777 allows any user to write (container runs as non-root)
- /app/data:size=100M,mode=1777
# 256M gives headroom for the backup service's 100MB disk-space check
- /app/data:size=256M,mode=1777
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro # For container discovery in tests
healthcheck:
test: ["CMD-SHELL", "curl -fsS http://localhost:8080/api/v1/health || exit 1"]
test: ["CMD-SHELL", "wget -qO /dev/null http://localhost:8080/api/v1/health || exit 1"]
interval: 5s
timeout: 5s
retries: 10

View File

@@ -52,7 +52,7 @@ services:
# - ./my-existing-Caddyfile:/import/Caddyfile:ro
# - ./sites:/import/sites:ro # If your Caddyfile imports other files
healthcheck:
test: ["CMD-SHELL", "curl -fsS http://localhost:8080/api/v1/health || exit 1"]
test: ["CMD-SHELL", "wget -qO /dev/null http://localhost:8080/api/v1/health || exit 1"]
interval: 30s
timeout: 10s
retries: 3

View File

@@ -303,6 +303,19 @@ ACQUIS_EOF
# Also handle case where it might be without trailing slash
sed -i 's|log_dir: /var/log$|log_dir: /var/log/crowdsec|g' "$CS_CONFIG_DIR/config.yaml"
# Redirect CrowdSec LAPI database to persistent volume
# Default path /var/lib/crowdsec/data/crowdsec.db is ephemeral (not volume-mounted),
# so it is destroyed on every container rebuild. The bouncer API key (stored on the
# persistent volume at /app/data/crowdsec/) survives rebuilds but the LAPI database
# that validates it does not — causing perpetual key rejection.
# Redirecting db_path to the volume-mounted CS_DATA_DIR fixes this.
sed -i "s|db_path: /var/lib/crowdsec/data/crowdsec.db|db_path: ${CS_DATA_DIR}/crowdsec.db|g" "$CS_CONFIG_DIR/config.yaml"
if grep -q "db_path:.*${CS_DATA_DIR}" "$CS_CONFIG_DIR/config.yaml"; then
echo "✓ CrowdSec LAPI database redirected to persistent volume: ${CS_DATA_DIR}/crowdsec.db"
else
echo "⚠️ WARNING: Could not verify LAPI db_path redirect — bouncer keys may not survive rebuilds"
fi
# Verify LAPI configuration was applied correctly
if grep -q "listen_uri:.*:8085" "$CS_CONFIG_DIR/config.yaml"; then
echo "✓ CrowdSec LAPI configured for port 8085"
@@ -310,10 +323,11 @@ ACQUIS_EOF
echo "✗ WARNING: LAPI port configuration may be incorrect"
fi
# Update hub index to ensure CrowdSec can start
if [ ! -f "/etc/crowdsec/hub/.index.json" ]; then
echo "Updating CrowdSec hub index..."
timeout 60s cscli hub update 2>/dev/null || echo "⚠️ Hub update timed out or failed, continuing..."
# Always refresh hub index on startup (stale index causes hash mismatch errors on collection install)
echo "Updating CrowdSec hub index..."
if ! timeout 60s cscli hub update 2>&1; then
echo "⚠️ Hub index update failed (network issue?). Collections may fail to install."
echo " CrowdSec will still start with whatever index is cached."
fi
# Ensure local machine is registered (auto-heal for volume/config mismatch)
@@ -321,12 +335,11 @@ ACQUIS_EOF
echo "Registering local machine..."
cscli machines add -a --force 2>/dev/null || echo "Warning: Machine registration may have failed"
# Install hub items (parsers, scenarios, collections) if local mode enabled
if [ "$SECURITY_CROWDSEC_MODE" = "local" ]; then
echo "Installing CrowdSec hub items..."
if [ -x /usr/local/bin/install_hub_items.sh ]; then
/usr/local/bin/install_hub_items.sh 2>/dev/null || echo "Warning: Some hub items may not have installed"
fi
# Always ensure required collections are present (idempotent — already-installed items are skipped).
# Collections are just config files with zero runtime cost when CrowdSec is disabled.
echo "Ensuring CrowdSec hub items are installed..."
if [ -x /usr/local/bin/install_hub_items.sh ]; then
/usr/local/bin/install_hub_items.sh || echo "⚠️ Some hub items may not have installed. CrowdSec can still start."
fi
# Fix ownership AFTER cscli commands (they run as root and create root-owned files)
@@ -365,7 +378,7 @@ echo "Caddy started (PID: $CADDY_PID)"
echo "Waiting for Caddy admin API..."
i=1
while [ "$i" -le 30 ]; do
if curl -sf http://127.0.0.1:2019/config/ > /dev/null 2>&1; then
if wget -qO /dev/null http://127.0.0.1:2019/config/ 2>/dev/null; then
echo "Caddy is ready!"
break
fi

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -43,7 +43,7 @@ You are "lazy" in the smartest way possible. You never do what a subordinate can
- **Identify Goal**: Understand the user's request.
- **STOP**: Do not look at the code. Do not run `list_dir`. No code is to be changed or implemented until there is a fundamentally sound plan of action that has been approved by the user.
- **Action**: Immediately call `Planning` subagent.
- *Prompt*: "Research the necessary files for '{user_request}' and write a comprehensive plan detailing as many specifics as possible to `docs/plans/current_spec.md`. Be an artist with directions and discriptions. Include file names, function names, and component names wherever possible. Break the plan into phases based on the least amount of requests. Include a Commit Slicing Strategy section that decides whether to split work into multiple PRs and, when split, defines PR-1/PR-2/PR-3 scope, dependencies, and acceptance criteria. Review and suggest updaetes to `.gitignore`, `codecov.yml`, `.dockerignore`, and `Dockerfile` if necessary. Return only when the plan is complete."
- *Prompt*: "Research the necessary files for '{user_request}' and write a comprehensive plan detailing as many specifics as possible to `docs/plans/current_spec.md`. Be an artist with directions and descriptions. Include file names, function names, and component names wherever possible. Break the plan into phases based on the least amount of requests. Include a Commit Slicing Strategy section that organizes work into logical commits within a single PR — one feature = one PR, with ordered commits (Commit 1, Commit 2, …) each defining scope, files, dependencies, and validation gates. Review and suggest updates to `.gitignore`, `codecov.yml`, `.dockerignore`, and `Dockerfile` if necessary. Return only when the plan is complete."
- **Task Specifics**:
- If the task is to just run tests or audits, there is no need for a plan. Directly call `QA_Security` to perform the tests and write the report. If issues are found, return to `Planning` for a remediation plan and delegate the fixes to the corresponding subagents.
@@ -59,27 +59,26 @@ You are "lazy" in the smartest way possible. You never do what a subordinate can
- **Ask**: "Plan created. Shall I authorize the construction?"
4. **Phase 4: Execution (Waterfall)**:
- **Single-PR or Multi-PR Decision**: Read the Commit Slicing Strategy in `docs/plans/current_spec.md`.
- **If single PR**:
- **Read Commit Slicing Strategy**: Read the Commit Slicing Strategy in `docs/plans/current_spec.md` to understand the ordered commits.
- **Single PR, Multiple Commits**: All work ships as one PR. Each commit maps to a phase in the plan.
- **Backend**: Call `Backend_Dev` with the plan file.
- **Frontend**: Call `Frontend_Dev` with the plan file.
- **If multi-PR**:
- Execute in PR slices, one slice at a time, in dependency order.
- Require each slice to pass review + QA gates before starting the next slice.
- Keep every slice deployable and independently testable.
- **MANDATORY**: Implementation agents must perform linting and type checks locally before declaring their slice "DONE". This is a critical step that must not be skipped to avoid broken commits and security issues.
- Execute commits in dependency order. Each commit must pass its validation gates before the next commit begins.
- The PR is merged only when all commits are complete and all DoD gates pass.
- **MANDATORY**: Implementation agents must perform linting and type checks locally before declaring their commit "DONE". This is a critical step that must not be skipped to avoid broken commits and security issues.
5. **Phase 5: Review**:
- **Supervisor**: Call `Supervisor` to review the implementation against the plan. Provide feedback and ensure alignment with best practices.
6. **Phase 6: Audit**:
- Review Security: Read `security.md.instructions.md` and `SECURITY.md` to understand the security requirements and best practices for Charon. Ensure that any open concerns or issues are addressed in the QA Audit and `SECURITY.md` is updated accordingly.
- **QA**: Call `QA_Security` to meticulously test current implementation as well as regression test. Run all linting, security tasks, and manual lefthook checks. Write a report to `docs/reports/qa_report.md`. Start back at Phase 1 if issues are found.
7. **Phase 7: Closure**:
- **Docs**: Call `Docs_Writer`.
- **Manual Testing**: create a new test plan in `docs/issues/*.md` for tracking manual testing focused on finding potential bugs of the implemented features.
- **Final Report**: Summarize the successful subagent runs.
- **PR Roadmap**: If split mode was used, include a concise roadmap of completed and remaining PR slices.
- **Commit Roadmap**: Include a concise summary of completed and remaining commits within the PR.
**Mandatory Commit Message**: When you reach a stopping point, provide a copy and paste code block commit message at the END of the response on format laid out in `.github/instructions/commit-message.instructions.md`
- **STRICT RULES**:
@@ -166,23 +165,27 @@ The task is not complete until ALL of the following pass with zero issues:
- **Base URL**: Uses `PLAYWRIGHT_BASE_URL` or default from `playwright.config.js`
- All E2E tests must pass before proceeding to unit tests
2. **Local Patch Coverage Preflight (MANDATORY - Before Unit/Coverage Tests)**:
- Ensure the local patch report is run first via VS Code task `Test: Local Patch Report` or `bash scripts/local-patch-report.sh`.
- Verify both artifacts exist: `test-results/local-patch-report.md` and `test-results/local-patch-report.json`.
- Use this report to identify changed files needing coverage before running backend/frontend coverage suites.
3. **Coverage Tests (MANDATORY - Verify Explicitly)**:
2. **Coverage Tests (MANDATORY - Verify Explicitly)**:
- **Backend**: Ensure `Backend_Dev` ran VS Code task "Test: Backend with Coverage" or `scripts/go-test-coverage.sh`
- **Frontend**: Ensure `Frontend_Dev` ran VS Code task "Test: Frontend with Coverage" or `scripts/frontend-test-coverage.sh`
- **Why**: These are in manual stage of pre-commit for performance. Subagents MUST run them via VS Code tasks or scripts.
- Minimum coverage: 85% for both backend and frontend.
- All tests must pass with zero failures.
- **Outputs**: `backend/coverage.txt` and `frontend/coverage/lcov.info` — these are required inputs for step 3.
3. **Local Patch Coverage Report (MANDATORY - After Coverage Tests)**:
- **Purpose**: Identify uncovered lines in files modified by this task so missing tests are written before declaring Done. This is the bridge between "overall coverage is fine" and "the actual lines I changed are tested."
- **Prerequisites**: `backend/coverage.txt` and `frontend/coverage/lcov.info` must exist (generated by step 2). If missing, run coverage tests first.
- **Run**: VS Code task `Test: Local Patch Report` or `bash scripts/local-patch-report.sh`.
- **Verify artifacts**: Both `test-results/local-patch-report.md` and `test-results/local-patch-report.json` must exist with non-empty results.
- **Act on findings**: If patch coverage for any changed file is below **90%**, delegate to the responsible agent (`Backend_Dev` or `Frontend_Dev`) to add targeted tests covering the uncovered lines. Re-run coverage (step 2) and this report until the threshold is met.
- **Blocking gate**: 90% overall patch coverage. Do not proceed to pre-commit or security scans until resolved or explicitly waived by the user.
4. **Type Safety (Frontend)**:
- Ensure `Frontend_Dev` ran VS Code task "Lint: TypeScript Check" or `npm run type-check`
- **Why**: This check is in manual stage of pre-commit for performance. Subagents MUST run it explicitly.
5. **Pre-commit Hooks**: Ensure `QA_Security` ran `pre-commit run --all-files` (fast hooks only; coverage was verified in step 3)
5. **Pre-commit Hooks**: Ensure `QA_Security` ran `pre-commit run --all-files` (fast hooks only; coverage was verified in step 2)
6. **Security Scans**: Ensure `QA_Security` ran the following with zero Critical or High severity issues:
- **Trivy Filesystem Scan**: Fast scan of source code and dependencies

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,204 @@
---
applyTo: SECURITY.md
---
# Instructions: Maintaining `SECURITY.md`
`SECURITY.md` is the project's living security record. It serves two audiences simultaneously: users who need to know what risks exist right now, and the broader community who need confidence that vulnerabilities are being tracked and remediated with discipline. Treat it like a changelog, but for security events — every known issue gets an entry, every resolved issue keeps its entry.
---
## File Structure
`SECURITY.md` must always contain the following top-level sections, in this order:
1. A brief project security policy preamble (responsible disclosure contact, response SLA)
2. **`## Known Vulnerabilities`** — active, unpatched issues
3. **`## Patched Vulnerabilities`** — resolved issues, retained permanently for audit trail
No other top-level sections are required. Do not collapse or remove sections even when they are empty — use the explicit empty-state placeholder defined below.
---
## Section 1: Known Vulnerabilities
This section lists every vulnerability that is currently unpatched or only partially mitigated. Entries must be sorted with the highest severity first, then by discovery date descending within the same severity tier.
### Entry Format
Each entry is an H3 heading followed by a structured block:
```markdown
### [SEVERITY] CVE-XXXX-XXXXX · Short Title
| Field | Value |
|--------------|-------|
| **ID** | CVE-XXXX-XXXXX (or `CHARON-YYYY-NNN` if no CVE assigned yet) |
| **Severity** | Critical / High / Medium / Low · CVSS v3.1 score if known (e.g. `8.1 · High`) |
| **Status** | Investigating / Fix In Progress / Awaiting Upstream / Mitigated (partial) |
**What**
One to three sentences describing the vulnerability class and its impact.
Be specific: name the weakness type (e.g. SQL injection, path traversal, SSRF).
**Who**
- Discovered by: [Reporter name or handle, or "Internal audit", or "Automated scan (tool name)"]
- Reported: YYYY-MM-DD
- Affects: [User roles, API consumers, unauthenticated users, etc.]
**Where**
- Component: [Module or service name]
- File(s): `path/to/affected/file.go`, `path/to/other/file.ts`
- Versions affected: `>= X.Y.Z` (or "all versions" / "prior to X.Y.Z")
**When**
- Discovered: YYYY-MM-DD
- Disclosed (if public): YYYY-MM-DD (or "Not yet publicly disclosed")
- Target fix: YYYY-MM-DD (or sprint/milestone reference)
**How**
A concise technical description of the attack vector, prerequisites, and exploitation
method. Omit proof-of-concept code. Reference CVE advisories or upstream issue
trackers where appropriate.
**Planned Remediation**
Describe the fix strategy: library upgrade, logic refactor, config change, etc.
If a workaround is available in the meantime, document it here.
Link to the tracking issue: [#NNN](https://github.com/owner/repo/issues/NNN)
```
### Empty State
When there are no known vulnerabilities:
```markdown
## Known Vulnerabilities
No known unpatched vulnerabilities at this time.
Last reviewed: YYYY-MM-DD
```
---
## Section 2: Patched Vulnerabilities
This section is a permanent, append-only ledger. Entries are never deleted. Sort newest-patched first. This section builds community trust by demonstrating that issues are resolved promptly and transparently.
### Entry Format
```markdown
### ✅ [SEVERITY] CVE-XXXX-XXXXX · Short Title
| Field | Value |
|--------------|-------|
| **ID** | CVE-XXXX-XXXXX (or internal ID) |
| **Severity** | Critical / High / Medium / Low · CVSS v3.1 score |
| **Patched** | YYYY-MM-DD in `vX.Y.Z` |
**What**
Same description carried over from the Known Vulnerabilities entry.
**Who**
- Discovered by: [Reporter or method]
- Reported: YYYY-MM-DD
**Where**
- Component: [Module or service name]
- File(s): `path/to/affected/file.go`
- Versions affected: `< X.Y.Z`
**When**
- Discovered: YYYY-MM-DD
- Patched: YYYY-MM-DD
- Time to patch: N days
**How**
Same technical description as the original entry.
**Resolution**
Describe exactly what was changed to fix the issue.
- Commit: [`abc1234`](https://github.com/owner/repo/commit/abc1234)
- PR: [#NNN](https://github.com/owner/repo/pull/NNN)
- Release: [`vX.Y.Z`](https://github.com/owner/repo/releases/tag/vX.Y.Z)
**Credit**
[Optional] Thank the reporter if they consented to attribution.
```
### Empty State
```markdown
## Patched Vulnerabilities
No patched vulnerabilities on record yet.
```
---
## Lifecycle: Moving an Entry from Known → Patched
When a fix ships:
1. Remove the entry from `## Known Vulnerabilities` entirely.
2. Add a new entry to the **top** of `## Patched Vulnerabilities` using the patched format above.
3. Carry forward all original fields verbatim — do not rewrite the history of the issue.
4. Add the `**Resolution**` and `**Credit**` blocks with patch details.
5. Update the `Last reviewed` date on the Known Vulnerabilities section if it is now empty.
Do not edit or backfill existing Patched entries once they are committed.
---
## Severity Classification
Use the following definitions consistently:
| Severity | CVSS Range | Meaning |
|----------|------------|---------|
| **Critical** | 9.0–10.0 | Remote code execution, auth bypass, full data exposure |
| **High** | 7.0–8.9 | Significant data exposure, privilege escalation, DoS |
| **Medium** | 4.0–6.9 | Limited data exposure, requires user interaction or auth |
| **Low** | 0.1–3.9 | Minimal impact, difficult to exploit, defense-in-depth |
When a CVE CVSS score is not yet available, assign a preliminary severity based on these definitions and note it as `(preliminary)` until confirmed.
---
## Internal IDs
If a vulnerability has no CVE assigned, use the format `CHARON-YYYY-NNN` where `YYYY` is the year and `NNN` is a zero-padded sequence number starting at `001` for each year. Example: `CHARON-2025-003`. If a CVE is issued later, retroactively update the entry with the CVE ID and keep the internal ID as an alias in parentheses.
---
## Responsible Disclosure Preamble
The preamble at the top of `SECURITY.md` (before the vulnerability sections) must include:
- The preferred contact method for reporting vulnerabilities (e.g. a GitHub private advisory link, a security email address, or both)
- An acknowledgment-first response commitment: confirm receipt within 48 hours, even if the full investigation takes longer
- A statement that reporters will not be penalized or publicly named without consent
- A link to the full disclosure policy if one exists
Example:
```markdown
## Reporting a Vulnerability
To report a security issue, please use
[GitHub Private Security Advisories](https://github.com/owner/repo/security/advisories/new)
or email `security@example.com`.
We will acknowledge your report within **48 hours** and provide a remediation
timeline within **7 days**. Reporters are credited with their consent.
We do not pursue legal action against good-faith security researchers.
```
---
## Maintenance Rules
- **Review cadence**: Update the `Last reviewed` date in the Known Vulnerabilities section at least once per release cycle, even if no entries changed.
- **No silent patches**: Every security fix — no matter how minor — must produce an entry in `## Patched Vulnerabilities` before or alongside the release.
- **No redaction**: Do not redact or soften historical entries. Accuracy builds trust; minimizing past issues destroys it.
- **Dependency vulnerabilities**: Transitive dependency CVEs that affect Charon's exposed attack surface must be tracked here the same as first-party vulnerabilities. Pure dev-dependency CVEs with no runtime impact may be omitted at maintainer discretion, but must still be noted in the relevant dependency update PR.
- **Partial mitigations**: If a workaround is deployed but the root cause is not fixed, the entry stays in `## Known Vulnerabilities` with `Status: Mitigated (partial)` and the workaround documented in `**Planned Remediation**`.

View File

@@ -23,21 +23,21 @@ runSubagent({
- Validate: `plan_file` exists and contains a `Handoff Contract` JSON.
- Kickoff: call `Planning` to create the plan if not present.
- Decide: check if work should be split into multiple PRs (size, risk, cross-domain impact).
- Decide: check how to organize work into logical commits within a single PR (size, risk, cross-domain impact).
- Run: execute `Backend Dev` then `Frontend Dev` sequentially.
- Parallel: run `QA and Security`, `DevOps` and `Doc Writer` in parallel for CI / QA checks and documentation.
- Return: a JSON summary with `subagent_results`, `overall_status`, and aggregated artifacts.
2.1) Multi-Commit Slicing Protocol
- If a task is large or high-risk, split into PR slices and execute in order.
- Each slice must have:
- All work for a single feature ships as one PR with ordered logical commits.
- Each commit must have:
- Scope boundary (what is included/excluded)
- Dependency on previous slices
- Validation gates (tests/scans required for that slice)
- Explicit rollback notes
- Do not start the next slice until the current slice is complete and verified.
- Keep each slice independently reviewable and deployable.
- Dependency on previous commits
- Validation gates (tests/scans required for that commit)
- Explicit rollback notes for the PR as a whole
- Do not start the next commit until the current commit is complete and verified.
- Keep each commit independently reviewable within the PR.
3) Return Contract that all subagents must return
@@ -55,7 +55,7 @@ runSubagent({
- On a subagent failure, the Management agent must capture `tests.output` and decide to retry (1 retry maximum), or request a revert/rollback.
- Clearly mark the `status` as `failed`, and include `errors` and `failing_tests` in the `summary`.
- For multi-PR execution, mark failed slice as blocked and stop downstream slices until resolved.
- For multi-commit execution, mark failed commit as blocked and stop downstream commits until resolved.
5) Example: Run a full Feature Implementation

View File

@@ -12,9 +12,19 @@ instruction files take precedence over agent files and operator documentation.
**MANDATORY**: Before running unit tests, verify the application UI/UX functions correctly end-to-end.
## 0.5 Local Patch Coverage Preflight (Before Unit Tests)
## 0.5 Local Patch Coverage Report (After Coverage Tests)
**MANDATORY**: After E2E and before backend/frontend unit coverage runs, generate a local patch report so uncovered changed lines are visible early.
**MANDATORY**: After running backend and frontend coverage tests (which generate
`backend/coverage.txt` and `frontend/coverage/lcov.info`), run the local patch
report to identify uncovered lines in changed files.
**Purpose**: Overall coverage can be healthy while the specific lines you changed
are untested. This step catches that gap. If uncovered lines are found in
feature code, add targeted tests before completing the task.
**Prerequisites**: Coverage artifacts must exist before running the report:
- `backend/coverage.txt` — generated by `scripts/go-test-coverage.sh`
- `frontend/coverage/lcov.info` — generated by `scripts/frontend-test-coverage.sh`
Run one of the following from `/projects/Charon`:
@@ -26,11 +36,14 @@ Test: Local Patch Report
bash scripts/local-patch-report.sh
```
Required artifacts:
Required output artifacts:
- `test-results/local-patch-report.md`
- `test-results/local-patch-report.json`
This preflight is advisory for thresholds during rollout, but artifact generation is required in DoD.
**Action on results**: If patch coverage for any changed file is below 90%, add
tests targeting the uncovered changed lines. Re-run coverage and this report to
verify improvement. Artifact generation is required for DoD regardless of
threshold results.
### PREREQUISITE: Start E2E Environment

83
.github/renovate.json vendored
View File

@@ -6,11 +6,11 @@
":separateMultipleMajorReleases",
"helpers:pinGitHubActionDigests"
],
"baseBranches": [
"baseBranchPatterns": [
"feature/beta-release",
"development"
],
"postUpdateOptions": ["npmDedupe"],
"timezone": "America/New_York",
"dependencyDashboard": true,
"dependencyDashboardApproval": true,
@@ -130,6 +130,32 @@
"datasourceTemplate": "go",
"versioningTemplate": "semver"
},
{
"customType": "regex",
"description": "Track gotestsum version in codecov workflow",
"managerFilePatterns": [
"/^\\.github/workflows/codecov-upload\\.yml$/"
],
"matchStrings": [
"gotestsum@v(?<currentValue>[^\\s]+)"
],
"depNameTemplate": "gotest.tools/gotestsum",
"datasourceTemplate": "go",
"versioningTemplate": "semver"
},
{
"customType": "regex",
"description": "Track gotestsum version in quality checks workflow",
"managerFilePatterns": [
"/^\\.github/workflows/quality-checks\\.yml$/"
],
"matchStrings": [
"gotestsum@v(?<currentValue>[^\\s]+)"
],
"depNameTemplate": "gotest.tools/gotestsum",
"datasourceTemplate": "go",
"versioningTemplate": "semver"
},
{
"customType": "regex",
"description": "Track govulncheck version in scripts",
@@ -205,20 +231,39 @@
"datasourceTemplate": "github-releases",
"versioningTemplate": "semver",
"extractVersionTemplate": "^v(?<version>.*)$"
},
{
"customType": "regex",
"description": "Track go-version in skill example workflows",
"managerFilePatterns": ["/^\\.github/skills/examples/.*\\.yml$/"],
"matchStrings": [
"go-version: [\"']?(?<currentValue>[\\d\\.]+)[\"']?"
],
"depNameTemplate": "golang/go",
"datasourceTemplate": "golang-version",
"versioningTemplate": "semver"
}
],
"github-actions": {
"managerFilePatterns": [
"/^\\.github/skills/examples/.*\\.ya?ml$/"
]
},
"packageRules": [
{
"description": "THE MEGAZORD: Group ALL non-major updates (NPM, Docker, Go, Actions) into one PR",
"matchPackagePatterns": ["*"],
"matchUpdateTypes": [
"minor",
"patch",
"pin",
"digest"
],
"groupName": "non-major-updates"
"groupName": "non-major-updates",
"matchPackageNames": [
"*"
]
},
{
"description": "Feature branches: Auto-merge non-major updates after proven stable",
@@ -250,11 +295,41 @@
"matchPackageNames": ["caddy"],
"allowedVersions": "<3.0.0"
},
{
"description": "Go: keep pgx within v4 (CrowdSec requires pgx/v4 module path)",
"matchDatasources": ["go"],
"matchPackageNames": ["github.com/jackc/pgx/v4"],
"allowedVersions": "<5.0.0"
},
{
"description": "Go: keep go-jose/v3 within v3 (v4 is a different Go module path)",
"matchDatasources": ["go"],
"matchPackageNames": ["github.com/go-jose/go-jose/v3"],
"allowedVersions": "<4.0.0"
},
{
"description": "Go: keep go-jose/v4 within v4 (v5 would be a different Go module path)",
"matchDatasources": ["go"],
"matchPackageNames": ["github.com/go-jose/go-jose/v4"],
"allowedVersions": "<5.0.0"
},
{
"description": "Safety: Keep MAJOR updates separate and require manual review",
"matchUpdateTypes": ["major"],
"automerge": false,
"labels": ["manual-review"]
},
{
"description": "Fix Renovate lookup for geoip2-golang v2 module path",
"matchDatasources": ["go"],
"matchPackageNames": ["github.com/oschwald/geoip2-golang/v2"],
"sourceUrl": "https://github.com/oschwald/geoip2-golang"
},
{
"description": "Fix Renovate lookup for google/uuid",
"matchDatasources": ["go"],
"matchPackageNames": ["github.com/google/uuid"],
"sourceUrl": "https://github.com/google/uuid"
}
]
}

View File

@@ -20,12 +20,12 @@ jobs:
steps:
- name: Checkout Code
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Setup Go
uses: actions/setup-go@v5
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: "1.26.1"
go-version: "1.26.2"
- name: Run GORM Security Scanner
id: gorm-scan
@@ -56,7 +56,7 @@ jobs:
- name: Comment on PR
if: always() && github.event_name == 'pull_request'
uses: actions/github-script@v7
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const critical = ${{ steps.parse-report.outputs.critical }};
@@ -89,7 +89,7 @@ jobs:
- name: Upload GORM Scan Report
if: always()
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: gorm-security-report-${{ github.run_id }}
path: docs/reports/gorm-scan-ci-*.txt

View File

@@ -35,7 +35,7 @@ fi
# Check Grype
if ! command -v grype >/dev/null 2>&1; then
log_error "Grype not found - install from: https://github.com/anchore/grype"
log_error "Installation: curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.109.1"
log_error "Installation: curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.111.0"
error_exit "Grype is required for vulnerability scanning" 2
fi
@@ -50,8 +50,8 @@ SYFT_INSTALLED_VERSION=$(syft version | grep -oP 'Version:\s*\Kv?[0-9]+\.[0-9]+\
GRYPE_INSTALLED_VERSION=$(grype version | grep -oP 'Version:\s*\Kv?[0-9]+\.[0-9]+\.[0-9]+' | head -1 || echo "unknown")
# Set defaults matching CI workflow
set_default_env "SYFT_VERSION" "v1.42.2"
set_default_env "GRYPE_VERSION" "v0.109.1"
set_default_env "SYFT_VERSION" "v1.42.4"
set_default_env "GRYPE_VERSION" "v0.111.0"
set_default_env "IMAGE_TAG" "charon:local"
set_default_env "FAIL_ON_SEVERITY" "Critical,High"
@@ -139,7 +139,10 @@ log_info "This may take 30-60 seconds on first run (database download)"
# Run Grype against the SBOM (generated from image, not filesystem)
# This matches exactly what CI does in supply-chain-pr.yml
# --config ensures .grype.yaml ignore rules are applied, separating
# ignored matches from actionable ones in the JSON output
if grype sbom:sbom.cyclonedx.json \
--config .grype.yaml \
--output json \
--file grype-results.json; then
log_success "Vulnerability scan complete"
@@ -149,6 +152,7 @@ fi
# Generate SARIF output for GitHub Security (matches CI)
grype sbom:sbom.cyclonedx.json \
--config .grype.yaml \
--output sarif \
--file grype-results.sarif 2>/dev/null || true

View File

@@ -8,6 +8,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number || github.event.pull_request.number }}
cancel-in-progress: false
permissions:
contents: read
jobs:
add-to-project:
runs-on: ubuntu-latest

View File

@@ -12,6 +12,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
cancel-in-progress: true
permissions:
contents: write
jobs:
update-draft:
runs-on: ubuntu-latest
@@ -21,6 +24,6 @@ jobs:
with:
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
- name: Draft Release
uses: release-drafter/release-drafter@6a93d829887aa2e0748befe2e808c66c0ec6e4c7 # v6
uses: release-drafter/release-drafter@5de93583980a40bd78603b6dfdcda5b4df377b32 # v7
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -8,6 +8,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number }}
cancel-in-progress: true
permissions:
contents: read
jobs:
auto-label:
runs-on: ubuntu-latest
@@ -15,7 +18,7 @@ jobs:
issues: write
steps:
- name: Auto-label based on title and body
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const issue = context.payload.issue;

View File

@@ -33,7 +33,7 @@ jobs:
- name: Calculate Semantic Version
id: semver
uses: paulhatch/semantic-version@f29500c9d60a99ed5168e39ee367e0976884c46e # v6.0.1
uses: paulhatch/semantic-version@9f72830310d5ed81233b641ee59253644cd8a8fc # v6.0.2
with:
# The prefix to use to create tags
tag_prefix: "v"
@@ -89,7 +89,7 @@ jobs:
- name: Create GitHub Release (creates tag via API)
if: ${{ steps.semver.outputs.changed == 'true' && steps.check_release.outputs.exists == 'false' }}
uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2
uses: softprops/action-gh-release@b4309332981a82ec1c5618f44dd2e27cc8bfbfda # v3
with:
tag_name: ${{ steps.determine_tag.outputs.tag }}
name: Release ${{ steps.determine_tag.outputs.tag }}

View File

@@ -12,7 +12,7 @@ concurrency:
cancel-in-progress: true
env:
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
GOTOOLCHAIN: auto
# Minimal permissions at workflow level; write permissions granted at job level for push only
@@ -35,7 +35,7 @@ jobs:
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
- name: Set up Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: ${{ env.GO_VERSION }}
@@ -52,7 +52,7 @@ jobs:
# This avoids gh-pages branch errors and permission issues on fork PRs
if: github.event.workflow_run.event == 'push' && github.event.workflow_run.head_branch == 'main'
# Security: Pinned to full SHA for supply chain security
uses: benchmark-action/github-action-benchmark@4e0b38bc48375986542b13c0d8976b7b80c60c00 # v1
uses: benchmark-action/github-action-benchmark@a60cea5bc7b49e15c1f58f411161f99e0df48372 # v1.22.0
with:
name: Go Benchmark
tool: 'go'

View File

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check for Caddy v3 and open issue
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const upstream = { owner: 'caddyserver', repo: 'caddy' };

View File

@@ -20,6 +20,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
cerberus-integration:
name: Cerberus Security Stack Integration
@@ -31,7 +34,7 @@ jobs:
- name: Build Docker image (Local)
run: |
echo "Building image locally for integration tests..."
docker build -t charon:local .
docker build -t charon:local --build-arg CI="${CI:-false}" .
echo "✅ Successfully built charon:local"
- name: Run Cerberus integration tests

View File

@@ -23,7 +23,7 @@ concurrency:
cancel-in-progress: true
env:
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
NODE_VERSION: '24.12.0'
GOTOOLCHAIN: auto
@@ -45,7 +45,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: ${{ env.GO_VERSION }}
@@ -126,6 +126,9 @@ jobs:
echo "__CHARON_EOF__"
} >> "$GITHUB_ENV"
- name: Install gotestsum
run: go install gotest.tools/gotestsum@v1.13.0
- name: Run Go tests with coverage
working-directory: ${{ github.workspace }}
env:
@@ -134,8 +137,16 @@ jobs:
bash scripts/go-test-coverage.sh 2>&1 | tee backend/test-output.txt
exit "${PIPESTATUS[0]}"
- name: Upload test output artifact
if: always()
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: backend-test-output
path: backend/test-output.txt
retention-days: 7
- name: Upload backend coverage to Codecov
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./backend/coverage.txt
@@ -155,7 +166,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
@@ -172,7 +183,7 @@ jobs:
exit "${PIPESTATUS[0]}"
- name: Upload frontend coverage to Codecov
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./frontend/coverage

View File

@@ -15,7 +15,7 @@ concurrency:
env:
GOTOOLCHAIN: auto
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
permissions:
contents: read
@@ -52,7 +52,7 @@ jobs:
run: bash scripts/ci/check-codeql-parity.sh
- name: Initialize CodeQL
uses: github/codeql-action/init@0d579ffd059c29b07949a3cce3983f0780820c98 # v4
uses: github/codeql-action/init@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4
with:
languages: ${{ matrix.language }}
queries: security-and-quality
@@ -63,7 +63,7 @@ jobs:
- name: Setup Go
if: matrix.language == 'go'
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: ${{ env.GO_VERSION }}
cache-dependency-path: backend/go.sum
@@ -92,10 +92,10 @@ jobs:
run: mkdir -p sarif-results
- name: Autobuild
uses: github/codeql-action/autobuild@0d579ffd059c29b07949a3cce3983f0780820c98 # v4
uses: github/codeql-action/autobuild@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0d579ffd059c29b07949a3cce3983f0780820c98 # v4
uses: github/codeql-action/analyze@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4
with:
category: "/language:${{ matrix.language }}"
output: sarif-results/${{ matrix.language }}

View File

@@ -88,7 +88,7 @@ jobs:
- name: Upload GHCR prune artifacts
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: prune-ghcr-log-${{ github.run_id }}
path: |
@@ -159,7 +159,7 @@ jobs:
- name: Upload Docker Hub prune artifacts
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: prune-dockerhub-log-${{ github.run_id }}
path: |

View File

@@ -8,6 +8,9 @@ concurrency:
group: ${{ github.workflow }}
cancel-in-progress: false
permissions:
contents: read
jobs:
create-labels:
runs-on: ubuntu-latest
@@ -15,7 +18,7 @@ jobs:
issues: write
steps:
- name: Create all project labels
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const labels = [

View File

@@ -20,6 +20,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
crowdsec-integration:
name: CrowdSec Bouncer Integration
@@ -31,7 +34,7 @@ jobs:
- name: Build Docker image (Local)
run: |
echo "Building image locally for integration tests..."
docker build -t charon:local .
docker build -t charon:local --build-arg CI="${CI:-false}" .
echo "✅ Successfully built charon:local"
- name: Run CrowdSec integration tests

View File

@@ -23,7 +23,7 @@ name: Docker Build, Publish & Test
on:
pull_request:
push:
branches: [main]
branches: [main, development]
workflow_dispatch:
workflow_run:
workflows: ["Docker Lint"]
@@ -33,6 +33,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
cancel-in-progress: true
permissions:
contents: read
env:
GHCR_REGISTRY: ghcr.io
DOCKERHUB_REGISTRY: docker.io
@@ -42,7 +45,7 @@ env:
TRIGGER_HEAD_SHA: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}
TRIGGER_REF: ${{ github.event_name == 'workflow_run' && format('refs/heads/{0}', github.event.workflow_run.head_branch) || github.ref }}
TRIGGER_HEAD_REF: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.head_ref }}
TRIGGER_PR_NUMBER: ${{ github.event_name == 'workflow_run' && join(github.event.workflow_run.pull_requests.*.number, '') || github.event.pull_request.number }}
TRIGGER_PR_NUMBER: ${{ github.event_name == 'workflow_run' && join(github.event.workflow_run.pull_requests.*.number, '') || format('{0}', github.event.pull_request.number) }}
TRIGGER_ACTOR: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.actor.login || github.actor }}
jobs:
@@ -130,7 +133,7 @@ jobs:
- name: Log in to GitHub Container Registry
if: steps.skip.outputs.skip_build != 'true'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.GHCR_REGISTRY }}
username: ${{ github.actor }}
@@ -138,7 +141,7 @@ jobs:
- name: Log in to Docker Hub
if: steps.skip.outputs.skip_build != 'true' && env.HAS_DOCKERHUB_TOKEN == 'true'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -234,7 +237,7 @@ jobs:
- name: Build and push Docker image (with retry)
if: steps.skip.outputs.skip_build != 'true'
id: build-and-push
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # v3.0.2
uses: nick-fields/retry@ad984534de44a9489a53aefd81eb77f87c70dc60 # v4.0.0
with:
timeout_minutes: 25
max_attempts: 3
@@ -344,7 +347,7 @@ jobs:
- name: Upload Image Artifact
if: success() && steps.skip.outputs.skip_build != 'true' && env.TRIGGER_EVENT == 'pull_request'
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: ${{ env.TRIGGER_EVENT == 'pull_request' && format('pr-image-{0}', env.TRIGGER_PR_NUMBER) || 'push-image' }}
path: /tmp/charon-pr-image.tar
@@ -538,7 +541,7 @@ jobs:
format: 'table'
severity: 'CRITICAL,HIGH'
exit-code: '0'
version: 'v0.69.3'
version: 'v0.70.0'
continue-on-error: true
- name: Run Trivy vulnerability scanner (SARIF)
@@ -550,7 +553,7 @@ jobs:
format: 'sarif'
output: 'trivy-results.sarif'
severity: 'CRITICAL,HIGH'
version: 'v0.69.3'
version: 'v0.70.0'
continue-on-error: true
- name: Check Trivy SARIF exists
@@ -565,7 +568,7 @@ jobs:
- name: Upload Trivy results
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.trivy-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
uses: github/codeql-action/upload-sarif@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
with:
sarif_file: 'trivy-results.sarif'
category: '.github/workflows/docker-build.yml:build-and-push'
@@ -574,7 +577,7 @@ jobs:
# Generate SBOM (Software Bill of Materials) for supply chain security
# Only for production builds (main/development) - feature branches use downstream supply-chain-pr.yml
- name: Generate SBOM
uses: anchore/sbom-action@57aae528053a48a3f6235f2d9461b05fbcb7366d # v0.23.1
uses: anchore/sbom-action@e22c389904149dbc22b58101806040fa8d37a610 # v0.24.0
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
with:
image: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
@@ -583,7 +586,7 @@ jobs:
# Create verifiable attestation for the SBOM
- name: Attest SBOM
uses: actions/attest-sbom@07e74fc4e78d1aad915e867f9a094073a9f71527 # v4.0.0
uses: actions/attest-sbom@c604332985a26aa8cf1bdc465b92731239ec6b9e # v4.1.0
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
with:
subject-name: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}
@@ -594,7 +597,7 @@ jobs:
# Install Cosign for keyless signing
- name: Install Cosign
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22 # v4.1.0
uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v4.1.1
# Sign GHCR image with keyless signing (Sigstore/Fulcio)
- name: Sign GHCR Image
@@ -660,7 +663,7 @@ jobs:
echo "image_ref=${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:${PR_TAG}" >> "$GITHUB_OUTPUT"
- name: Log in to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.GHCR_REGISTRY }}
username: ${{ github.actor }}
@@ -698,7 +701,7 @@ jobs:
format: 'table'
severity: 'CRITICAL,HIGH'
exit-code: '0'
version: 'v0.69.3'
version: 'v0.70.0'
- name: Run Trivy scan on PR image (SARIF - blocking)
id: trivy-scan
@@ -709,7 +712,7 @@ jobs:
output: 'trivy-pr-results.sarif'
severity: 'CRITICAL,HIGH'
exit-code: '1' # Intended to block, but continued on error for now
version: 'v0.69.3'
version: 'v0.70.0'
continue-on-error: true
- name: Check Trivy PR SARIF exists
@@ -724,14 +727,14 @@ jobs:
- name: Upload Trivy scan results
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
uses: github/codeql-action/upload-sarif@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
with:
sarif_file: 'trivy-pr-results.sarif'
category: 'docker-pr-image'
- name: Upload Trivy compatibility results (docker-build category)
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
uses: github/codeql-action/upload-sarif@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
with:
sarif_file: 'trivy-pr-results.sarif'
category: '.github/workflows/docker-build.yml:build-and-push'
@@ -739,7 +742,7 @@ jobs:
- name: Upload Trivy compatibility results (docker-publish alias)
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
uses: github/codeql-action/upload-sarif@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
with:
sarif_file: 'trivy-pr-results.sarif'
category: '.github/workflows/docker-publish.yml:build-and-push'
@@ -747,7 +750,7 @@ jobs:
- name: Upload Trivy compatibility results (nightly alias)
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
uses: github/codeql-action/upload-sarif@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
with:
sarif_file: 'trivy-pr-results.sarif'
category: 'trivy-nightly'

View File

@@ -44,7 +44,7 @@ jobs:
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: ${{ env.NODE_VERSION }}
@@ -53,7 +53,7 @@ jobs:
- name: Detect changed files
id: changes
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
env:
COMMIT_SHA: ${{ github.event.workflow_run.head_sha || github.sha }}
with:
@@ -95,7 +95,7 @@ jobs:
- name: Process issue files
id: process
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
env:
DRY_RUN: ${{ github.event.inputs.dry_run || 'false' }}
with:

View File

@@ -38,7 +38,7 @@ jobs:
# Step 2: Set up Node.js (for building any JS-based doc tools)
- name: 🔧 Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: ${{ env.NODE_VERSION }}
@@ -352,7 +352,7 @@ jobs:
# Step 4: Upload the built site
- name: 📤 Upload artifact
uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4
uses: actions/upload-pages-artifact@fc324d3547104276b827a68afc52ff2a11cc49c9 # v5
with:
path: '_site'
@@ -372,7 +372,7 @@ jobs:
# Deploy to GitHub Pages
- name: 🚀 Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4
uses: actions/deploy-pages@cd2ce8fcbc39b97be8ca5fce6e763baed58fa128 # v5
# Create a summary
- name: 📋 Create deployment summary

View File

@@ -83,7 +83,7 @@ on:
env:
NODE_VERSION: '20'
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
GOTOOLCHAIN: auto
DOCKERHUB_REGISTRY: docker.io
IMAGE_NAME: ${{ github.repository_owner }}/charon
@@ -142,7 +142,7 @@ jobs:
- name: Set up Go
if: steps.resolve-image.outputs.image_source == 'build'
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: ${{ env.GO_VERSION }}
@@ -151,14 +151,14 @@ jobs:
- name: Set up Node.js
if: steps.resolve-image.outputs.image_source == 'build'
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
- name: Cache npm dependencies
if: steps.resolve-image.outputs.image_source == 'build'
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5
with:
path: ~/.npm
key: npm-${{ hashFiles('package-lock.json') }}
@@ -175,7 +175,7 @@ jobs:
- name: Build Docker image
id: build-image
if: steps.resolve-image.outputs.image_source == 'build'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7
with:
context: .
file: ./Dockerfile
@@ -191,7 +191,7 @@ jobs:
- name: Upload Docker image artifact
if: steps.resolve-image.outputs.image_source == 'build'
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: docker-image
path: charon-e2e-image.tar
@@ -225,7 +225,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
@@ -233,7 +233,7 @@ jobs:
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -348,7 +348,7 @@ jobs:
- name: Upload HTML report (Chromium Security)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-report-chromium-security
path: playwright-report/
@@ -356,7 +356,7 @@ jobs:
- name: Upload Chromium Security coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-coverage-chromium-security
path: coverage/e2e/
@@ -364,7 +364,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: traces-chromium-security
path: test-results/**/*.zip
@@ -383,7 +383,7 @@ jobs:
- name: Upload diagnostics
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-diagnostics-chromium-security
path: diagnostics/
@@ -396,7 +396,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: docker-logs-chromium-security
path: docker-logs-chromium-security.txt
@@ -427,7 +427,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
@@ -435,7 +435,7 @@ jobs:
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -558,7 +558,7 @@ jobs:
- name: Upload HTML report (Firefox Security)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-report-firefox-security
path: playwright-report/
@@ -566,7 +566,7 @@ jobs:
- name: Upload Firefox Security coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-coverage-firefox-security
path: coverage/e2e/
@@ -574,7 +574,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: traces-firefox-security
path: test-results/**/*.zip
@@ -593,7 +593,7 @@ jobs:
- name: Upload diagnostics
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-diagnostics-firefox-security
path: diagnostics/
@@ -606,7 +606,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: docker-logs-firefox-security
path: docker-logs-firefox-security.txt
@@ -637,7 +637,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
@@ -645,7 +645,7 @@ jobs:
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -768,7 +768,7 @@ jobs:
- name: Upload HTML report (WebKit Security)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-report-webkit-security
path: playwright-report/
@@ -776,7 +776,7 @@ jobs:
- name: Upload WebKit Security coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-coverage-webkit-security
path: coverage/e2e/
@@ -784,7 +784,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: traces-webkit-security
path: test-results/**/*.zip
@@ -803,7 +803,7 @@ jobs:
- name: Upload diagnostics
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-diagnostics-webkit-security
path: diagnostics/
@@ -816,7 +816,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: docker-logs-webkit-security
path: docker-logs-webkit-security.txt
@@ -859,7 +859,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
@@ -899,7 +899,7 @@ jobs:
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -980,6 +980,7 @@ jobs:
--project=chromium \
--shard=${{ matrix.shard }}/${{ matrix.total-shards }} \
--output=playwright-output/chromium-shard-${{ matrix.shard }} \
tests/a11y \
tests/core \
tests/dns-provider-crud.spec.ts \
tests/dns-provider-types.spec.ts \
@@ -1004,7 +1005,7 @@ jobs:
- name: Upload HTML report (Chromium shard ${{ matrix.shard }})
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-report-chromium-shard-${{ matrix.shard }}
path: playwright-report/
@@ -1012,7 +1013,7 @@ jobs:
- name: Upload Playwright output (Chromium shard ${{ matrix.shard }})
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-output-chromium-shard-${{ matrix.shard }}
path: playwright-output/chromium-shard-${{ matrix.shard }}/
@@ -1020,7 +1021,7 @@ jobs:
- name: Upload Chromium coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-coverage-chromium-shard-${{ matrix.shard }}
path: coverage/e2e/
@@ -1028,7 +1029,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: traces-chromium-shard-${{ matrix.shard }}
path: test-results/**/*.zip
@@ -1047,7 +1048,7 @@ jobs:
- name: Upload diagnostics
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-diagnostics-chromium-shard-${{ matrix.shard }}
path: diagnostics/
@@ -1060,7 +1061,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: docker-logs-chromium-shard-${{ matrix.shard }}
path: docker-logs-chromium-shard-${{ matrix.shard }}.txt
@@ -1096,7 +1097,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
@@ -1136,7 +1137,7 @@ jobs:
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -1225,6 +1226,7 @@ jobs:
--project=firefox \
--shard=${{ matrix.shard }}/${{ matrix.total-shards }} \
--output=playwright-output/firefox-shard-${{ matrix.shard }} \
tests/a11y \
tests/core \
tests/dns-provider-crud.spec.ts \
tests/dns-provider-types.spec.ts \
@@ -1249,7 +1251,7 @@ jobs:
- name: Upload HTML report (Firefox shard ${{ matrix.shard }})
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-report-firefox-shard-${{ matrix.shard }}
path: playwright-report/
@@ -1257,7 +1259,7 @@ jobs:
- name: Upload Playwright output (Firefox shard ${{ matrix.shard }})
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-output-firefox-shard-${{ matrix.shard }}
path: playwright-output/firefox-shard-${{ matrix.shard }}/
@@ -1265,7 +1267,7 @@ jobs:
- name: Upload Firefox coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-coverage-firefox-shard-${{ matrix.shard }}
path: coverage/e2e/
@@ -1273,7 +1275,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: traces-firefox-shard-${{ matrix.shard }}
path: test-results/**/*.zip
@@ -1292,7 +1294,7 @@ jobs:
- name: Upload diagnostics
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-diagnostics-firefox-shard-${{ matrix.shard }}
path: diagnostics/
@@ -1305,7 +1307,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: docker-logs-firefox-shard-${{ matrix.shard }}
path: docker-logs-firefox-shard-${{ matrix.shard }}.txt
@@ -1341,7 +1343,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
@@ -1381,7 +1383,7 @@ jobs:
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -1470,6 +1472,7 @@ jobs:
--project=webkit \
--shard=${{ matrix.shard }}/${{ matrix.total-shards }} \
--output=playwright-output/webkit-shard-${{ matrix.shard }} \
tests/a11y \
tests/core \
tests/dns-provider-crud.spec.ts \
tests/dns-provider-types.spec.ts \
@@ -1494,7 +1497,7 @@ jobs:
- name: Upload HTML report (WebKit shard ${{ matrix.shard }})
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: playwright-report-webkit-shard-${{ matrix.shard }}
path: playwright-report/
@@ -1502,7 +1505,7 @@ jobs:
- name: Upload Playwright output (WebKit shard ${{ matrix.shard }})
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: playwright-output-webkit-shard-${{ matrix.shard }}
path: playwright-output/webkit-shard-${{ matrix.shard }}/
@@ -1510,7 +1513,7 @@ jobs:
- name: Upload WebKit coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: e2e-coverage-webkit-shard-${{ matrix.shard }}
path: coverage/e2e/
@@ -1518,7 +1521,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: traces-webkit-shard-${{ matrix.shard }}
path: test-results/**/*.zip
@@ -1537,7 +1540,7 @@ jobs:
- name: Upload diagnostics
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: e2e-diagnostics-webkit-shard-${{ matrix.shard }}
path: diagnostics/
@@ -1550,7 +1553,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: docker-logs-webkit-shard-${{ matrix.shard }}
path: docker-logs-webkit-shard-${{ matrix.shard }}.txt
@@ -1606,7 +1609,7 @@ jobs:
steps:
- name: Check test results
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
env:
EFFECTIVE_BROWSER: ${{ inputs.browser || 'all' }}
EFFECTIVE_CATEGORY: ${{ inputs.test_category || 'all' }}

View File

@@ -7,6 +7,9 @@ on:
required: true
type: string
permissions:
contents: read
jobs:
cleanup:
runs-on: ubuntu-latest

View File

@@ -9,6 +9,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
cancel-in-progress: true
permissions:
contents: read
jobs:
test:
runs-on: ubuntu-latest

View File

@@ -15,13 +15,16 @@ on:
default: "false"
env:
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
NODE_VERSION: '24.12.0'
GOTOOLCHAIN: auto
GHCR_REGISTRY: ghcr.io
DOCKERHUB_REGISTRY: docker.io
IMAGE_NAME: wikid82/charon
permissions:
contents: read
jobs:
sync-development-to-nightly:
runs-on: ubuntu-latest
@@ -86,7 +89,7 @@ jobs:
contents: read
steps:
- name: Dispatch Missing Nightly Validation Workflows
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const owner = context.repo.owner;
@@ -178,7 +181,7 @@ jobs:
echo "image=${ALPINE_IMAGE_REF}" >> "$GITHUB_OUTPUT"
- name: Log in to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.GHCR_REGISTRY }}
username: ${{ github.actor }}
@@ -186,7 +189,7 @@ jobs:
- name: Log in to Docker Hub
if: env.HAS_DOCKERHUB_TOKEN == 'true'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -209,7 +212,7 @@ jobs:
- name: Build and push Docker image
id: build
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: .
platforms: linux/amd64,linux/arm64
@@ -263,7 +266,7 @@ jobs:
- name: Generate SBOM
id: sbom_primary
continue-on-error: true
uses: anchore/sbom-action@57aae528053a48a3f6235f2d9461b05fbcb7366d # v0.23.1
uses: anchore/sbom-action@e22c389904149dbc22b58101806040fa8d37a610 # v0.24.0
with:
image: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.resolve_digest.outputs.digest }}
format: cyclonedx-json
@@ -282,7 +285,7 @@ jobs:
echo "Primary SBOM generation failed or produced missing/invalid output; using deterministic Syft fallback"
SYFT_VERSION="v1.42.2"
SYFT_VERSION="v1.42.4"
OS="$(uname -s | tr '[:upper:]' '[:lower:]')"
ARCH="$(uname -m)"
case "$ARCH" in
@@ -325,7 +328,7 @@ jobs:
' sbom-nightly.json >/dev/null
- name: Upload SBOM artifact
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: sbom-nightly
path: sbom-nightly.json
@@ -333,7 +336,7 @@ jobs:
# Install Cosign for keyless signing
- name: Install Cosign
uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22 # v4.1.0
uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v4.1.1
# Sign GHCR image with keyless signing (Sigstore/Fulcio)
- name: Sign GHCR Image
@@ -375,7 +378,7 @@ jobs:
run: echo "IMAGE_NAME_LC=${IMAGE_NAME,,}" >> "$GITHUB_ENV"
- name: Log in to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.GHCR_REGISTRY }}
username: ${{ github.actor }}
@@ -391,14 +394,28 @@ jobs:
-p 8080:8080 \
"${IMAGE_REF}"
# Wait for container to start
sleep 10
# Wait for container to become healthy
echo "⏳ Waiting for Charon to be healthy..."
MAX_ATTEMPTS=30
ATTEMPT=0
while [[ ${ATTEMPT} -lt ${MAX_ATTEMPTS} ]]; do
ATTEMPT=$((ATTEMPT + 1))
echo "Attempt ${ATTEMPT}/${MAX_ATTEMPTS}..."
if docker exec charon-nightly wget -qO- http://127.0.0.1:8080/health > /dev/null 2>&1; then
echo "✅ Charon is healthy!"
docker exec charon-nightly wget -qO- http://127.0.0.1:8080/health
break
fi
sleep 2
done
# Check container is running
docker ps | grep charon-nightly
# Basic health check
curl -f http://localhost:8080/health || exit 1
if [[ ${ATTEMPT} -ge ${MAX_ATTEMPTS} ]]; then
echo "❌ Health check failed after ${MAX_ATTEMPTS} attempts"
docker logs charon-nightly
docker stop charon-nightly
docker rm charon-nightly
exit 1
fi
# Cleanup
docker stop charon-nightly
@@ -435,7 +452,7 @@ jobs:
name: sbom-nightly
- name: Scan with Grype
uses: anchore/scan-action@7037fa011853d5a11690026fb85feee79f4c946c # v7.3.2
uses: anchore/scan-action@e1165082ffb1fe366ebaf02d8526e7c4989ea9d2 # v7.4.0
with:
sbom: sbom-nightly.json
fail-build: false
@@ -447,11 +464,11 @@ jobs:
image-ref: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ needs.build-and-push-nightly.outputs.digest }}
format: 'sarif'
output: 'trivy-nightly.sarif'
version: 'v0.69.3'
version: 'v0.70.0'
trivyignores: '.trivyignore'
- name: Upload Trivy results
uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
uses: github/codeql-action/upload-sarif@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
with:
sarif_file: 'trivy-nightly.sarif'
category: 'trivy-nightly'

View File

@@ -12,6 +12,10 @@ concurrency:
group: ${{ github.workflow }}-${{ inputs.pr_number || github.event.pull_request.number }}
cancel-in-progress: true
permissions:
contents: read
pull-requests: write
jobs:
validate:
name: Validate history-rewrite checklist (conditional)
@@ -21,7 +25,7 @@ jobs:
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Validate PR checklist (only for history-rewrite changes)
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
env:
PR_NUMBER: ${{ inputs.pr_number }}
with:

View File

@@ -28,15 +28,17 @@ jobs:
(github.event.workflow_run.head_branch == 'main' || github.event.workflow_run.head_branch == 'development')
steps:
- name: Set up Node (for github-script)
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: ${{ env.NODE_VERSION }}
- name: Propagate Changes
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
env:
CURRENT_BRANCH: ${{ github.event.workflow_run.head_branch || github.ref_name }}
CURRENT_SHA: ${{ github.event.workflow_run.head_sha || github.sha }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CHARON_TOKEN: ${{ secrets.CHARON_TOKEN }}
with:
script: |
const currentBranch = process.env.CURRENT_BRANCH || context.ref.replace('refs/heads/', '');
@@ -133,7 +135,9 @@ jobs:
const sensitive = files.some(fn => configPaths.some(sp => fn.startsWith(sp) || fn.includes(sp)));
if (sensitive) {
core.info(`${src} -> ${base} contains sensitive changes (${files.join(', ')}). Skipping automatic propagation.`);
const preview = files.slice(0, 25).join(', ');
const suffix = files.length > 25 ? ` …(+${files.length - 25} more)` : '';
core.info(`${src} -> ${base} contains sensitive changes (${preview}${suffix}). Skipping automatic propagation.`);
return;
}
} catch (error) {
@@ -203,6 +207,3 @@ jobs:
await createPR('development', targetBranch);
}
}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CHARON_TOKEN: ${{ secrets.CHARON_TOKEN }}

View File

@@ -16,7 +16,7 @@ permissions:
checks: write
env:
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
NODE_VERSION: '24.12.0'
GOTOOLCHAIN: auto
@@ -31,7 +31,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
with:
go-version: ${{ env.GO_VERSION }}
@@ -138,7 +138,7 @@ jobs:
} >> "$GITHUB_ENV"
- name: Set up Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
with:
go-version: ${{ env.GO_VERSION }}
@@ -148,14 +148,24 @@ jobs:
run: |
bash "scripts/repo_health_check.sh"
- name: Install gotestsum
run: go install gotest.tools/gotestsum@v1.13.0
- name: Run Go tests
id: go-tests
working-directory: ${{ github.workspace }}
env:
CGO_ENABLED: 1
run: |
bash "scripts/go-test-coverage.sh" 2>&1 | tee backend/test-output.txt
exit "${PIPESTATUS[0]}"
bash "scripts/go-test-coverage.sh" 2>&1 | tee backend/test-output.txt; exit "${PIPESTATUS[0]}"
- name: Upload test output artifact
if: always()
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: backend-test-output
path: backend/test-output.txt
retention-days: 7
- name: Go Test Summary
if: always()
@@ -232,11 +242,12 @@ jobs:
PERF_MAX_MS_GETSTATUS_P95_PARALLEL: 1500ms
PERF_MAX_MS_LISTDECISIONS_P95: 2000ms
run: |
go test -run TestPerf -v ./internal/api/handlers -count=1 2>&1 | tee perf-output.txt; PERF_STATUS="${PIPESTATUS[0]}"
{
echo "## 🔍 Running performance assertions (TestPerf)"
go test -run TestPerf -v ./internal/api/handlers -count=1 | tee perf-output.txt
cat perf-output.txt
} >> "$GITHUB_STEP_SUMMARY"
exit "${PIPESTATUS[0]}"
exit "$PERF_STATUS"
frontend-quality:
name: Frontend (React)
@@ -251,12 +262,18 @@ jobs:
bash "scripts/repo_health_check.sh"
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
cache-dependency-path: frontend/package-lock.json
- name: Verify lockfile integrity and audit dependencies
working-directory: frontend
run: |
npm ci --ignore-scripts
npm audit --audit-level=critical
- name: Check if frontend was modified in PR
id: check-frontend
run: |
@@ -298,8 +315,7 @@ jobs:
id: frontend-tests
working-directory: ${{ github.workspace }}
run: |
bash scripts/frontend-test-coverage.sh 2>&1 | tee frontend/test-output.txt
exit "${PIPESTATUS[0]}"
bash scripts/frontend-test-coverage.sh 2>&1 | tee frontend/test-output.txt; exit "${PIPESTATUS[0]}"
- name: Frontend Test Summary
if: always()

View File

@@ -20,6 +20,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
rate-limit-integration:
name: Rate Limiting Integration
@@ -31,7 +34,7 @@ jobs:
- name: Build Docker image (Local)
run: |
echo "Building image locally for integration tests..."
docker build -t charon:local .
docker build -t charon:local --build-arg CI="${CI:-false}" .
echo "✅ Successfully built charon:local"
- name: Run rate limit integration tests
@@ -68,7 +71,7 @@ jobs:
echo "### Caddy Admin Config (rate_limit handlers)"
echo '```json'
curl -s http://localhost:2119/config 2>/dev/null | grep -A 20 '"handler":"rate_limit"' | head -30 || echo "Could not retrieve Caddy config"
curl -s http://localhost:2119/config/ 2>/dev/null | grep -A 20 '"handler":"rate_limit"' | head -30 || echo "Could not retrieve Caddy config"
echo '```'
echo ""

View File

@@ -10,7 +10,7 @@ concurrency:
cancel-in-progress: false
env:
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
NODE_VERSION: '24.12.0'
GOTOOLCHAIN: auto
@@ -45,14 +45,14 @@ jobs:
fi
- name: Set up Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: ${{ env.GO_VERSION }}
cache-dependency-path: backend/go.sum
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: ${{ env.NODE_VERSION }}
@@ -67,7 +67,7 @@ jobs:
- name: Install Cross-Compilation Tools (Zig)
# Security: Pinned to full SHA for supply chain security
uses: goto-bus-stop/setup-zig@abea47f85e598557f500fa1fd2ab7464fcb39406 # v2
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2.2.1
with:
version: 0.13.0
@@ -75,7 +75,7 @@ jobs:
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@ec59f474b9834571250b370d4735c50f8e2d1e29 # v7
uses: goreleaser/goreleaser-action@e24998b8b67b290c2fa8b7c14fcfa7de2c5c9b8c # v7
with:
distribution: goreleaser
version: '~> v2.5'

View File

@@ -14,6 +14,9 @@ permissions:
pull-requests: write
issues: write
env:
GO_VERSION: '1.26.2'
jobs:
renovate:
runs-on: ubuntu-latest
@@ -24,8 +27,13 @@ jobs:
with:
fetch-depth: 1
- name: Set up Go
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: ${{ env.GO_VERSION }}
- name: Run Renovate
uses: renovatebot/github-action@0b17c4eb901eca44d018fb25744a50a74b2042df # v46.1.4
uses: renovatebot/github-action@83ec54fee49ab67d9cd201084c1ff325b4b462e4 # v46.1.10
with:
configurationFile: .github/renovate.json
token: ${{ secrets.RENOVATE_TOKEN || secrets.GITHUB_TOKEN }}

View File

@@ -30,7 +30,7 @@ jobs:
echo "GITHUB_TOKEN=${{ secrets.CHARON_TOKEN }}" >> "$GITHUB_ENV"
fi
- name: Prune renovate branches
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
github-token: ${{ env.GITHUB_TOKEN }}
script: |

View File

@@ -9,6 +9,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref_name }}
cancel-in-progress: true
permissions:
contents: read
jobs:
repo_health:
name: Repo health
@@ -34,7 +37,7 @@ jobs:
- name: Upload health output
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: repo-health-output
path: |

View File

@@ -22,6 +22,9 @@ concurrency:
group: security-pr-${{ github.event_name == 'workflow_run' && github.event.workflow_run.event || github.event_name }}-${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
security-scan:
name: Trivy Binary Scan
@@ -240,7 +243,7 @@ jobs:
- name: Download PR image artifact
if: github.event_name == 'workflow_run' || github.event_name == 'workflow_dispatch'
# actions/download-artifact v4.1.8
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c
uses: actions/download-artifact@484a0b528fb4d7bd804637ccb632e47a0e638317
with:
name: ${{ steps.check-artifact.outputs.artifact_name }}
run-id: ${{ steps.check-artifact.outputs.run_id }}
@@ -361,7 +364,7 @@ jobs:
- name: Run Trivy filesystem scan (SARIF output)
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
# aquasecurity/trivy-action v0.33.1
# aquasecurity/trivy-action 0.35.0
uses: aquasecurity/trivy-action@57a97c7e7821a5776cebc9bb87c984fa69cba8f1
with:
scan-type: 'fs'
@@ -385,7 +388,7 @@ jobs:
- name: Upload Trivy SARIF to GitHub Security
if: always() && steps.trivy-sarif-check.outputs.exists == 'true'
# github/codeql-action v4
uses: github/codeql-action/upload-sarif@1a97b0f94ec9297d6f58aefe5a6b5441c045bed4
uses: github/codeql-action/upload-sarif@34950e1b113b30df4edee1a6d3a605242df0c40b
with:
sarif_file: 'trivy-binary-results.sarif'
category: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
@@ -393,7 +396,7 @@ jobs:
- name: Run Trivy filesystem scan (fail on CRITICAL/HIGH)
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
# aquasecurity/trivy-action v0.33.1
# aquasecurity/trivy-action 0.35.0
uses: aquasecurity/trivy-action@57a97c7e7821a5776cebc9bb87c984fa69cba8f1
with:
scan-type: 'fs'

View File

@@ -19,6 +19,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: false
permissions:
contents: read
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository_owner }}/charon
@@ -61,7 +64,7 @@ jobs:
echo "Base image digest: $DIGEST"
- name: Log in to Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
@@ -77,7 +80,7 @@ jobs:
- name: Build Docker image (NO CACHE)
id: build
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7
with:
context: .
platforms: linux/amd64
@@ -99,7 +102,7 @@ jobs:
format: 'table'
severity: 'CRITICAL,HIGH'
exit-code: '1' # Fail workflow if vulnerabilities found
version: 'v0.69.3'
version: 'v0.70.0'
continue-on-error: true
- name: Run Trivy vulnerability scanner (SARIF)
@@ -110,10 +113,10 @@ jobs:
format: 'sarif'
output: 'trivy-weekly-results.sarif'
severity: 'CRITICAL,HIGH,MEDIUM'
version: 'v0.69.3'
version: 'v0.70.0'
- name: Upload Trivy results to GitHub Security
uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
uses: github/codeql-action/upload-sarif@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
with:
sarif_file: 'trivy-weekly-results.sarif'
@@ -124,10 +127,10 @@ jobs:
format: 'json'
output: 'trivy-weekly-results.json'
severity: 'CRITICAL,HIGH,MEDIUM,LOW'
version: 'v0.69.3'
version: 'v0.70.0'
- name: Upload Trivy JSON results
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: trivy-weekly-scan-${{ github.run_number }}
path: trivy-weekly-results.json

View File

@@ -266,7 +266,7 @@ jobs:
# Generate SBOM using official Anchore action (auto-updated by Renovate)
- name: Generate SBOM
if: steps.set-target.outputs.image_name != ''
uses: anchore/sbom-action@57aae528053a48a3f6235f2d9461b05fbcb7366d # v0.23.1
uses: anchore/sbom-action@e22c389904149dbc22b58101806040fa8d37a610 # v0.24.0
id: sbom
with:
image: ${{ steps.set-target.outputs.image_name }}
@@ -281,19 +281,19 @@ jobs:
echo "component_count=${COMPONENT_COUNT}" >> "$GITHUB_OUTPUT"
echo "✅ SBOM generated with ${COMPONENT_COUNT} components"
# Scan for vulnerabilities using manual Grype installation (pinned to v0.107.1)
# Scan for vulnerabilities using manual Grype installation (pinned to v0.110.0)
- name: Install Grype
if: steps.set-target.outputs.image_name != ''
run: |
curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.109.1
curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.111.0
- name: Scan for vulnerabilities
if: steps.set-target.outputs.image_name != ''
id: grype-scan
run: |
echo "🔍 Scanning SBOM for vulnerabilities..."
grype sbom:sbom.cyclonedx.json -o json > grype-results.json
grype sbom:sbom.cyclonedx.json -o sarif > grype-results.sarif
grype sbom:sbom.cyclonedx.json --config .grype.yaml -o json > grype-results.json
grype sbom:sbom.cyclonedx.json --config .grype.yaml -o sarif > grype-results.sarif
- name: Debug Output Files
if: steps.set-target.outputs.image_name != ''
@@ -362,7 +362,7 @@ jobs:
- name: Upload SARIF to GitHub Security
if: steps.check-artifact.outputs.artifact_found == 'true'
uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4
uses: github/codeql-action/upload-sarif@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4
continue-on-error: true
with:
sarif_file: grype-results.sarif
@@ -381,9 +381,12 @@ jobs:
- name: Comment on PR
if: steps.set-target.outputs.image_name != '' && steps.pr-number.outputs.is_push != 'true' && steps.pr-number.outputs.pr_number != ''
continue-on-error: true
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
set -euo pipefail
PR_NUMBER="${{ steps.pr-number.outputs.pr_number }}"
COMPONENT_COUNT="${{ steps.sbom-count.outputs.component_count }}"
CRITICAL_COUNT="${{ steps.vuln-summary.outputs.critical_count }}"
@@ -429,29 +432,38 @@ jobs:
EOF
)
# Find and update existing comment or create new one
COMMENT_ID=$(gh api \
# Fetch existing comments — skip gracefully on 403 / permission errors
COMMENTS_JSON=""
if ! COMMENTS_JSON=$(gh api \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
"/repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" \
--jq '.[] | select(.body | contains("Supply Chain Verification Results")) | .id' | head -1)
"/repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" 2>/dev/null); then
echo "⚠️ Cannot access PR comments (likely token permissions / fork / event context). Skipping PR comment."
exit 0
fi
if [[ -n "${COMMENT_ID}" ]]; then
COMMENT_ID=$(echo "${COMMENTS_JSON}" | jq -r '.[] | select(.body | contains("Supply Chain Verification Results")) | .id' | head -1)
if [[ -n "${COMMENT_ID:-}" && "${COMMENT_ID}" != "null" ]]; then
echo "📝 Updating existing comment..."
gh api \
--method PATCH \
if ! gh api --method PATCH \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
"/repos/${{ github.repository }}/issues/comments/${COMMENT_ID}" \
-f body="${COMMENT_BODY}"
-f body="${COMMENT_BODY}"; then
echo "⚠️ Failed to update comment (permissions?). Skipping."
exit 0
fi
else
echo "📝 Creating new comment..."
gh api \
--method POST \
if ! gh api --method POST \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
"/repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" \
-f body="${COMMENT_BODY}"
-f body="${COMMENT_BODY}"; then
echo "⚠️ Failed to create comment (permissions?). Skipping."
exit 0
fi
fi
echo "✅ PR comment posted"

View File

@@ -119,7 +119,7 @@ jobs:
# Generate SBOM using official Anchore action (auto-updated by Renovate)
- name: Generate and Verify SBOM
if: steps.image-check.outputs.exists == 'true'
uses: anchore/sbom-action@57aae528053a48a3f6235f2d9461b05fbcb7366d # v0.23.1
uses: anchore/sbom-action@e22c389904149dbc22b58101806040fa8d37a610 # v0.24.0
with:
image: ghcr.io/${{ github.repository_owner }}/charon:${{ steps.tag.outputs.tag }}
format: cyclonedx-json
@@ -144,7 +144,7 @@ jobs:
- name: Upload SBOM Artifact
if: steps.image-check.outputs.exists == 'true' && always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: sbom-${{ steps.tag.outputs.tag }}
path: sbom-verify.cyclonedx.json
@@ -233,7 +233,7 @@ jobs:
# Scan for vulnerabilities using official Anchore action (auto-updated by Renovate)
- name: Scan for Vulnerabilities
if: steps.validate-sbom.outputs.valid == 'true'
uses: anchore/scan-action@7037fa011853d5a11690026fb85feee79f4c946c # v7.3.2
uses: anchore/scan-action@e1165082ffb1fe366ebaf02d8526e7c4989ea9d2 # v7.4.0
id: scan
with:
sbom: sbom-verify.cyclonedx.json
@@ -324,7 +324,7 @@ jobs:
- name: Upload Vulnerability Scan Artifact
if: steps.validate-sbom.outputs.valid == 'true' && always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: vulnerability-scan-${{ steps.tag.outputs.tag }}
path: |
@@ -362,7 +362,7 @@ jobs:
if: |
github.event_name == 'pull_request' ||
(github.event_name == 'workflow_run' && github.event.workflow_run.event == 'pull_request')
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
with:
result-encoding: string
script: |

View File

@@ -2,7 +2,7 @@ name: Update GeoLite2 Checksum
on:
schedule:
- cron: '0 2 * * 1' # Weekly on Mondays at 2 AM UTC
- cron: '0 2 * * 0' # Weekly on Sundays at 2 AM UTC
workflow_dispatch:
permissions:
@@ -105,7 +105,7 @@ jobs:
- name: Create Pull Request
if: steps.checksum.outputs.needs_update == 'true'
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v8.1.1
with:
title: "chore(docker): update GeoLite2-Country.mmdb checksum"
body: |
@@ -141,7 +141,8 @@ jobs:
---
**Auto-generated by:** `.github/workflows/update-geolite2.yml`
**Trigger:** Scheduled weekly check (Mondays 2 AM UTC)
- **Trigger:** Scheduled weekly check (Sundays 2 AM UTC)
base: development
branch: bot/update-geolite2-checksum
delete-branch: true
commit-message: |
@@ -160,7 +161,7 @@ jobs:
- name: Report failure via GitHub Issue
if: failure()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const errorType = '${{ steps.checksum.outputs.error }}' || 'unknown';
@@ -182,7 +183,7 @@ jobs:
### Workflow Details
- **Run URL:** ${runUrl}
- **Triggered:** ${context.eventName === 'schedule' ? 'Scheduled (weekly)' : 'Manual dispatch'}
- **Triggered:** ${context.eventName === 'schedule' ? 'Scheduled (weekly, Sundays)' : 'Manual dispatch'}
- **Timestamp:** ${new Date().toISOString()}
### Required Actions

View File

@@ -20,6 +20,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
waf-integration:
name: Coraza WAF Integration
@@ -31,7 +34,7 @@ jobs:
- name: Build Docker image (Local)
run: |
echo "Building image locally for integration tests..."
docker build -t charon:local .
docker build -t charon:local --build-arg CI="${CI:-false}" .
echo "✅ Successfully built charon:local"
- name: Run WAF integration tests

View File

@@ -47,7 +47,7 @@ jobs:
steps:
- name: Check Nightly Workflow Status
id: check
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const skipCheck = '${{ inputs.skip_workflow_check }}' === 'true';
@@ -200,8 +200,8 @@ jobs:
runs-on: ubuntu-latest
if: needs.check-nightly-health.outputs.is_healthy == 'true'
outputs:
pr_number: ${{ steps.create-pr.outputs.pr_number }}
pr_url: ${{ steps.create-pr.outputs.pr_url }}
pr_number: ${{ steps.create-pr.outputs.pr_number || steps.existing-pr.outputs.pr_number }}
pr_url: ${{ steps.create-pr.outputs.pr_url || steps.existing-pr.outputs.pr_url }}
skipped: ${{ steps.check-diff.outputs.skipped }}
steps:
@@ -274,7 +274,7 @@ jobs:
- name: Check for Existing PR
id: existing-pr
if: steps.check-diff.outputs.skipped != 'true'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const { data: pulls } = await github.rest.pulls.list({
@@ -297,7 +297,7 @@ jobs:
- name: Create Promotion PR
id: create-pr
if: steps.check-diff.outputs.skipped != 'true' && steps.existing-pr.outputs.exists != 'true'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const fs = require('fs');
@@ -399,7 +399,7 @@ jobs:
- name: Update Existing PR
if: steps.check-diff.outputs.skipped != 'true' && steps.existing-pr.outputs.exists == 'true'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const prNumber = ${{ steps.existing-pr.outputs.pr_number }};
@@ -425,7 +425,7 @@ jobs:
contents: read
steps:
- name: Dispatch missing required workflows on nightly head
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const owner = context.repo.owner;
@@ -483,7 +483,7 @@ jobs:
steps:
- name: Create Failure Issue
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const isHealthy = '${{ needs.check-nightly-health.outputs.is_healthy }}';

9
.gitignore vendored
View File

@@ -314,3 +314,12 @@ validation-evidence/**
.github/agents/# Tools Configuration.md
docs/reports/codecove_patch_report.md
vuln-results.json
test_output.txt
coverage_results.txt
final-results.json
new-results.json
scan_output.json
coverage_output.txt
frontend/lint_output.txt
lefthook_out.txt
backend/test_out.txt

View File

@@ -4,136 +4,618 @@
# Documentation: https://github.com/anchore/grype#specifying-matches-to-ignore
ignore:
# CVE-2026-22184: zlib Global Buffer Overflow in untgz utility
# Severity: CRITICAL
# Package: zlib 1.3.1-r2 (Alpine Linux base image)
# Status: No upstream fix available as of 2026-01-16
# CVE-2026-2673: OpenSSL TLS 1.3 server key exchange group downgrade
# Severity: HIGH (CVSS 7.5)
# Packages: libcrypto3 3.5.5-r0 and libssl3 3.5.5-r0 (Alpine apk)
# Status: No upstream fix available — Alpine 3.23 still ships libcrypto3/libssl3 3.5.5-r0 as of 2026-03-18
#
# Vulnerability Details:
# - Global buffer overflow in TGZfname() function
# - Unbounded strcpy() allows attacker-controlled archive names
# - Can lead to memory corruption, DoS, potential RCE
# - When DEFAULT is in the TLS 1.3 group configuration, the OpenSSL server may select
# a weaker key exchange group than preferred, enabling a limited key exchange downgrade.
# - Only affects systems acting as a raw TLS 1.3 server using OpenSSL's server-side group negotiation.
#
# Risk Assessment: ACCEPTED (Low exploitability in Charon context)
# - Charon does not use untgz utility directly
# - No untrusted tar archive processing in application code
# - Attack surface limited to OS-level utilities
# - Multiple layers of containerization and isolation
# Root Cause (No Fix Available):
# - Alpine upstream has not published a patched libcrypto3/libssl3 for Alpine 3.23.
# - Checked: Alpine 3.23 still ships libcrypto3/libssl3 3.5.5-r0 as of 2026-03-18.
# - Fix path: once Alpine publishes a patched libcrypto3/libssl3, rebuild the Docker image
# and remove this suppression.
#
# Mitigation:
# - Monitor Alpine Linux security feed daily for zlib patches
# - Container runs with minimal privileges (no-new-privileges)
# - Read-only filesystem where possible
# - Network isolation via Docker networks
#
# Review:
# - Daily checks for Alpine security updates
# - Automatic re-scan via CI/CD on every commit
# - Manual review scheduled for 2026-01-23 (7 days)
#
# Removal Criteria:
# - Alpine releases zlib 1.3.1-r3 or higher with CVE fix
# - OR upstream zlib project releases patched version
# - Remove this suppression immediately after fix available
#
# References:
# - CVE: https://nvd.nist.gov/vuln/detail/CVE-2026-22184
# - Alpine Security: https://security.alpinelinux.org/
# - GitHub Issue: https://github.com/Wikid82/Charon/issues/TBD
- vulnerability: CVE-2026-22184
package:
name: zlib
version: "1.3.1-r2"
type: apk # Alpine package
reason: |
CRITICAL buffer overflow in untgz utility. No fix available from Alpine
as of 2026-01-16. Risk accepted: Charon does not directly use untgz or
process untrusted tar archives. Attack surface limited to base OS utilities.
Monitoring Alpine security feed for upstream patch.
expiry: "2026-03-14" # Re-evaluate in 7 days
# Action items when this suppression expires:
# 1. Check Alpine security feed: https://security.alpinelinux.org/
# 2. Check zlib releases: https://github.com/madler/zlib/releases
# 3. If fix available: Update Dockerfile, rebuild, remove suppression
# 4. If no fix: Extend expiry by 7 days, document justification
# 5. If extended 3+ times: Escalate to security team for review
# GHSA-69x3-g4r3-p962 / CVE-2026-25793: Nebula ECDSA Signature Malleability
# Severity: HIGH (CVSS 8.1)
# Package: github.com/slackhq/nebula v1.9.7 (embedded in /usr/bin/caddy)
# Status: Cannot upgrade — smallstep/certificates v0.30.0-rc2 still pins nebula v1.9.x
#
# Vulnerability Details:
# - ECDSA signature malleability allows bypassing certificate blocklists
# - Attacker can forge alternate valid P256 ECDSA signatures for revoked
# certificates (CVSSv3: AV:N/AC:H/PR:L/UI:N/S:U/C:H/I:H/A:N)
# - Only affects configurations using Nebula-based certificate authorities
# (non-default and uncommon in Charon deployments)
#
# Root Cause (Compile-Time Dependency Lock):
# - Caddy is built with caddy-security plugin, which transitively requires
# github.com/smallstep/certificates. That package pins nebula v1.9.x.
# - Checked: smallstep/certificates v0.27.5 → v0.30.0-rc2 all require nebula v1.9.4–v1.9.7.
# The nebula v1.10 API removal breaks compilation in the
# authority/provisioner package; xcaddy build fails with upgrade attempted.
# - Dockerfile caddy-builder stage pins nebula@v1.9.7 (Renovate tracked) with
# an inline comment explaining the constraint (Dockerfile line 247).
# - Fix path: once smallstep/certificates releases a version requiring
# nebula v1.10+, remove the pin and this suppression simultaneously.
#
# Risk Assessment: ACCEPTED (Low exploitability in Charon context)
# - Charon uses standard ACME/Let's Encrypt TLS; Nebula VPN PKI is not
# enabled by default and rarely configured in Charon deployments.
# - Exploiting this requires a valid certificate sharing the same issuer as
# a revoked one — an uncommon and targeted attack scenario.
# Risk Assessment: ACCEPTED (No upstream fix; limited exposure in Charon context)
# - Charon terminates TLS at the Caddy layer — the Go backend does not act as a raw TLS 1.3 server.
# - The vulnerability requires the affected application to directly configure TLS 1.3 server
# group negotiation via OpenSSL, which Charon does not do.
# - Container-level isolation reduces the attack surface further.
#
# Mitigation (active while suppression is in effect):
# - Monitor smallstep/certificates releases at https://github.com/smallstep/certificates/releases
# - Weekly CI security rebuild flags any new CVEs in the full image.
# - Renovate annotation in Dockerfile (datasource=go depName=github.com/slackhq/nebula)
# will surface the pin for review when xcaddy build becomes compatible.
# - Monitor Alpine security advisories: https://security.alpinelinux.org/vuln/CVE-2026-2673
# - Weekly CI security rebuild (security-weekly-rebuild.yml) flags any new CVEs in the full image.
#
# Review:
# - Reviewed 2026-02-19: smallstep/certificates latest stable remains v0.27.5;
# no release requiring nebula v1.10+ has shipped. Suppression extended 14 days.
# - Next review: 2026-03-05. Remove suppression immediately once upstream fixes.
# - Reviewed 2026-03-18 (initial suppression): no upstream fix available. Set 30-day review.
# - Extended 2026-04-04: Alpine 3.23 still ships 3.5.5-r0. No upstream fix available.
# - Next review: 2026-05-18. Remove suppression immediately once upstream fixes.
#
# Removal Criteria:
# - smallstep/certificates releases a stable version requiring nebula v1.10+
# - Update Dockerfile caddy-builder patch to use the new versions
# - Rebuild image, run security scan, confirm suppression no longer needed
# - Remove both this entry and the corresponding .trivyignore entry
# - Alpine publishes a patched version of libcrypto3 and libssl3
# - Rebuild Docker image and verify CVE-2026-2673 no longer appears in grype-results.json
# - Remove both these entries and the corresponding .trivyignore entry simultaneously
#
# References:
# - GHSA: https://github.com/advisories/GHSA-69x3-g4r3-p962
# - CVE-2026-2673: https://nvd.nist.gov/vuln/detail/CVE-2026-2673
# - Alpine security tracker: https://security.alpinelinux.org/vuln/CVE-2026-2673
- vulnerability: CVE-2026-2673
package:
name: libcrypto3
version: "3.5.5-r0"
type: apk
reason: |
HIGH — OpenSSL TLS 1.3 server key exchange group downgrade in libcrypto3 3.5.5-r0 (Alpine base image).
No upstream fix: Alpine 3.23 still ships libcrypto3 3.5.5-r0 as of 2026-03-18. Charon
terminates TLS at the Caddy layer; the Go backend does not act as a raw TLS 1.3 server.
Risk accepted pending Alpine upstream patch.
expiry: "2026-05-18" # Extended 2026-04-04: Alpine 3.23 still ships 3.5.5-r0. Next review 2026-05-18.
# Action items when this suppression expires:
# 1. Check Alpine security tracker: https://security.alpinelinux.org/vuln/CVE-2026-2673
# 2. If a patched Alpine package is now available:
# a. Rebuild Docker image without suppression
# b. Run local security-scan-docker-image and confirm CVE is resolved
# c. Remove this suppression entry, the libssl3 entry below, and the .trivyignore entry
# 3. If no fix yet: Extend expiry by 14–30 days and update the review comment above
# 4. If extended 3+ times: Open an issue to track the upstream status formally
# CVE-2026-2673 (libssl3) — see full justification in the libcrypto3 entry above
- vulnerability: CVE-2026-2673
package:
name: libssl3
version: "3.5.5-r0"
type: apk
reason: |
HIGH — OpenSSL TLS 1.3 server key exchange group downgrade in libssl3 3.5.5-r0 (Alpine base image).
No upstream fix: Alpine 3.23 still ships libssl3 3.5.5-r0 as of 2026-03-18. Charon
terminates TLS at the Caddy layer; the Go backend does not act as a raw TLS 1.3 server.
Risk accepted pending Alpine upstream patch.
expiry: "2026-05-18" # Extended 2026-04-04: see libcrypto3 entry above for action items.
# CVE-2026-31790: OpenSSL vulnerability in Alpine base image packages
# Severity: HIGH
# Packages: libcrypto3 3.5.5-r0 and libssl3 3.5.5-r0 (Alpine apk)
# Status: No upstream fix available — Alpine 3.23 still ships libcrypto3/libssl3 3.5.5-r0 as of 2026-04-09
#
# Root Cause (No Fix Available):
# - Alpine upstream has not published a patched libcrypto3/libssl3 for Alpine 3.23.
# - Checked: Alpine 3.23 still ships libcrypto3/libssl3 3.5.5-r0 as of 2026-04-09.
# - Fix path: once Alpine publishes a patched libcrypto3/libssl3, rebuild the Docker image
# and remove this suppression.
#
# Risk Assessment: ACCEPTED (No upstream fix; documented in SECURITY.md)
# - Charon terminates TLS at the Caddy layer — the Go backend does not act as a raw TLS server.
# - Container-level isolation reduces the attack surface further.
#
# Mitigation (active while suppression is in effect):
# - Monitor Alpine security advisories: https://security.alpinelinux.org/vuln/CVE-2026-31790
# - Weekly CI security rebuild (security-weekly-rebuild.yml) flags any new CVEs in the full image.
#
# Review:
# - Reviewed 2026-04-09 (initial suppression): no upstream fix available. Set 30-day review.
# - Next review: 2026-05-09. Remove suppression immediately once upstream fixes.
#
# Removal Criteria:
# - Alpine publishes a patched version of libcrypto3 and libssl3
# - Rebuild Docker image and verify CVE-2026-31790 no longer appears in grype-results.json
# - Remove both these entries and the corresponding .trivyignore entry simultaneously
#
# References:
# - CVE-2026-31790: https://nvd.nist.gov/vuln/detail/CVE-2026-31790
# - Alpine security tracker: https://security.alpinelinux.org/vuln/CVE-2026-31790
- vulnerability: CVE-2026-31790
package:
name: libcrypto3
version: "3.5.5-r0"
type: apk
reason: |
HIGH — OpenSSL vulnerability in libcrypto3 3.5.5-r0 (Alpine base image).
No upstream fix: Alpine 3.23 still ships libcrypto3 3.5.5-r0 as of 2026-04-09. Charon
terminates TLS at the Caddy layer; the Go backend does not act as a raw TLS server.
Risk accepted pending Alpine upstream patch. Documented in SECURITY.md.
expiry: "2026-05-09" # Reviewed 2026-04-09: no upstream fix available. Next review 2026-05-09.
# Action items when this suppression expires:
# 1. Check Alpine security tracker: https://security.alpinelinux.org/vuln/CVE-2026-31790
# 2. If a patched Alpine package is now available:
# a. Rebuild Docker image without suppression
# b. Run local security-scan-docker-image and confirm CVE is resolved
# c. Remove this suppression entry, the libssl3 entry below, and the .trivyignore entry
# 3. If no fix yet: Extend expiry by 14–30 days and update the review comment above
# 4. If extended 3+ times: Open an issue to track the upstream status formally
# CVE-2026-31790 (libssl3) — see full justification in the libcrypto3 entry above
- vulnerability: CVE-2026-31790
package:
name: libssl3
version: "3.5.5-r0"
type: apk
reason: |
HIGH — OpenSSL vulnerability in libssl3 3.5.5-r0 (Alpine base image).
No upstream fix: Alpine 3.23 still ships libssl3 3.5.5-r0 as of 2026-04-09. Charon
terminates TLS at the Caddy layer; the Go backend does not act as a raw TLS server.
Risk accepted pending Alpine upstream patch. Documented in SECURITY.md.
expiry: "2026-05-09" # Reviewed 2026-04-09: see libcrypto3 entry above for action items.
# GHSA-69x3-g4r3-p962 / CVE-2026-25793: Nebula ECDSA Signature Malleability
# Severity: HIGH (CVSS 8.1)
# Package: github.com/slackhq/nebula v1.9.7 (embedded in /usr/bin/caddy via smallstep/certificates)
# Status: Fix exists in nebula v1.10.3 — smallstep/certificates cannot compile against v1.10+ APIs
#
# Vulnerability Details:
# - ECDSA signature malleability in nebula allows potential authentication bypass via
# crafted certificate signatures (CWE-347).
# - CVSSv3: AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:N (CVSS 8.1)
#
# Root Cause (Third-Party Binary + Upstream API Incompatibility):
# - Charon does not use nebula directly. The library is compiled into the Caddy binary
# via the caddy-security plugin → smallstep/certificates dependency chain.
# - Nebula v1.10.3 patches the vulnerability but removes legacy APIs that
# smallstep/certificates (through v0.30.2) depends on, causing compile failures.
# - Fix path: once smallstep/certificates releases a version compatible with nebula >= v1.10.3,
# update the Dockerfile and remove this suppression.
#
# Risk Assessment: ACCEPTED (No direct use + upstream API incompatibility blocks fix)
# - Charon does not use Nebula VPN PKI by default. The vulnerable code path is only
# reachable if Nebula-based certificate provisioning is explicitly configured.
# - The attack requires network access and a crafted certificate, which is not part of
# standard Charon deployment.
#
# Mitigation (active while suppression is in effect):
# - Monitor smallstep/certificates releases: https://github.com/smallstep/certificates/releases
# - Monitor nebula releases: https://github.com/slackhq/nebula/releases
# - Weekly CI security rebuild flags the moment a compatible upstream ships.
#
# Review:
# - Reviewed 2026-02-19 (initial suppression in .trivyignore): certificates v0.27.5 pins nebula v1.9.x.
# - Re-evaluated 2026-04-10: nebula v1.10.3 has the fix but certificates (through v0.30.2)
# uses legacy APIs removed in v1.10+. Still blocked. Set 30-day review.
# - Next review: 2026-05-10. Remove suppression once certificates ships with nebula >= v1.10.3.
#
# Removal Criteria:
# - smallstep/certificates releases a version compatible with nebula >= v1.10.3
# - Update Dockerfile nebula pin, rebuild, run security-scan-docker-image, confirm resolved
# - Remove this entry and the corresponding .trivyignore entry simultaneously
#
# References:
# - GHSA-69x3-g4r3-p962: https://github.com/advisories/GHSA-69x3-g4r3-p962
# - CVE-2026-25793: https://nvd.nist.gov/vuln/detail/CVE-2026-25793
# - smallstep/certificates: https://github.com/smallstep/certificates/releases
# - Dockerfile pin: caddy-builder stage, line ~247 (go get nebula@v1.9.7)
- vulnerability: GHSA-69x3-g4r3-p962
# - Nebula releases: https://github.com/slackhq/nebula/releases
# - smallstep/certificates releases: https://github.com/smallstep/certificates/releases
- vulnerability: CVE-2026-25793
package:
name: github.com/slackhq/nebula
version: "v1.9.7"
type: go-module
reason: |
HIGH — ECDSA signature malleability in nebula v1.9.7 embedded in /usr/bin/caddy.
Cannot upgrade: smallstep/certificates v0.27.5 (latest stable as of 2026-02-19)
still requires nebula v1.9.x (verified across v0.27.5–v0.30.0-rc2). Charon does
not use Nebula VPN PKI by default. Risk accepted pending upstream smallstep fix.
Reviewed 2026-02-19: no new smallstep release changes this assessment.
expiry: "2026-03-05" # Re-evaluate in 14 days (2026-02-19 + 14 days)
Fix exists in nebula v1.10.3 but smallstep/certificates (through v0.30.2) uses legacy APIs
removed in v1.10+, causing compile failures. Charon does not use Nebula VPN PKI by default.
Risk accepted; no remediation until smallstep/certificates ships with nebula >= v1.10.3.
Re-evaluated 2026-04-10: still blocked by upstream API incompatibility.
expiry: "2026-05-10" # Re-evaluated 2026-04-10: certificates through v0.30.2 incompatible with nebula v1.10+.
# GHSA-6g7g-w4f8-9c9x: buger/jsonparser Delete panic on malformed JSON (DoS)
# Severity: HIGH (CVSS 7.5)
# Package: github.com/buger/jsonparser v1.1.1 (embedded in /usr/local/bin/crowdsec and /usr/local/bin/cscli)
# Status: UPSTREAM FIX EXISTS (v1.1.2 released 2026-03-20) — awaiting CrowdSec to update dependency
# NOTE: As of 2026-04-20, grype v0.111.0 with fresh DB no longer flags this finding in the image.
# This suppression is retained as a safety net in case future DB updates re-surface it.
#
# Vulnerability Details:
# - The Delete function fails to validate offsets on malformed JSON input, producing a
# negative slice index and a runtime panic — denial of service (CWE-125).
# - CVSSv3: AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H
#
# Root Cause (Third-Party Binary — Fix Exists Upstream, Not Yet in CrowdSec):
# - Charon does not use buger/jsonparser directly. It is compiled into CrowdSec binaries.
# - buger/jsonparser released v1.1.2 on 2026-03-20 fixing issue #275.
# - CrowdSec has not yet released a version built with buger/jsonparser v1.1.2.
# - Fix path: once CrowdSec updates their dependency and rebuilds, rebuild the Docker image
# and remove this suppression.
#
# Risk Assessment: ACCEPTED (Limited exploitability; fix exists upstream but not yet in CrowdSec)
# - The DoS vector requires passing malformed JSON to the vulnerable Delete function within
# CrowdSec's internal processing pipeline; this is not a direct attack surface in Charon.
# - CrowdSec's exposed surface is its HTTP API (not raw JSON stream parsing via this path).
#
# Mitigation (active while suppression is in effect):
# - Monitor CrowdSec releases for a build using buger/jsonparser >= v1.1.2.
# - CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases
# - Weekly CI security rebuild flags the moment a fixed image ships.
#
# Review:
# - Reviewed 2026-03-19 (initial suppression): no upstream fix. Set 30-day review.
# - Extended 2026-04-04: no upstream fix. buger/jsonparser issue #275 still open.
# - Updated 2026-04-20: buger/jsonparser v1.1.2 released 2026-03-20. CrowdSec not yet updated.
# Grype v0.111.0 with fresh DB (2026-04-20) no longer flags this finding. Suppression retained
# as a safety net. Next review: 2026-05-19 — remove if CrowdSec ships with v1.1.2+.
#
# Removal Criteria:
# - CrowdSec releases a version built with buger/jsonparser >= v1.1.2
# - Rebuild Docker image, run security-scan-docker-image, confirm finding is resolved
# - Remove this entry and the corresponding .trivyignore entry simultaneously
#
# References:
# - GHSA-6g7g-w4f8-9c9x: https://github.com/advisories/GHSA-6g7g-w4f8-9c9x
# - Upstream fix: https://github.com/buger/jsonparser/releases/tag/v1.1.2
# - golang/vulndb: https://github.com/golang/vulndb/issues/4514
# - CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases
- vulnerability: GHSA-6g7g-w4f8-9c9x
package:
name: github.com/buger/jsonparser
version: "v1.1.1"
type: go-module
reason: |
HIGH — DoS panic via malformed JSON in buger/jsonparser v1.1.1 embedded in CrowdSec binaries.
Upstream fix: buger/jsonparser v1.1.2 released 2026-03-20; CrowdSec has not yet updated their
dependency. Grype no longer flags this as of 2026-04-20 (fresh DB). Suppression retained as
safety net pending CrowdSec update. Charon does not use this package directly.
Updated 2026-04-20: fix v1.1.2 exists upstream; awaiting CrowdSec dependency update.
expiry: "2026-05-19" # Review 2026-05-19: remove if CrowdSec ships with buger/jsonparser >= v1.1.2.
# Action items when this suppression expires:
# 1. Check smallstep/certificates releases: https://github.com/smallstep/certificates/releases
# 2. If a stable version requires nebula v1.10+:
# a. Update Dockerfile caddy-builder: remove the `go get nebula@v1.9.7` pin
# b. Optionally bump smallstep/certificates to the new version
# c. Rebuild Docker image and verify no compile failures
# d. Re-run local security-scan-docker-image and confirm clean result
# e. Remove this suppression entry
# 3. If no fix yet: Extend expiry by 14 days and document justification
# 4. If extended 3+ times: Open upstream issue on smallstep/certificates
# 1. Check if CrowdSec has released a version with buger/jsonparser >= v1.1.2:
# https://github.com/crowdsecurity/crowdsec/releases
# 2. If CrowdSec has updated: rebuild Docker image, run security-scan-docker-image,
# and remove this suppression entry and the corresponding .trivyignore entry
# 3. If grype still does not flag it with fresh DB: consider removing the suppression as
# it may no longer be necessary
# 4. If no CrowdSec update yet: Extend expiry by 30 days
# GHSA-jqcq-xjh3-6g23: pgproto3/v2 DataRow.Decode panic on negative field length (DoS)
# Severity: HIGH (CVSS 7.5)
# Package: github.com/jackc/pgproto3/v2 v2.3.3 (embedded in /usr/local/bin/crowdsec and /usr/local/bin/cscli)
# Status: NO fix in pgproto3/v2 (archived/EOL) — fix path requires CrowdSec to migrate to pgx/v5
#
# Vulnerability Details:
# - DataRow.Decode does not validate field lengths; a malicious or compromised PostgreSQL server
# can send a negative field length causing a slice-bounds panic — denial of service (CWE-129).
# - CVSSv3: AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H
#
# Root Cause (EOL Module + Third-Party Binary):
# - Charon does not use pgproto3/v2 directly nor communicate with PostgreSQL. The package
# is compiled into CrowdSec binaries for their internal database communication.
# - The pgproto3/v2 module is archived and EOL; no fix will be released. The fix path
# is migration to pgx/v5, which embeds an updated pgproto3/v3.
# - Fix path: once CrowdSec migrates to pgx/v5 and releases an updated binary, rebuild
# the Docker image and remove this suppression.
#
# Risk Assessment: ACCEPTED (Non-exploitable in Charon context + no upstream fix path)
# - The vulnerability requires a malicious PostgreSQL server response. Charon uses SQLite
# internally and does not run PostgreSQL. CrowdSec's database path is not exposed to
# external traffic in a standard Charon deployment.
# - The attack requires a compromised database server, which would imply full host compromise.
#
# Mitigation (active while suppression is in effect):
# - Monitor CrowdSec releases for pgx/v5 migration:
# https://github.com/crowdsecurity/crowdsec/releases
# - Weekly CI security rebuild flags the moment a fixed image ships.
#
# Review:
# - Reviewed 2026-03-19 (initial suppression): pgproto3/v2 is EOL; no fix exists or will exist.
# Waiting on CrowdSec to migrate to pgx/v5. Set 30-day review.
# - Extended 2026-04-04: CrowdSec has not migrated to pgx/v5 yet.
# - Next review: 2026-05-19. Remove suppression once CrowdSec ships with pgx/v5.
#
# Removal Criteria:
# - CrowdSec releases a version with pgx/v5 (pgproto3/v3) replacing pgproto3/v2
# - Rebuild Docker image, run security-scan-docker-image, confirm finding is resolved
# - Remove this entry and the corresponding .trivyignore entry simultaneously
#
# References:
# - GHSA-jqcq-xjh3-6g23: https://github.com/advisories/GHSA-jqcq-xjh3-6g23
# - pgproto3/v2 archive notice: https://github.com/jackc/pgproto3
# - pgx/v5 (replacement): https://github.com/jackc/pgx
# - CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases
- vulnerability: GHSA-jqcq-xjh3-6g23
package:
name: github.com/jackc/pgproto3/v2
version: "v2.3.3"
type: go-module
reason: |
HIGH — DoS panic via negative field length in pgproto3/v2 v2.3.3 embedded in CrowdSec binaries.
pgproto3/v2 is archived/EOL with no fix planned; fix path requires CrowdSec to migrate to pgx/v5.
Charon uses SQLite, not PostgreSQL; this code path is not reachable in a standard deployment.
Risk accepted; no remediation until CrowdSec ships with pgx/v5.
Reviewed 2026-03-19: pgproto3/v2 EOL confirmed; CrowdSec has not migrated to pgx/v5 yet.
expiry: "2026-05-19" # Extended 2026-04-04: no fix path until CrowdSec migrates to pgx/v5.
# Action items when this suppression expires:
# 1. Check CrowdSec releases for pgx/v5 migration:
# https://github.com/crowdsecurity/crowdsec/releases
# 2. Verify with: `go version -m /path/to/crowdsec | grep pgproto3`
# Expected: pgproto3/v3 (or no pgproto3 reference if fully replaced)
# 3. If CrowdSec has migrated:
# a. Rebuild Docker image and run local security-scan-docker-image
# b. Remove this suppression entry and the corresponding .trivyignore entry
# 4. If not yet migrated: Extend expiry by 30 days and update the review comment above
# 5. If extended 3+ times: Open an upstream issue on crowdsecurity/crowdsec requesting pgx/v5 migration
# GHSA-x6gf-mpr2-68h6 / CVE-2026-4427: pgproto3/v2 DataRow.Decode panic on negative field length (DoS)
# Severity: HIGH (CVSS 7.5)
# Package: github.com/jackc/pgproto3/v2 v2.3.3 (embedded in /usr/local/bin/crowdsec and /usr/local/bin/cscli)
# Status: NO fix in pgproto3/v2 (archived/EOL) — fix path requires CrowdSec to migrate to pgx/v5
# Note: This is the NVD/Red Hat advisory alias for the same underlying vulnerability as GHSA-jqcq-xjh3-6g23
#
# Vulnerability Details:
# - DataRow.Decode does not validate field lengths; a malicious or compromised PostgreSQL server
# can send a negative field length causing a slice-bounds panic — denial of service (CWE-129).
# - CVSSv3: AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H (CVSS 7.5)
#
# Root Cause (EOL Module + Third-Party Binary):
# - Same underlying vulnerability as GHSA-jqcq-xjh3-6g23; tracked separately by NVD/Red Hat as CVE-2026-4427.
# - Charon does not use pgproto3/v2 directly nor communicate with PostgreSQL. The package
# is compiled into CrowdSec binaries for their internal database communication.
# - The pgproto3/v2 module is archived and EOL; no fix will be released. The fix path
# is migration to pgx/v5, which embeds an updated pgproto3/v3.
# - Fix path: once CrowdSec migrates to pgx/v5 and releases an updated binary, rebuild
# the Docker image and remove this suppression.
#
# Risk Assessment: ACCEPTED (Non-exploitable in Charon context + no upstream fix path)
# - The vulnerability requires a malicious PostgreSQL server response. Charon uses SQLite
# internally and does not run PostgreSQL. CrowdSec's database path is not exposed to
# external traffic in a standard Charon deployment.
# - The attack requires a compromised database server, which would imply full host compromise.
#
# Mitigation (active while suppression is in effect):
# - Monitor CrowdSec releases for pgx/v5 migration:
# https://github.com/crowdsecurity/crowdsec/releases
# - Weekly CI security rebuild flags the moment a fixed image ships.
#
# Review:
# - Reviewed 2026-03-21 (initial suppression): pgproto3/v2 is EOL; no fix exists or will exist.
# Waiting on CrowdSec to migrate to pgx/v5. Set 30-day review. Sibling GHSA-jqcq-xjh3-6g23
# was already suppressed; this alias surfaced as a separate Grype match via NVD/Red Hat tracking.
# - Extended 2026-04-04: CrowdSec has not migrated to pgx/v5 yet.
# - Next review: 2026-05-21. Remove suppression once CrowdSec ships with pgx/v5.
#
# Removal Criteria:
# - Same as GHSA-jqcq-xjh3-6g23: CrowdSec releases a version with pgx/v5 replacing pgproto3/v2
# - Rebuild Docker image, run security-scan-docker-image, confirm both advisories are resolved
# - Remove this entry, GHSA-jqcq-xjh3-6g23 entry, and both .trivyignore entries simultaneously
#
# References:
# - GHSA-x6gf-mpr2-68h6: https://github.com/advisories/GHSA-x6gf-mpr2-68h6
# - CVE-2026-4427: https://nvd.nist.gov/vuln/detail/CVE-2026-4427
# - Red Hat: https://access.redhat.com/security/cve/CVE-2026-4427
# - pgproto3/v2 archive notice: https://github.com/jackc/pgproto3
# - pgx/v5 (replacement): https://github.com/jackc/pgx
# - CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases
- vulnerability: GHSA-x6gf-mpr2-68h6
package:
name: github.com/jackc/pgproto3/v2
version: "v2.3.3"
type: go-module
reason: |
HIGH — DoS panic via negative field length in pgproto3/v2 v2.3.3 embedded in CrowdSec binaries.
NVD/Red Hat alias (CVE-2026-4427) for the same underlying bug as GHSA-jqcq-xjh3-6g23.
pgproto3/v2 is archived/EOL with no fix planned; fix path requires CrowdSec to migrate to pgx/v5.
Charon uses SQLite, not PostgreSQL; this code path is not reachable in a standard deployment.
Risk accepted; no remediation until CrowdSec ships with pgx/v5.
Reviewed 2026-03-21: pgproto3/v2 EOL confirmed; CrowdSec has not migrated to pgx/v5 yet.
expiry: "2026-05-21" # Extended 2026-04-04: no fix path until CrowdSec migrates to pgx/v5.
# Action items when this suppression expires:
# 1. Check CrowdSec releases for pgx/v5 migration:
# https://github.com/crowdsecurity/crowdsec/releases
# 2. Verify with: `go version -m /path/to/crowdsec | grep pgproto3`
# Expected: pgproto3/v3 (or no pgproto3 reference if fully replaced)
# 3. If CrowdSec has migrated:
# a. Rebuild Docker image and run local security-scan-docker-image
# b. Remove this entry, GHSA-jqcq-xjh3-6g23 entry, and both .trivyignore entries
# 4. If not yet migrated: Extend expiry by 30 days and update the review comment above
# 5. If extended 3+ times: Open an upstream issue on crowdsecurity/crowdsec requesting pgx/v5 migration
# CVE-2026-32286: pgproto3/v2 buffer overflow in DataRow handling (DoS)
# Severity: HIGH (CVSS 7.5)
# Package: github.com/jackc/pgproto3/v2 v2.3.3 (embedded in /usr/local/bin/crowdsec and /usr/local/bin/cscli)
# Status: NO fix in pgproto3/v2 (archived/EOL) — fix path requires CrowdSec to migrate to pgx/v5
#
# Vulnerability Details:
# - Buffer overflow in pgproto3/v2 DataRow handling allows a malicious or compromised PostgreSQL
# server to trigger a denial of service via crafted protocol messages (CWE-120).
# - CVSSv3: AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H (CVSS 7.5)
#
# Root Cause (EOL Module + Third-Party Binary):
# - Same affected module as GHSA-jqcq-xjh3-6g23 and GHSA-x6gf-mpr2-68h6 — pgproto3/v2 v2.3.3
# is the final release (repository archived Jul 12, 2025). No fix will be released.
# - Charon does not use pgproto3/v2 directly nor communicate with PostgreSQL. The package
# is compiled into CrowdSec binaries for their internal database communication.
# - Fix exists only in pgproto3/v3 (used by pgx/v5). CrowdSec v1.7.7 (latest) still depends
# on pgx/v4 → pgproto3/v2. Dockerfile already applies best-effort mitigation (pgx/v4@v4.18.3).
# - Fix path: once CrowdSec migrates to pgx/v5, rebuild the Docker image and remove this suppression.
#
# Risk Assessment: ACCEPTED (Non-exploitable in Charon context + no upstream fix path)
# - The vulnerability requires a malicious PostgreSQL server response. Charon uses SQLite
# internally and does not run PostgreSQL. CrowdSec's database path is not exposed to
# external traffic in a standard Charon deployment.
# - CrowdSec's PostgreSQL code path is not directly exposed to untrusted network input in
# Charon's deployment.
#
# Mitigation (active while suppression is in effect):
# - Monitor CrowdSec releases for pgx/v5 migration:
# https://github.com/crowdsecurity/crowdsec/releases
# - Weekly CI security rebuild flags the moment a fixed image ships.
#
# Review:
# - Reviewed 2026-04-10 (initial suppression): pgproto3/v2 is EOL; no fix exists or will exist.
# Waiting on CrowdSec to migrate to pgx/v5. Set 90-day review.
# - Next review: 2026-07-09. Remove suppression once CrowdSec ships with pgx/v5.
#
# Removal Criteria:
# - Same as GHSA-jqcq-xjh3-6g23: CrowdSec releases a version with pgx/v5 replacing pgproto3/v2
# - Rebuild Docker image, run security-scan-docker-image, confirm all pgproto3/v2 advisories are resolved
# - Remove this entry, GHSA-jqcq-xjh3-6g23 entry, GHSA-x6gf-mpr2-68h6 entry, and all .trivyignore entries simultaneously
#
# References:
# - CVE-2026-32286: https://nvd.nist.gov/vuln/detail/CVE-2026-32286
# - pgproto3/v2 archive notice: https://github.com/jackc/pgproto3
# - pgx/v5 (replacement): https://github.com/jackc/pgx
# - CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases
- vulnerability: CVE-2026-32286
package:
name: github.com/jackc/pgproto3/v2
version: "v2.3.3"
type: go-module
reason: |
HIGH — Buffer overflow in pgproto3/v2 v2.3.3 DataRow handling, embedded in CrowdSec binaries.
pgproto3/v2 v2.3.3 is the final release (archived Jul 2025); no fix will be released.
Fix exists only in pgproto3/v3 (pgx/v5). CrowdSec v1.7.7 still depends on pgx/v4 → pgproto3/v2.
Charon uses SQLite, not PostgreSQL; this code path is not reachable in a standard deployment.
Risk accepted; no remediation until CrowdSec ships with pgx/v5.
Reviewed 2026-04-10: pgproto3/v2 EOL confirmed; CrowdSec has not migrated to pgx/v5 yet.
expiry: "2026-07-09" # Reviewed 2026-04-10: no fix path until CrowdSec migrates to pgx/v5. 90-day expiry.
# Action items when this suppression expires:
# 1. Check CrowdSec releases for pgx/v5 migration:
# https://github.com/crowdsecurity/crowdsec/releases
# 2. Verify with: `go version -m /path/to/crowdsec | grep pgproto3`
# Expected: pgproto3/v3 (or no pgproto3 reference if fully replaced)
# 3. If CrowdSec has migrated:
# a. Rebuild Docker image and run local security-scan-docker-image
# b. Remove this entry, GHSA-jqcq-xjh3-6g23 entry, GHSA-x6gf-mpr2-68h6 entry, and all .trivyignore entries
# 4. If not yet migrated: Extend expiry by 30 days and update the review comment above
# 5. If extended 3+ times: Open an upstream issue on crowdsecurity/crowdsec requesting pgx/v5 migration
# GHSA-pxq6-2prw-chj9 / CVE-2026-33997: Moby off-by-one error in plugin privilege validation
# Severity: MEDIUM (CVSS 6.8)
# Package: github.com/docker/docker v28.5.2+incompatible (go-module)
# Status: Fixed in moby/moby v29.3.1 — NO fix available for docker/docker import path
#
# Vulnerability Details:
# - Off-by-one error in Moby's plugin privilege validation allows potential privilege escalation
# via crafted plugin configurations.
#
# Root Cause (No Fix Available for Import Path):
# - Same import path issue as CVE-2026-34040. The fix exists in moby/moby v29.3.1 but not
# for the docker/docker import path that Charon uses.
# - Fix path: same dependency migration pattern as CVE-2026-34040 (if needed) or upstream fix.
#
# Risk Assessment: ACCEPTED (Not exploitable in Charon context)
# - Charon uses the Docker client SDK only (list containers). The vulnerability is in Docker's
# plugin privilege validation, which is server-side functionality.
# - Charon does not run a Docker daemon, install Docker plugins, or interact with plugin privileges.
#
# Mitigation (active while suppression is in effect):
# - Monitor docker/docker releases: https://github.com/moby/moby/releases
# - Weekly CI security rebuild flags the moment a fixed version ships.
#
# Review:
# - Reviewed 2026-03-30 (initial suppression): no fix for docker/docker import path. Set 30-day review.
# - Next review: 2026-04-30. Remove suppression once a fix is available for the docker/docker import path.
#
# Removal Criteria:
# - docker/docker publishes a patched version OR moby/moby/v2 stabilizes
# - Update dependency, rebuild, run security-scan-docker-image, confirm finding is resolved
# - Remove this entry and all corresponding .trivyignore entries simultaneously
#
# References:
# - GHSA-pxq6-2prw-chj9: https://github.com/advisories/GHSA-pxq6-2prw-chj9
# - CVE-2026-33997: https://nvd.nist.gov/vuln/detail/CVE-2026-33997
# - moby/moby releases: https://github.com/moby/moby/releases
- vulnerability: GHSA-pxq6-2prw-chj9
package:
name: github.com/docker/docker
version: "v28.5.2+incompatible"
type: go-module
reason: |
MEDIUM — Off-by-one error in Moby plugin privilege validation in docker/docker v28.5.2+incompatible.
Fixed in moby/moby v29.3.1 but no fix for docker/docker import path.
Charon uses Docker client SDK only (list containers); the vulnerability is in Docker's server-side
plugin privilege validation. Charon does not run a Docker daemon or install Docker plugins.
Risk accepted; no remediation path until docker/docker publishes a fix or moby/moby/v2 stabilizes.
Reviewed 2026-03-30: no patched release available for docker/docker import path.
expiry: "2026-04-30" # 30-day review: no fix for docker/docker import path. Extend in 30-day increments with documented justification.
# Action items when this suppression expires:
# 1. Check docker/docker and moby/moby releases: https://github.com/moby/moby/releases
# 2. Check if moby/moby/v2 has stabilized: https://github.com/moby/moby
# 3. If a fix has shipped for docker/docker import path OR moby/moby/v2 is stable:
# a. Update the dependency and rebuild Docker image
# b. Run local security-scan-docker-image and confirm finding is resolved
# c. Remove this entry and all corresponding .trivyignore entries
# 4. If no fix yet: Extend expiry by 30 days and update the review comment above
# 5. If extended 3+ times: Open an issue to track moby/moby/v2 migration feasibility
# GHSA-78h2-9frx-2jm8: go-jose JWE decryption panic (DoS)
# Severity: HIGH
# Packages: github.com/go-jose/go-jose/v3 v3.0.4 and github.com/go-jose/go-jose/v4 v4.1.3
# (embedded in /usr/bin/caddy)
# Status: Fix available in go-jose/v3 v3.0.5 and go-jose/v4 v4.1.4 — requires upstream Caddy rebuild
#
# Vulnerability Details:
# - JWE decryption can trigger a panic due to improper input validation, causing
# a denial-of-service condition (runtime crash).
#
# Root Cause (Third-Party Binary):
# - Charon does not use go-jose directly. The library is compiled into the Caddy binary
# shipped in the Docker image.
# - Fixes are available upstream (v3.0.5 and v4.1.4) but require a Caddy rebuild to pick up.
# - Fix path: once the upstream Caddy release includes the patched go-jose versions,
# rebuild the Docker image and remove these suppressions.
#
# Risk Assessment: ACCEPTED (No direct use + fix requires upstream rebuild)
# - Charon does not import or call go-jose functions; the library is only present as a
# transitive dependency inside the Caddy binary.
# - The attack vector requires crafted JWE input reaching Caddy's internal JWT handling,
# which is limited to authenticated admin-API paths not exposed in Charon deployments.
#
# Mitigation (active while suppression is in effect):
# - Monitor Caddy releases: https://github.com/caddyserver/caddy/releases
# - Weekly CI security rebuild flags the moment a fixed image ships.
#
# Review:
# - Reviewed 2026-04-05 (initial suppression): fix available upstream but not yet in Caddy release.
# Set 30-day review.
# - Next review: 2026-05-05. Remove suppression once Caddy ships with patched go-jose.
#
# Removal Criteria:
# - Caddy releases a version built with go-jose/v3 >= v3.0.5 and go-jose/v4 >= v4.1.4
# - Rebuild Docker image, run security-scan-docker-image, confirm finding is resolved
# - Remove both entries (v3 and v4) and any corresponding .trivyignore entries simultaneously
#
# References:
# - GHSA-78h2-9frx-2jm8: https://github.com/advisories/GHSA-78h2-9frx-2jm8
# - go-jose releases: https://github.com/go-jose/go-jose/releases
# - Caddy releases: https://github.com/caddyserver/caddy/releases
- vulnerability: GHSA-78h2-9frx-2jm8
package:
name: github.com/go-jose/go-jose/v3
version: "v3.0.4"
type: go-module
reason: |
HIGH — JWE decryption panic in go-jose v3.0.4 embedded in /usr/bin/caddy.
Fix available in v3.0.5 but requires upstream Caddy rebuild. Charon does not use go-jose
directly. Deferring to next Caddy release.
expiry: "2026-05-05" # 30-day review: remove once Caddy ships with go-jose/v3 >= v3.0.5.
# Action items when this suppression expires:
# 1. Check Caddy releases: https://github.com/caddyserver/caddy/releases
# 2. Verify with: `go version -m /usr/bin/caddy | grep go-jose`
# Expected: go-jose/v3 >= v3.0.5
# 3. If Caddy has updated:
# a. Rebuild Docker image and run local security-scan-docker-image
# b. Remove this entry, the v4 entry below, and any corresponding .trivyignore entries
# 4. If not yet updated: Extend expiry by 30 days and update the review comment above
# 5. If extended 3+ times: Open an upstream issue on caddyserver/caddy requesting go-jose update
# GHSA-78h2-9frx-2jm8 (go-jose/v4) — see full justification in the go-jose/v3 entry above
- vulnerability: GHSA-78h2-9frx-2jm8
package:
name: github.com/go-jose/go-jose/v4
version: "v4.1.3"
type: go-module
reason: |
HIGH — JWE decryption panic in go-jose v4.1.3 embedded in /usr/bin/caddy.
Fix available in v4.1.4 but requires upstream Caddy rebuild. Charon does not use go-jose
directly. Deferring to next Caddy release.
expiry: "2026-05-05" # 30-day review: see go-jose/v3 entry above for action items.
# Match exclusions (patterns to ignore during scanning)
# Use sparingly - prefer specific CVE suppressions above

View File

@@ -3,14 +3,103 @@ playwright/.auth/
# GHSA-69x3-g4r3-p962 / CVE-2026-25793: Nebula ECDSA Signature Malleability
# Severity: HIGH (CVSS 8.1) — Package: github.com/slackhq/nebula v1.9.7 in /usr/bin/caddy
# Cannot upgrade: smallstep/certificates v0.27.5 (latest stable as of 2026-02-19) still pins nebula v1.9.x.
# Charon does not use Nebula VPN PKI by default. Review by: 2026-03-05
# Fix exists in nebula v1.10.3, but smallstep/certificates (through v0.30.2) uses legacy nebula
# APIs removed in v1.10+, causing compile failures. Waiting on certificates upstream update.
# Charon does not use Nebula VPN PKI by default. Review by: 2026-05-10
# See also: .grype.yaml for full justification
# exp: 2026-05-10
CVE-2026-25793
# CVE-2026-22184: zlib Global Buffer Overflow in untgz utility
# Severity: CRITICAL (CVSS 9.8) — Package: zlib 1.3.1-r2 in Alpine base image
# No upstream fix available: Alpine 3.23 (including edge) still ships zlib 1.3.1-r2.
# Charon does not use untgz or process untrusted tar archives. Review by: 2026-03-14
# CVE-2026-27171: zlib CPU spin via crc32_combine64 infinite loop (DoS)
# Severity: MEDIUM (CVSS 5.5 NVD / 2.9 MITRE) — Package: zlib 1.3.1-r2 in Alpine base image
# Fix requires zlib >= 1.3.2. No upstream fix available: Alpine 3.23 still ships zlib 1.3.1-r2.
# Attack requires local access (AV:L); the vulnerable code path is not reachable via Charon's
# network-facing surface. Non-blocking by CI policy (MEDIUM). Review by: 2026-05-21
# exp: 2026-05-21
CVE-2026-27171
# CVE-2026-2673: OpenSSL TLS 1.3 server key exchange group downgrade (libcrypto3/libssl3)
# Severity: HIGH (CVSS 7.5) — Packages: libcrypto3 3.5.5-r0 and libssl3 3.5.5-r0 in Alpine base image
# No upstream fix available: Alpine 3.23 still ships libcrypto3/libssl3 3.5.5-r0 as of 2026-03-18.
# When DEFAULT is in TLS 1.3 group config, server may select a weaker key exchange group.
# Charon terminates TLS at the Caddy layer — the Go backend does not act as a raw TLS 1.3 server.
# Review by: 2026-05-18
# See also: .grype.yaml for full justification
CVE-2026-22184
# exp: 2026-05-18
CVE-2026-2673
# CVE-2026-33186 / GHSA-p77j-4mvh-x3m3: gRPC-Go authorization bypass via missing leading slash
# Severity: CRITICAL (CVSS 9.1) — Package: google.golang.org/grpc, embedded in CrowdSec (v1.74.2) and Caddy (v1.79.1)
# Fix exists at v1.79.3 — Charon's own dep is patched. Waiting on CrowdSec and Caddy upstream releases.
# CrowdSec's and Caddy's grpc servers are not exposed externally in a standard Charon deployment.
# Suppressed for CrowdSec/Caddy embedded binaries only — Charon's direct deps are fixed (v1.79.3).
# Review by: 2026-05-04
# See also: .grype.yaml for full justification
# exp: 2026-05-04
CVE-2026-33186
# GHSA-479m-364c-43vc: goxmldsig XML signature validation bypass (loop variable capture)
# Severity: HIGH (CVSS 7.5) — Package: github.com/russellhaering/goxmldsig v1.5.0, embedded in /usr/bin/caddy
# Fix exists at v1.6.0 — waiting on Caddy upstream (or caddy-security plugin) to release with patched goxmldsig.
# Charon does not configure SAML-based SSO by default; the vulnerable path is not reachable in a standard deployment.
# Awaiting Caddy upstream update to include goxmldsig v1.6.0.
# Review by: 2026-05-04
# See also: .grype.yaml for full justification
# exp: 2026-05-04
GHSA-479m-364c-43vc
# GHSA-6g7g-w4f8-9c9x: buger/jsonparser Delete panic on malformed JSON (DoS)
# Severity: HIGH (CVSS 7.5) — Package: github.com/buger/jsonparser v1.1.1, embedded in CrowdSec binaries
# No upstream fix available as of 2026-03-19 (issue #275 open, golang/vulndb #4514 open).
# Charon does not use this package; the vector requires reaching CrowdSec's internal processing pipeline.
# Review by: 2026-05-19
# See also: .grype.yaml for full justification
# exp: 2026-05-19
GHSA-6g7g-w4f8-9c9x
# GHSA-jqcq-xjh3-6g23: pgproto3/v2 DataRow.Decode panic on negative field length (DoS)
# Severity: HIGH (CVSS 7.5) — Package: github.com/jackc/pgproto3/v2 v2.3.3, embedded in CrowdSec binaries
# pgproto3/v2 is archived/EOL — no fix will be released. Fix path requires CrowdSec to migrate to pgx/v5.
# Charon uses SQLite; the PostgreSQL code path is not reachable in a standard deployment.
# Review by: 2026-05-19
# See also: .grype.yaml for full justification
# exp: 2026-05-19
GHSA-jqcq-xjh3-6g23
# GHSA-x6gf-mpr2-68h6 / CVE-2026-4427: pgproto3/v2 DataRow.Decode panic on negative field length (DoS)
# Severity: HIGH (CVSS 7.5) — Package: github.com/jackc/pgproto3/v2 v2.3.3, embedded in CrowdSec binaries
# NVD/Red Hat alias (CVE-2026-4427) for the same underlying bug as GHSA-jqcq-xjh3-6g23.
# pgproto3/v2 is archived/EOL — no fix will be released. Fix path requires CrowdSec to migrate to pgx/v5.
# Charon uses SQLite; the PostgreSQL code path is not reachable in a standard deployment.
# Review by: 2026-05-21
# See also: .grype.yaml for full justification
# exp: 2026-05-21
GHSA-x6gf-mpr2-68h6
# CVE-2026-32286: pgproto3/v2 buffer overflow in DataRow handling (DoS)
# Severity: HIGH (CVSS 7.5) — Package: github.com/jackc/pgproto3/v2 v2.3.3, embedded in CrowdSec binaries
# pgproto3/v2 v2.3.3 is the final release — repository archived Jul 12, 2025. No fix will be released.
# Fix exists only in pgproto3/v3 (used by pgx/v5). CrowdSec v1.7.7 (latest) still depends on pgx/v4 → pgproto3/v2.
# Dockerfile already applies best-effort mitigation (pgx/v4@v4.18.3).
# Charon uses SQLite; the PostgreSQL code path is not reachable in a standard deployment.
# Review by: 2026-07-09
# See also: .grype.yaml for full justification
# exp: 2026-07-09
CVE-2026-32286
# CVE-2026-33997 / GHSA-pxq6-2prw-chj9: Moby off-by-one error in plugin privilege validation
# Severity: MEDIUM (CVSS 6.8) — Package: github.com/docker/docker v28.5.2+incompatible
# Fixed in moby/moby v29.3.1 but no fix for docker/docker import path.
# Charon uses Docker client SDK only (list containers); plugin privilege validation is server-side.
# Review by: 2026-04-30
# See also: .grype.yaml for full justification
# exp: 2026-04-30
CVE-2026-33997
# GHSA-pxq6-2prw-chj9: Moby off-by-one error in plugin privilege validation (GHSA alias)
# Severity: MEDIUM (CVSS 6.8) — Package: github.com/docker/docker v28.5.2+incompatible
# GHSA alias for CVE-2026-33997. See CVE-2026-33997 entry above for full details.
# Review by: 2026-04-30
# See also: .grype.yaml for full justification
# exp: 2026-04-30
GHSA-pxq6-2prw-chj9

View File

@@ -1 +1 @@
v0.21.0
v0.27.0

View File

@@ -139,15 +139,15 @@ graph TB
| Component | Technology | Version | Purpose |
|-----------|-----------|---------|---------|
| **Framework** | React | 19.2.3 | UI framework |
| **Language** | TypeScript | 5.x | Type-safe JavaScript |
| **Build Tool** | Vite | 6.1.9 | Fast bundler and dev server |
| **CSS Framework** | Tailwind CSS | 3.x | Utility-first CSS |
| **Language** | TypeScript | 6.x | Type-safe JavaScript |
| **Build Tool** | Vite | 8.0.0-beta.18 | Fast bundler and dev server |
| **CSS Framework** | Tailwind CSS | 4.2.1 | Utility-first CSS |
| **Routing** | React Router | 7.x | Client-side routing |
| **HTTP Client** | Fetch API | Native | API communication |
| **State Management** | React Hooks + Context | Native | Global state |
| **Internationalization** | i18next | Latest | Support for 5 languages |
| **Unit Testing** | Vitest | 2.x | Fast unit test runner |
| **E2E Testing** | Playwright | 1.50.x | Browser automation |
| **Unit Testing** | Vitest | 4.1.0-beta.6 | Fast unit test runner |
| **E2E Testing** | Playwright | 1.58.2 | Browser automation |
### Infrastructure
@@ -218,7 +218,7 @@ graph TB
│ │ └── main.tsx # Application entry point
│ ├── public/ # Static assets
│ ├── package.json # NPM dependencies
│ └── vite.config.js # Vite configuration
│ └── vite.config.ts # Vite configuration
├── .docker/ # Docker configuration
│ ├── compose/ # Docker Compose files
@@ -306,11 +306,13 @@ graph TB
**Key Modules:**
#### API Layer (`internal/api/`)
- **Handlers:** Process HTTP requests, validate input, return responses
- **Middleware:** CORS, GZIP, authentication, logging, metrics, panic recovery
- **Routes:** Route registration and grouping (public vs authenticated)
**Example Endpoints:**
- `GET /api/v1/proxy-hosts` - List all proxy hosts
- `POST /api/v1/proxy-hosts` - Create new proxy host
- `PUT /api/v1/proxy-hosts/:id` - Update proxy host
@@ -318,6 +320,7 @@ graph TB
- `WS /api/v1/logs` - WebSocket for real-time logs
#### Service Layer (`internal/services/`)
- **ProxyService:** CRUD operations for proxy hosts, validation logic
- **CertificateService:** ACME certificate provisioning and renewal
- **DockerService:** Container discovery and monitoring
@@ -327,12 +330,14 @@ graph TB
**Design Pattern:** Services contain business logic and call multiple repositories/managers
#### Caddy Manager (`internal/caddy/`)
- **Manager:** Orchestrates Caddy configuration updates
- **Config Builder:** Generates Caddy JSON from database models
- **Reload Logic:** Atomic config application with rollback on failure
- **Security Integration:** Injects Cerberus middleware into Caddy pipelines
**Responsibilities:**
1. Generate Caddy JSON configuration from database state
2. Validate configuration before applying
3. Trigger Caddy reload via JSON API
@@ -340,22 +345,26 @@ graph TB
5. Integrate security layers (WAF, ACL, Rate Limiting)
#### Security Suite (`internal/cerberus/`)
- **ACL (Access Control Lists):** IP-based allow/deny rules, GeoIP blocking
- **WAF (Web Application Firewall):** Coraza engine with OWASP CRS
- **CrowdSec:** Behavior-based threat detection with global intelligence
- **Rate Limiter:** Per-IP request throttling
**Integration Points:**
- Middleware injection into Caddy request pipeline
- Database-driven rule configuration
- Metrics collection for security events
#### Database Layer (`internal/database/`)
- **Migrations:** Automatic schema versioning with GORM AutoMigrate
- **Seeding:** Default settings and admin user creation
- **Connection Management:** SQLite with WAL mode and connection pooling
**Schema Overview:**
- **ProxyHost:** Domain, upstream target, SSL config
- **RemoteServer:** Upstream server definitions
- **CaddyConfig:** Generated Caddy configuration (audit trail)
@@ -372,6 +381,7 @@ graph TB
**Component Architecture:**
#### Pages (`src/pages/`)
- **Dashboard:** System overview, recent activity, quick actions
- **ProxyHosts:** List, create, edit, delete proxy configurations
- **Certificates:** Manage SSL/TLS certificates, view expiry
@@ -380,17 +390,20 @@ graph TB
- **Users:** User management (admin only)
#### Components (`src/components/`)
- **Forms:** Reusable form inputs with validation
- **Modals:** Dialog components for CRUD operations
- **Tables:** Data tables with sorting, filtering, pagination
- **Layout:** Header, sidebar, navigation
#### API Client (`src/api/`)
- Centralized API calls with error handling
- Request/response type definitions
- Authentication token management
**Example:**
```typescript
export const getProxyHosts = async (): Promise<ProxyHost[]> => {
const response = await fetch('/api/v1/proxy-hosts', {
@@ -402,11 +415,13 @@ export const getProxyHosts = async (): Promise<ProxyHost[]> => {
```
#### State Management
- **React Context:** Global state for auth, theme, language
- **Local State:** Component-specific state with `useState`
- **Custom Hooks:** Encapsulate API calls and side effects
**Example Hook:**
```typescript
export const useProxyHosts = () => {
const [hosts, setHosts] = useState<ProxyHost[]>([]);
@@ -425,11 +440,13 @@ export const useProxyHosts = () => {
**Purpose:** High-performance reverse proxy with automatic HTTPS
**Integration:**
- Embedded as a library in the Go backend
- Configured via JSON API (not Caddyfile)
- Listens on ports 80 (HTTP) and 443 (HTTPS)
**Features Used:**
- Dynamic configuration updates without restarts
- Automatic HTTPS with Let's Encrypt and ZeroSSL
- DNS challenge support for wildcard certificates
@@ -437,6 +454,7 @@ export const useProxyHosts = () => {
- Request logging and metrics
**Configuration Flow:**
1. User creates proxy host via frontend
2. Backend validates and saves to database
3. Caddy Manager generates JSON configuration
@@ -461,12 +479,14 @@ For each proxy host, Charon generates **two routes** with the same domain:
- Handlers: Full Cerberus security suite
This pattern is **intentional and valid**:
- Emergency route provides break-glass access to security controls
- Main route protects application with enterprise security features
- Caddy processes routes in order (emergency matches first)
- Validator allows duplicate hosts when one has paths and one doesn't
**Example:**
```json
// Emergency Route (evaluated first)
{
@@ -488,6 +508,7 @@ This pattern is **intentional and valid**:
**Purpose:** Persistent data storage
**Why SQLite:**
- Embedded (no external database server)
- Serverless (perfect for single-user/small team)
- ACID compliant with WAL mode
@@ -495,16 +516,19 @@ This pattern is **intentional and valid**:
- Backup-friendly (single file)
**Configuration:**
- **WAL Mode:** Allows concurrent reads during writes
- **Foreign Keys:** Enforced referential integrity
- **Pragma Settings:** Performance optimizations
**Backup Strategy:**
- Automated daily backups to `data/backups/`
- Retention: 7 daily, 4 weekly, 12 monthly backups
- Backup during low-traffic periods
**Migrations:**
- GORM AutoMigrate for schema changes
- Manual migrations for complex data transformations
- Rollback support via backup restoration
@@ -537,6 +561,7 @@ graph LR
**Purpose:** Prevent brute-force attacks and API abuse
**Implementation:**
- Per-IP request counters with sliding window
- Configurable thresholds (e.g., 100 req/min, 1000 req/hour)
- HTTP 429 response when limit exceeded
@@ -547,12 +572,15 @@ graph LR
**Purpose:** Behavior-based threat detection
**Features:**
- Local log analysis (brute-force, port scans, exploits)
- Global threat intelligence (crowd-sourced IP reputation)
- Automatic IP banning with configurable duration
- Decision management API (view, create, delete bans)
- IP whitelist management: operators add/remove IPs and CIDRs via the management UI; entries are persisted in SQLite and regenerated into a `crowdsecurity/whitelists` parser YAML on every mutating operation and at startup
**Modes:**
- **Local Only:** No external API calls
- **API Mode:** Sync with CrowdSec cloud for global intelligence
@@ -561,12 +589,14 @@ graph LR
**Purpose:** IP-based access control
**Features:**
- Per-proxy-host allow/deny rules
- CIDR range support (e.g., `192.168.1.0/24`)
- Geographic blocking via GeoIP2 (MaxMind)
- Admin whitelist (emergency access)
**Evaluation Order:**
1. Check admin whitelist (always allow)
2. Check deny list (explicit block)
3. Check allow list (explicit allow)
@@ -579,6 +609,7 @@ graph LR
**Engine:** Coraza with OWASP Core Rule Set (CRS)
**Detection Categories:**
- SQL Injection (SQLi)
- Cross-Site Scripting (XSS)
- Remote Code Execution (RCE)
@@ -587,12 +618,14 @@ graph LR
- Command Injection
**Modes:**
- **Monitor:** Log but don't block (testing)
- **Block:** Return HTTP 403 for violations
### Layer 5: Application Security
**Additional Protections:**
- **SSRF Prevention:** Block requests to private IP ranges in webhooks/URL validation
- **HTTP Security Headers:** CSP, HSTS, X-Frame-Options, X-Content-Type-Options
- **Input Validation:** Server-side validation for all user inputs
@@ -610,6 +643,7 @@ graph LR
3. **Direct Database Access:** Manual SQLite update as last resort
**Emergency Token:**
- 64-character hex token set via `CHARON_EMERGENCY_TOKEN`
- Grants temporary admin access
- Rotated after each use
@@ -635,6 +669,7 @@ Charon operates with **two distinct traffic flows** on separate ports, each with
- **Testing:** Playwright E2E tests verify UI/UX functionality on this port
**Why No Middleware?**
- Management interface must remain accessible even when security modules are misconfigured
- Emergency endpoints (`/api/v1/emergency/*`) require unrestricted access for system recovery
- Separation of concerns: admin access control is handled by JWT, not proxy-level security
@@ -797,6 +832,7 @@ sequenceDiagram
**Rationale:** Simplicity over scalability - target audience is home users and small teams
**Container Contents:**
- Frontend static files (Vite build output)
- Go backend binary
- Embedded Caddy server
@@ -911,11 +947,13 @@ services:
### High Availability Considerations
**Current Limitations:**
- SQLite does not support clustering
- Single point of failure (one container)
- Not designed for horizontal scaling
**Future Options:**
- PostgreSQL backend for HA deployments
- Read replicas for load balancing
- Container orchestration (Kubernetes, Docker Swarm)
@@ -927,6 +965,7 @@ services:
### Local Development Setup
1. **Prerequisites:**
```bash
- Go 1.26+ (backend development)
- Node.js 23+ and npm (frontend development)
@@ -935,12 +974,14 @@ services:
```
2. **Clone Repository:**
```bash
git clone https://github.com/Wikid82/Charon.git
cd Charon
```
3. **Backend Development:**
```bash
cd backend
go mod download
@@ -949,6 +990,7 @@ services:
```
4. **Frontend Development:**
```bash
cd frontend
npm install
@@ -957,6 +999,7 @@ services:
```
5. **Full-Stack Development (Docker):**
```bash
docker-compose -f .docker/compose/docker-compose.dev.yml up
# Frontend + Backend + Caddy in one container
@@ -965,12 +1008,14 @@ services:
### Git Workflow
**Branch Strategy:**
- `main`: Stable production branch
- `feature/*`: New feature development
- `fix/*`: Bug fixes
- `chore/*`: Maintenance tasks
**Commit Convention:**
- `feat:` New user-facing feature
- `fix:` Bug fix in application code
- `chore:` Infrastructure, CI/CD, dependencies
@@ -979,6 +1024,7 @@ services:
- `test:` Adding or updating tests
**Example:**
```
feat: add DNS-01 challenge support for Cloudflare
@@ -1031,6 +1077,7 @@ Closes #123
**Purpose:** Validate critical user flows in a real browser
**Scope:**
- User authentication
- Proxy host CRUD operations
- Certificate provisioning
@@ -1038,6 +1085,7 @@ Closes #123
- Real-time log streaming
**Execution:**
```bash
# Run against Docker container
npx playwright test --project=chromium
@@ -1050,10 +1098,12 @@ npx playwright test --debug
```
**Coverage Modes:**
- **Docker Mode:** Integration testing, no coverage (0% reported)
- **Vite Dev Mode:** Coverage collection with V8 inspector
**Why Two Modes?**
- Playwright coverage requires source maps and raw source files
- Docker serves pre-built production files (no source maps)
- Vite dev server exposes source files for coverage instrumentation
@@ -1067,6 +1117,7 @@ npx playwright test --debug
**Coverage Target:** 85% minimum
**Execution:**
```bash
# Run all tests
go test ./...
@@ -1079,11 +1130,13 @@ go test -cover ./...
```
**Test Organization:**
- `*_test.go` files alongside source code
- Table-driven tests for comprehensive coverage
- Mocks for external dependencies (database, HTTP clients)
**Example:**
```go
func TestCreateProxyHost(t *testing.T) {
tests := []struct {
@@ -1123,6 +1176,7 @@ func TestCreateProxyHost(t *testing.T) {
**Coverage Target:** 85% minimum
**Execution:**
```bash
# Run all tests
npm test
@@ -1135,6 +1189,7 @@ npm run test:coverage
```
**Test Organization:**
- `*.test.tsx` files alongside components
- Mock API calls with MSW (Mock Service Worker)
- Snapshot tests for UI consistency
@@ -1146,12 +1201,14 @@ npm run test:coverage
**Location:** `backend/integration/`
**Scope:**
- API endpoint end-to-end flows
- Database migrations
- Caddy manager integration
- CrowdSec API calls
**Execution:**
```bash
go test ./integration/...
```
@@ -1161,6 +1218,7 @@ go test ./integration/...
**Automated Hooks (via `.pre-commit-config.yaml`):**
**Fast Stage (< 5 seconds):**
- Trailing whitespace removal
- EOF fixer
- YAML syntax check
@@ -1168,11 +1226,13 @@ go test ./integration/...
- Markdown link validation
**Manual Stage (run explicitly):**
- Backend coverage tests (60-90s)
- Frontend coverage tests (30-60s)
- TypeScript type checking (10-20s)
**Why Manual?**
- Coverage tests are slow and would block commits
- Developers run them on-demand before pushing
- CI enforces coverage on pull requests
@@ -1180,10 +1240,12 @@ go test ./integration/...
### Continuous Integration (GitHub Actions)
**Workflow Triggers:**
- `push` to `main`, `feature/*`, `fix/*`
- `pull_request` to `main`
**CI Jobs:**
1. **Lint:** golangci-lint, ESLint, markdownlint, hadolint
2. **Test:** Go tests, Vitest, Playwright
3. **Security:** Trivy, CodeQL, Grype, Govulncheck
@@ -1205,6 +1267,7 @@ go test ./integration/...
- **PRERELEASE:** `-beta.1`, `-rc.1`, etc.
**Examples:**
- `1.0.0` - Stable release
- `1.1.0` - New feature (DNS provider support)
- `1.1.1` - Bug fix (GORM query fix)
@@ -1215,12 +1278,14 @@ go test ./integration/...
### Build Pipeline (Multi-Platform)
**Platforms Supported:**
- `linux/amd64`
- `linux/arm64`
**Build Process:**
1. **Frontend Build:**
```bash
cd frontend
npm ci --only=production
@@ -1229,6 +1294,7 @@ go test ./integration/...
```
2. **Backend Build:**
```bash
cd backend
go build -o charon cmd/api/main.go
@@ -1236,6 +1302,7 @@ go test ./integration/...
```
3. **Docker Image Build:**
```bash
docker buildx build \
--platform linux/amd64,linux/arm64 \
@@ -1292,6 +1359,7 @@ go test ./integration/...
- Level: SLSA Build L3 (hermetic builds)
**Verification Example:**
```bash
# Verify image signature
cosign verify \
@@ -1309,6 +1377,7 @@ grype ghcr.io/wikid82/charon@sha256:<index-digest>
### Rollback Strategy
**Container Rollback:**
```bash
# List available versions
docker images wikid82/charon
@@ -1319,6 +1388,7 @@ docker-compose up -d --pull always wikid82/charon:1.1.1
```
**Database Rollback:**
```bash
# Restore from backup
docker exec charon /app/scripts/restore-backup.sh \
@@ -1355,11 +1425,13 @@ docker exec charon /app/scripts/restore-backup.sh \
### API Extensibility
**REST API Design:**
- Version prefix: `/api/v1/`
- Future versions: `/api/v2/` (backward-compatible)
- Deprecation policy: 2 major versions supported
**WebHooks (Future):**
- Event notifications for external systems
- Triggers: Proxy host created, certificate renewed, security event
- Payload: JSON with event type and data
@@ -1369,6 +1441,7 @@ docker exec charon /app/scripts/restore-backup.sh \
**Current:** Cerberus security middleware injected into Caddy pipeline
**Future:**
- User-defined middleware (rate limiting rules, custom headers)
- JavaScript/Lua scripting for request transformation
- Plugin marketplace for community contributions
@@ -1452,6 +1525,7 @@ docker exec charon /app/scripts/restore-backup.sh \
**GitHub Copilot Instructions:**
All agents (`Planning`, `Backend_Dev`, `Frontend_Dev`, `DevOps`) must reference `ARCHITECTURE.md` when:
- Creating new components
- Modifying core systems
- Changing integration points

View File

@@ -7,17 +7,55 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
### Added
- **CrowdSec Dashboard**: Visual analytics for CrowdSec security data within the Security section
- Summary cards showing total bans, active bans, unique IPs, and top scenario
- Interactive charts: ban timeline (area), top attacking IPs (bar), scenario breakdown (pie)
- Configurable time range selector (1h, 6h, 24h, 7d, 30d)
- Active decisions table with IP, scenario, duration, type, and time remaining
- Alerts feed with pagination sourced from CrowdSec LAPI
- CSV and JSON export for decisions data
- Server-side caching (30–60s TTL) for fast dashboard loads
- Full i18n support across all 5 locales (en, de, fr, es, zh)
- Keyboard navigable, screen-reader compatible (WCAG 2.2 AA)
- **Notifications:** Added Ntfy notification provider with support for self-hosted and cloud instances, optional Bearer token authentication, and JSON template customization
- **Certificate Deletion**: Clean up expired and unused certificates directly from the Certificates page
- Expired Let's Encrypt certificates not attached to any proxy host can now be deleted
- Custom and staging certificates remain deletable when not in use
- In-use certificates show a disabled delete button with a tooltip explaining why
- Native browser confirmation replaced with an accessible, themed confirmation dialog
- **Pushover Notification Provider**: Send push notifications to your devices via the Pushover app
- Supports JSON templates (minimal, detailed, custom)
- Application API Token stored securely — never exposed in API responses
- User Key stored in the URL field, following the same pattern as Telegram
- Feature flag: `feature.notifications.service.pushover.enabled` (on by default)
- Emergency priority (2) is intentionally unsupported — deferred to a future release
- **Slack Notification Provider**: Send alerts to Slack channels via Incoming Webhooks
- Supports JSON templates (minimal, detailed, custom) with Slack's native `text` format
- Webhook URL stored securely — never exposed in API responses
- Optional channel display name for easy identification in provider list
- Feature flag: `feature.notifications.service.slack.enabled` (on by default)
- See [Notification Guide](docs/features/notifications.md) for setup instructions
### CI/CD
- **Supply Chain**: Optimized verification workflow to prevent redundant builds
- Change: Removed direct Push/PR triggers; now waits for 'Docker Build' via `workflow_run`
### Security
- **Supply Chain**: Enhanced PR verification workflow stability and accuracy
- **Vulnerability Reporting**: Eliminated false negatives ("0 vulnerabilities") by enforcing strict failure conditions
- **Tooling**: Switched to manual Grype installation ensuring usage of latest stable binary
- **Observability**: Improved debugging visibility for vulnerability scans and SARIF generation
### Performance
- **E2E Tests**: Reduced feature flag API calls by 90% through conditional polling optimization (Phase 2)
- Conditional skip: Exits immediately if flags already in expected state (~50% of cases)
- Request coalescing: Shares in-flight API requests between parallel test workers
@@ -29,6 +67,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Prevents timeout errors in Firefox/WebKit caused by strict label matching
### Fixed
- **Notifications:** Fixed Pushover token-clearing bug where tokens were silently stripped on provider create/update
- **TCP Monitor Creation**: Fixed misleading form UX that caused silent HTTP 500 errors when creating TCP monitors
- Corrected URL placeholder to show `host:port` format instead of the incorrect `tcp://host:port` prefix
- Added dynamic per-type placeholder and helper text (HTTP monitors show a full URL example; TCP monitors show `host:port`)
- Added client-side validation that blocks form submission when a scheme prefix (e.g. `tcp://`) is detected, with an inline error message
- Reordered form fields so the monitor type selector appears above the URL input, making the dynamic helper text immediately relevant
- i18n: Added 5 new translation keys across en, de, fr, es, and zh locales
- **CI: Rate Limit Integration Tests**: Hardened test script reliability — login now validates HTTP status, Caddy admin API readiness gated on `/config/` poll, security config failures are fatal with full diagnostics, and poll interval increased to 5s
- **CI: Rate Limit Integration Tests**: Removed stale GeoIP database SHA256 checksum from Dockerfile non-CI path (hash was perpetually stale due to weekly upstream updates)
- **CI: Rate Limit Integration Tests**: Fixed Caddy admin API debug dump URL to use canonical trailing slash in workflow
- Fixed: Added robust validation and debug logging for Docker image tags to prevent invalid reference errors.
- Fixed: Removed log masking for image references and added manifest validation to debug CI failures.
- **Proxy Hosts**: Fixed ACL and Security Headers dropdown selections so create/edit saves now keep the selected values (including clearing to none) after submit and reload.
@@ -41,6 +90,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- **Test Performance**: Reduced system settings test execution time by 31% (from 23 minutes to 16 minutes)
### Changed
- **Testing Infrastructure**: Enhanced E2E test helpers with better synchronization and error handling
- **CI**: Optimized E2E workflow shards [Reduced from 4 to 3]

View File

@@ -45,8 +45,6 @@ brew install lefthook
go install github.com/evilmartians/lefthook@latest
```
```bash
# Option 1: Homebrew (macOS/Linux)
brew install golangci-lint
@@ -84,17 +82,20 @@ For local development, install go 1.26.0+ from [go.dev/dl](https://go.dev/dl/).
When the project's Go version is updated (usually by Renovate):
1. **Pull the latest changes**
```bash
git pull
```
2. **Update your local Go installation**
```bash
# Run the Go update skill (downloads and installs the new version)
.github/skills/scripts/skill-runner.sh utility-update-go-version
```
3. **Rebuild your development tools**
```bash
# This fixes lefthook hook errors and IDE issues
./scripts/rebuild-go-tools.sh

View File

@@ -10,22 +10,26 @@ ARG BUILD_DEBUG=0
# ---- Pinned Toolchain Versions ----
# renovate: datasource=docker depName=golang versioning=docker
ARG GO_VERSION=1.26.1
ARG GO_VERSION=1.26.2
# renovate: datasource=docker depName=alpine versioning=docker
ARG ALPINE_IMAGE=alpine:3.23.3@sha256:25109184c71bdad752c8312a8623239686a9a2071e8825f20acb8f2198c3f659
ARG ALPINE_IMAGE=alpine:3.23.4@sha256:5b10f432ef3da1b8d4c7eb6c487f2f5a8f096bc91145e68878dd4a5019afde11
# ---- Shared CrowdSec Version ----
# renovate: datasource=github-releases depName=crowdsecurity/crowdsec
ARG CROWDSEC_VERSION=1.7.6
ARG CROWDSEC_VERSION=1.7.7
# CrowdSec fallback tarball checksum (v${CROWDSEC_VERSION})
ARG CROWDSEC_RELEASE_SHA256=704e37121e7ac215991441cef0d8732e33fa3b1a2b2b88b53a0bfe5e38f863bd
# ---- Shared Go Security Patches ----
# renovate: datasource=go depName=github.com/expr-lang/expr
ARG EXPR_LANG_VERSION=1.17.7
ARG EXPR_LANG_VERSION=1.17.8
# renovate: datasource=go depName=golang.org/x/net
ARG XNET_VERSION=0.51.0
ARG XNET_VERSION=0.53.0
# renovate: datasource=go depName=github.com/smallstep/certificates
ARG SMALLSTEP_CERTIFICATES_VERSION=0.30.0
# renovate: datasource=npm depName=npm
ARG NPM_VERSION=11.11.1
# Allow pinning Caddy version - Renovate will update this
# Build the most recent Caddy 2.x release (keeps major pinned under v3).
@@ -39,9 +43,9 @@ ARG CADDY_CANDIDATE_VERSION=2.11.2
ARG CADDY_USE_CANDIDATE=0
ARG CADDY_PATCH_SCENARIO=B
# renovate: datasource=go depName=github.com/greenpau/caddy-security
ARG CADDY_SECURITY_VERSION=1.1.45
ARG CADDY_SECURITY_VERSION=1.1.62
# renovate: datasource=go depName=github.com/corazawaf/coraza-caddy
ARG CORAZA_CADDY_VERSION=2.2.0
ARG CORAZA_CADDY_VERSION=2.5.0
## When an official caddy image tag isn't available on the host, use a
## plain Alpine base image and overwrite its caddy binary with our
## xcaddy-built binary in the later COPY step. This avoids relying on
@@ -88,7 +92,7 @@ RUN --mount=type=cache,target=/root/.cache/go-build \
# ---- Frontend Builder ----
# Build the frontend using the BUILDPLATFORM to avoid arm64 musl Rollup native issues
# renovate: datasource=docker depName=node
FROM --platform=$BUILDPLATFORM node:24.14.0-alpine@sha256:7fddd9ddeae8196abf4a3ef2de34e11f7b1a722119f91f28ddf1e99dcafdf114 AS frontend-builder
FROM --platform=$BUILDPLATFORM node:24.15.0-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f AS frontend-builder
WORKDIR /app/frontend
# Copy frontend package files
@@ -99,9 +103,12 @@ ARG VERSION=dev
# Make version available to Vite as VITE_APP_VERSION during the frontend build
ENV VITE_APP_VERSION=${VERSION}
# Set environment to bypass native binary requirement for cross-arch builds
ENV npm_config_rollup_skip_nodejs_native=1 \
ROLLUP_SKIP_NODEJS_NATIVE=1
# Vite 8: Rolldown native bindings auto-resolved per platform via optionalDependencies
ARG NPM_VERSION
# hadolint ignore=DL3017
RUN apk upgrade --no-cache && \
npm install -g npm@${NPM_VERSION} --no-fund --no-audit && \
npm cache clean --force
RUN npm ci
@@ -124,7 +131,7 @@ SHELL ["/bin/ash", "-o", "pipefail", "-c"]
ARG TARGETPLATFORM
ARG TARGETARCH
# hadolint ignore=DL3018
RUN apk add --no-cache clang lld
RUN apk add --no-cache git clang lld
# hadolint ignore=DL3059
# hadolint ignore=DL3018
# Install musl (headers + runtime) and gcc for cross-compilation linker
@@ -153,7 +160,7 @@ RUN set -eux; \
# Note: xx-go install puts binaries in /go/bin/TARGETOS_TARGETARCH/dlv if cross-compiling.
# We find it and move it to /go/bin/dlv so it's in a consistent location for the next stage.
# renovate: datasource=go depName=github.com/go-delve/delve
ARG DLV_VERSION=1.26.1
ARG DLV_VERSION=1.26.2
# hadolint ignore=DL3059,DL4006
RUN CGO_ENABLED=0 xx-go install github.com/go-delve/delve/cmd/dlv@v${DLV_VERSION} && \
DLV_PATH=$(find /go/bin -name dlv -type f | head -n 1) && \
@@ -226,6 +233,7 @@ ARG CORAZA_CADDY_VERSION
ARG XCADDY_VERSION=0.4.5
ARG EXPR_LANG_VERSION
ARG XNET_VERSION
ARG SMALLSTEP_CERTIFICATES_VERSION
# hadolint ignore=DL3018
RUN apk add --no-cache bash git
@@ -274,6 +282,36 @@ RUN --mount=type=cache,target=/root/.cache/go-build \
# renovate: datasource=go depName=github.com/hslatman/ipstore
go get github.com/hslatman/ipstore@v0.4.0; \
go get golang.org/x/net@v${XNET_VERSION}; \
# CVE-2026-33186: gRPC-Go auth bypass (fixed in v1.79.3)
# CVE-2026-34986: go-jose/v4 transitive fix (requires grpc >= v1.80.0)
# Pin here so the Caddy binary is patched immediately;
# remove once Caddy ships a release built with grpc >= v1.80.0.
# renovate: datasource=go depName=google.golang.org/grpc
go get google.golang.org/grpc@v1.80.0; \
# CVE-2026-34986: go-jose JOSE/JWT validation bypass
# renovate: datasource=go depName=github.com/go-jose/go-jose/v3
go get github.com/go-jose/go-jose/v3@v3.0.5; \
# renovate: datasource=go depName=github.com/go-jose/go-jose/v4
go get github.com/go-jose/go-jose/v4@v4.1.4; \
# CVE-2026-39883: OTel SDK resource leak
# renovate: datasource=go depName=go.opentelemetry.io/otel/sdk
go get go.opentelemetry.io/otel/sdk@v1.43.0; \
# CVE-2026-39882: OTel HTTP exporter request smuggling
# renovate: datasource=go depName=go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp
go get go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp@v0.19.0; \
# renovate: datasource=go depName=go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp
go get go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp@v1.43.0; \
# renovate: datasource=go depName=go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp
go get go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp@v1.43.0; \
# GHSA-479m-364c-43vc: goxmldsig XML signature validation bypass (loop variable capture)
# Fix available at v1.6.0. Pin here so the Caddy binary is patched immediately;
# remove once caddy-security ships a release built with goxmldsig >= v1.6.0.
# renovate: datasource=go depName=github.com/russellhaering/goxmldsig
go get github.com/russellhaering/goxmldsig@v1.6.0; \
# CVE-2026-30836: smallstep/certificates 0.30.0-rc3 vulnerability
# Fix available at v0.30.0. Pin here so the Caddy binary is patched immediately;
# remove once caddy-security ships a release built with smallstep/certificates >= v0.30.0.
go get github.com/smallstep/certificates@v${SMALLSTEP_CERTIFICATES_VERSION}; \
if [ "${CADDY_PATCH_SCENARIO}" = "A" ]; then \
# Rollback scenario: keep explicit nebula pin if upstream compatibility regresses.
# NOTE: smallstep/certificates (pulled by caddy-security stack) currently
@@ -307,7 +345,7 @@ RUN --mount=type=cache,target=/root/.cache/go-build \
rm -rf /tmp/buildenv_* /tmp/caddy-initial'
# ---- CrowdSec Builder ----
# Build CrowdSec from source to ensure we use Go 1.26.1+ and avoid stdlib vulnerabilities
# Build CrowdSec from source to ensure we use Go 1.26.2+ and avoid stdlib vulnerabilities
# (CVE-2025-58183, CVE-2025-58186, CVE-2025-58187, CVE-2025-61729)
FROM --platform=$BUILDPLATFORM golang:${GO_VERSION}-alpine AS crowdsec-builder
COPY --from=xx / /
@@ -338,6 +376,23 @@ RUN git clone --depth 1 --branch "v${CROWDSEC_VERSION}" https://github.com/crowd
RUN go get github.com/expr-lang/expr@v${EXPR_LANG_VERSION} && \
go get golang.org/x/crypto@v0.46.0 && \
go get golang.org/x/net@v${XNET_VERSION} && \
# CVE-2026-33186 (GHSA-p77j-4mvh-x3m3): gRPC-Go auth bypass via missing leading slash
# Fix available at v1.79.3. Pin here so the CrowdSec binary is patched immediately;
# remove once CrowdSec ships a release built with grpc >= v1.79.3.
# renovate: datasource=go depName=google.golang.org/grpc
go get google.golang.org/grpc@v1.80.0 && \
# CVE-2026-32286: pgproto3/v2 buffer overflow (no v2 fix exists; bump pgx/v4 to latest patch)
# renovate: datasource=go depName=github.com/jackc/pgx/v4
go get github.com/jackc/pgx/v4@v4.18.3 && \
# GHSA-xmrv-pmrh-hhx2: AWS SDK v2 event stream injection
# renovate: datasource=go depName=github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream
go get github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream@v1.7.9 && \
# renovate: datasource=go depName=github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs
go get github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs@v1.69.1 && \
# renovate: datasource=go depName=github.com/aws/aws-sdk-go-v2/service/kinesis
go get github.com/aws/aws-sdk-go-v2/service/kinesis@v1.43.6 && \
# renovate: datasource=go depName=github.com/aws/aws-sdk-go-v2/service/s3
go get github.com/aws/aws-sdk-go-v2/service/s3@v1.99.1 && \
go mod tidy
# Fix compatibility issues with expr-lang v1.17.8
@@ -410,11 +465,11 @@ WORKDIR /app
# Install runtime dependencies for Charon, including bash for maintenance scripts
# Note: gosu is now built from source (see gosu-builder stage) to avoid CVEs from Debian's pre-compiled version
# Explicitly upgrade packages to fix security vulnerabilities
# binutils provides objdump for debug symbol detection in docker-entrypoint.sh
# hadolint ignore=DL3018
RUN apk add --no-cache \
bash ca-certificates sqlite-libs sqlite tzdata curl gettext libcap libcap-utils \
c-ares binutils libc-utils busybox-extras
bash ca-certificates sqlite-libs sqlite tzdata gettext libcap libcap-utils \
c-ares busybox-extras \
&& apk upgrade --no-cache zlib libcrypto3 libssl3 musl musl-utils
# Copy gosu binary from gosu-builder (built with Go 1.26+ to avoid stdlib CVEs)
COPY --from=gosu-builder /gosu-out/gosu /usr/sbin/gosu
@@ -431,12 +486,13 @@ SHELL ["/bin/ash", "-o", "pipefail", "-c"]
# Note: In production, users should provide their own MaxMind license key
# This uses the publicly available GeoLite2 database
# In CI, timeout quickly rather than retrying to save build time
ARG GEOLITE2_COUNTRY_SHA256=aa154fc6bcd712644de232a4abcdd07dac1f801308c0b6f93dbc2b375443da7b
ARG GEOLITE2_COUNTRY_SHA256=62049119bd084e19fff4689bebe258f18a5f27a386e6d26ba5180941b613fc2b
RUN mkdir -p /app/data/geoip && \
if [ -n "$CI" ]; then \
if [ "$CI" = "true" ] || [ "$CI" = "1" ]; then \
echo "⏱️ CI detected - quick download (10s timeout, no retries)"; \
if curl -fSL -m 10 "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" \
-o /app/data/geoip/GeoLite2-Country.mmdb 2>/dev/null; then \
if wget -qO /app/data/geoip/GeoLite2-Country.mmdb \
-T 10 "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" 2>/dev/null \
&& [ -s /app/data/geoip/GeoLite2-Country.mmdb ]; then \
echo "✅ GeoIP downloaded"; \
else \
echo "⚠️ GeoIP skipped"; \
@@ -444,16 +500,12 @@ RUN mkdir -p /app/data/geoip && \
fi; \
else \
echo "Local - full download (30s timeout, 3 retries)"; \
if curl -fSL -m 30 --retry 3 "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" \
-o /app/data/geoip/GeoLite2-Country.mmdb; then \
if echo "${GEOLITE2_COUNTRY_SHA256} /app/data/geoip/GeoLite2-Country.mmdb" | sha256sum -c -; then \
echo "✅ GeoIP checksum verified"; \
else \
echo "⚠️ Checksum failed"; \
touch /app/data/geoip/GeoLite2-Country.mmdb.placeholder; \
fi; \
if wget -qO /app/data/geoip/GeoLite2-Country.mmdb \
-T 30 -t 4 "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" \
&& [ -s /app/data/geoip/GeoLite2-Country.mmdb ]; then \
echo "✅ GeoIP downloaded"; \
else \
echo "⚠️ Download failed"; \
echo "⚠️ GeoIP download failed or empty — skipping"; \
touch /app/data/geoip/GeoLite2-Country.mmdb.placeholder; \
fi; \
fi
@@ -464,7 +516,7 @@ COPY --from=caddy-builder /usr/bin/caddy /usr/bin/caddy
# Allow non-root to bind privileged ports (80/443) securely
RUN setcap 'cap_net_bind_service=+ep' /usr/bin/caddy
# Copy CrowdSec binaries from the crowdsec-builder stage (built with Go 1.26.1+)
# Copy CrowdSec binaries from the crowdsec-builder stage (built with Go 1.26.2+)
# This ensures we don't have stdlib vulnerabilities from older Go versions
COPY --from=crowdsec-builder /crowdsec-out/crowdsec /usr/local/bin/crowdsec
COPY --from=crowdsec-builder /crowdsec-out/cscli /usr/local/bin/cscli
@@ -579,8 +631,8 @@ EXPOSE 80 443 443/udp 2019 8080
# Security: Add healthcheck to monitor container health
# Verifies the Charon API is responding correctly
HEALTHCHECK --interval=30s --timeout=3s --start-period=40s --retries=3 \
CMD curl -f http://localhost:8080/api/v1/health || exit 1
HEALTHCHECK --interval=30s --timeout=10s --start-period=15s --retries=3 \
CMD wget -q -O /dev/null http://localhost:8080/api/v1/health || exit 1
# Create CrowdSec symlink as root before switching to non-root user
# This symlink allows CrowdSec to use persistent storage at /app/data/crowdsec/config

View File

@@ -54,7 +54,7 @@ If you can use a website, you can run Charon.
Charon includes security features that normally require multiple tools:
- Web Application Firewall (WAF)
- CrowdSec intrusion detection
- CrowdSec intrusion detection with analytics dashboard
- Access Control Lists (ACLs)
- Rate limiting
- Emergency recovery tools
@@ -94,6 +94,7 @@ services:
retries: 3
start_period: 40s
```
> **Docker Socket Access:** Charon runs as a non-root user. If you mount the Docker socket for container discovery, the container needs permission to read it. Find your socket's group ID and add it to the compose file:
>
> ```bash
@@ -107,26 +108,34 @@ services:
> - "998"
> ```
### 2⃣ Generate encryption key:
### 2⃣ Generate encryption key
```bash
openssl rand -base64 32
```
### 3⃣ Start Charon:
### 3⃣ Start Charon
```bash
docker-compose up -d
```
### 4⃣ Access the dashboard:
### 4⃣ Access the dashboard
Open your browser and navigate to `http://localhost:8080` to access the dashboard and create your admin account.
```text
http://localhost:8080
```
### Getting Started:
Full setup instructions and documentation are available at [https://wikid82.github.io/Charon/docs/getting-started.html](https://wikid82.github.io/Charon/docs/getting-started.html).
### Getting Started
Full setup instructions and documentation are available at [https://wikid82.github.io/Charon/docs/getting-started.html](https://wikid82.github.io/Charon/docs/getting-started.html).
---

## ✨ Top 10 Features
### 🎯 **Point & Click Management**
No config files. No terminal commands. Just click, type your domain name, and you're live. If you can use a website, you can run Charon.
### 🔐 **Automatic HTTPS Certificates**
@@ -139,7 +148,7 @@ Secure all your subdomains with a single *.example.com certificate. Supports 15+
### 🛡️ **Enterprise-Grade Security Built In**
Web Application Firewall, rate limiting, geographic blocking, access control lists, and intrusion detection via CrowdSec. Protection that "just works."
Web Application Firewall, rate limiting, geographic blocking, access control lists, and intrusion detection via CrowdSec—with a built-in analytics dashboard showing attack trends, top offenders, and ban history. Protection that "just works."
### 🔐 **Supply Chain Security**
@@ -160,6 +169,7 @@ See exactly what's happening with live request logs, uptime monitoring, and inst
### 📥 **Migration Made Easy**
Already invested in another reverse proxy? Bring your work with you by importing your existing configurations with one click:
- **Caddyfile** — Migrate from other Caddy setups
- **Nginx** — Import from Nginx-based configurations (Coming Soon)
- **Traefik** — Import from Traefik-based configurations (Coming Soon)

File diff suppressed because it is too large Load Diff

View File

@@ -24,8 +24,10 @@ Example: `0.1.0-alpha`, `1.0.0-beta.1`, `2.0.0-rc.2`
1. **Create and push a release tag**:
```bash
git tag -a v1.0.0 -m "Release v1.0.0"
git push origin v1.0.0
```
2. **GitHub Actions automatically**:
@@ -51,10 +53,12 @@ Use it only when you need local/version-file parity checks:
echo "1.0.0" > .version
```
2. **Validate `.version` matches the latest tag**:
1. **Validate `.version` matches the latest tag**:
```bash
bash scripts/check-version-match-tag.sh
```
### Deterministic Rollout Verification Gates (Mandatory)

View File

@@ -255,7 +255,11 @@ func main() {
cerb := cerberus.New(cfg.Security, db)
// Pass config to routes for auth service and certificate service
if err := routes.RegisterWithDeps(router, db, cfg, caddyManager, cerb); err != nil {
// Lifecycle context cancelled on shutdown to stop background goroutines
appCtx, appCancel := context.WithCancel(context.Background())
defer appCancel()
if err := routes.RegisterWithDeps(appCtx, router, db, cfg, caddyManager, cerb); err != nil {
log.Fatalf("register routes: %v", err)
}
@@ -291,6 +295,9 @@ func main() {
sig := <-quit
logger.Log().Infof("Received signal %v, initiating graceful shutdown...", sig)
// Cancel the app-wide context to stop background goroutines (e.g. cert expiry checker)
appCancel()
// Graceful shutdown with timeout
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()

View File

@@ -1,56 +1,56 @@
module github.com/Wikid82/charon/backend
go 1.26.1
go 1.26.2
require (
github.com/docker/docker v28.5.2+incompatible
github.com/gin-contrib/gzip v1.2.5
github.com/gin-contrib/gzip v1.2.6
github.com/gin-gonic/gin v1.12.0
github.com/glebarez/sqlite v1.11.0
github.com/golang-jwt/jwt/v5 v5.3.1
github.com/google/uuid v1.6.0
github.com/gorilla/websocket v1.5.3
github.com/mattn/go-sqlite3 v1.14.34
github.com/mattn/go-sqlite3 v1.14.42
github.com/moby/moby/client v0.4.1
github.com/oschwald/geoip2-golang/v2 v2.1.0
github.com/prometheus/client_golang v1.23.2
github.com/robfig/cron/v3 v3.0.1
github.com/sirupsen/logrus v1.9.4
github.com/stretchr/testify v1.11.1
golang.org/x/crypto v0.48.0
golang.org/x/net v0.51.0
golang.org/x/text v0.35.0
golang.org/x/crypto v0.50.0
golang.org/x/net v0.53.0
golang.org/x/text v0.36.0
golang.org/x/time v0.15.0
gopkg.in/natefinch/lumberjack.v2 v2.2.1
gorm.io/driver/sqlite v1.6.0
gorm.io/gorm v1.31.1
software.sslmate.com/src/go-pkcs12 v0.7.1
)
require (
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bytedance/gopkg v0.1.3 // indirect
github.com/bytedance/gopkg v0.1.4 // indirect
github.com/bytedance/sonic v1.15.0 // indirect
github.com/bytedance/sonic/loader v0.5.0 // indirect
github.com/bytedance/sonic/loader v0.5.1 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/containerd/errdefs v1.0.0 // indirect
github.com/containerd/errdefs/pkg v0.3.0 // indirect
github.com/containerd/log v0.1.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/distribution/reference v0.6.0 // indirect
github.com/docker/go-connections v0.6.0 // indirect
github.com/docker/go-connections v0.7.0 // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/gabriel-vasile/mimetype v1.4.13 // indirect
github.com/gin-contrib/sse v1.1.0 // indirect
github.com/gin-contrib/sse v1.1.1 // indirect
github.com/glebarez/go-sqlite v1.22.0 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.30.1 // indirect
github.com/goccy/go-json v0.10.5 // indirect
github.com/go-playground/validator/v10 v10.30.2 // indirect
github.com/goccy/go-json v0.10.6 // indirect
github.com/goccy/go-yaml v1.19.2 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
@@ -58,20 +58,17 @@ require (
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-isatty v0.0.21 // indirect
github.com/moby/docker-image-spec v1.3.1 // indirect
github.com/moby/sys/atomicwriter v0.1.0 // indirect
github.com/moby/term v0.5.2 // indirect
github.com/moby/moby/api v1.54.2 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/morikuni/aec v1.0.0 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/ncruces/go-strftime v1.0.0 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.1.1 // indirect
github.com/oschwald/maxminddb-golang/v2 v2.1.1 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pelletier/go-toml/v2 v2.3.0 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_model v0.6.2 // indirect
github.com/prometheus/common v0.67.5 // indirect
@@ -79,24 +76,22 @@ require (
github.com/quic-go/qpack v0.6.0 // indirect
github.com/quic-go/quic-go v0.59.0 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/stretchr/objx v0.5.3 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.3.1 // indirect
go.mongodb.org/mongo-driver/v2 v2.5.0 // indirect
go.mongodb.org/mongo-driver/v2 v2.5.1 // indirect
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0 // indirect
go.opentelemetry.io/otel v1.42.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 // indirect
go.opentelemetry.io/otel/metric v1.42.0 // indirect
go.opentelemetry.io/otel/trace v1.42.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.68.0 // indirect
go.opentelemetry.io/otel v1.43.0 // indirect
go.opentelemetry.io/otel/metric v1.43.0 // indirect
go.opentelemetry.io/otel/trace v1.43.0 // indirect
go.yaml.in/yaml/v2 v2.4.4 // indirect
golang.org/x/arch v0.25.0 // indirect
golang.org/x/sys v0.42.0 // indirect
golang.org/x/arch v0.26.0 // indirect
golang.org/x/sys v0.43.0 // indirect
google.golang.org/protobuf v1.36.11 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
gotest.tools/v3 v3.5.2 // indirect
modernc.org/libc v1.70.0 // indirect
modernc.org/libc v1.72.0 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect
modernc.org/sqlite v1.46.1 // indirect
modernc.org/sqlite v1.49.1 // indirect
)

View File

@@ -1,17 +1,13 @@
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg=
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M=
github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM=
github.com/bytedance/gopkg v0.1.4 h1:oZnQwnX82KAIWb7033bEwtxvTqXcYMxDBaQxo5JJHWM=
github.com/bytedance/gopkg v0.1.4/go.mod h1:v1zWfPm21Fb+OsyXN2VAHdL6TBb2L88anLQgdyje6R4=
github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE=
github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k=
github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE=
github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo=
github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM=
github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw=
github.com/bytedance/sonic/loader v0.5.1 h1:Ygpfa9zwRCCKSlrp5bBP/b/Xzc3VxsAW+5NIYXrOOpI=
github.com/bytedance/sonic/loader v0.5.1/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M=
@@ -20,17 +16,13 @@ github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG
github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M=
github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE=
github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk=
github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
github.com/docker/docker v28.5.2+incompatible h1:DBX0Y0zAjZbSrm1uzOkdr1onVghKaftjlSWt4AFexzM=
github.com/docker/docker v28.5.2+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94=
github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE=
github.com/docker/go-connections v0.7.0 h1:6SsRfJddP22WMrCkj19x9WKjEDTB+ahsdiGYf0mN39c=
github.com/docker/go-connections v0.7.0/go.mod h1:no1qkHdjq7kLMGUXYAduOhYPSJxxvgWBh7ogVvptn3Q=
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
@@ -39,10 +31,10 @@ github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/gabriel-vasile/mimetype v1.4.13 h1:46nXokslUBsAJE/wMsp5gtO500a4F3Nkz9Ufpk2AcUM=
github.com/gabriel-vasile/mimetype v1.4.13/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
github.com/gin-contrib/gzip v1.2.5 h1:fIZs0S+l17pIu1P5XRJOo/YNqfIuPCrZZ3TWB7pjckI=
github.com/gin-contrib/gzip v1.2.5/go.mod h1:aomRgR7ftdZV3uWY0gW/m8rChfxau0n8YVvwlOHONzw=
github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w=
github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM=
github.com/gin-contrib/gzip v1.2.6 h1:OtN8DplD5DNZCSLAnQ5HxRkD2qZ5VU+JhOrcfJrcRvg=
github.com/gin-contrib/gzip v1.2.6/go.mod h1:BQy8/+JApnRjAVUplSGZiVtD2k8GmIE2e9rYu/hLzzU=
github.com/gin-contrib/sse v1.1.1 h1:uGYpNwTacv5R68bSGMapo62iLTRa9l5zxGCps4hK6ko=
github.com/gin-contrib/sse v1.1.1/go.mod h1:QXzuVkA0YO7o/gun03UI1Q+FTI8ZV/n5t03kIQAI89s=
github.com/gin-gonic/gin v1.12.0 h1:b3YAbrZtnf8N//yjKeU2+MQsh2mY5htkZidOM7O0wG8=
github.com/gin-gonic/gin v1.12.0/go.mod h1:VxccKfsSllpKshkBWgVgRniFFAzFb9csfngsqANjnLc=
github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
@@ -60,10 +52,10 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w=
github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/go-playground/validator/v10 v10.30.2 h1:JiFIMtSSHb2/XBUbWM4i/MpeQm9ZK2xqPNk8vgvu5JQ=
github.com/go-playground/validator/v10 v10.30.2/go.mod h1:mAf2pIOVXjTEBrwUMGKkCWKKPs9NheYGabeB04txQSc=
github.com/goccy/go-json v0.10.6 h1:p8HrPJzOakx/mn/bQtjgNjdTcN+/S6FcG2CTtQOrHVU=
github.com/goccy/go-json v0.10.6/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM=
github.com/goccy/go-yaml v1.19.2/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY=
@@ -77,8 +69,6 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
@@ -99,25 +89,21 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.34 h1:3NtcvcUnFBPsuRcno8pUtupspG/GM+9nZ88zgJcp6Zk=
github.com/mattn/go-sqlite3 v1.14.34/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mattn/go-isatty v0.0.21 h1:xYae+lCNBP7QuW4PUnNG61ffM4hVIfm+zUzDuSzYLGs=
github.com/mattn/go-isatty v0.0.21/go.mod h1:ZXfXG4SQHsB/w3ZeOYbR0PrPwLy+n6xiMrJlRFqopa4=
github.com/mattn/go-sqlite3 v1.14.42 h1:MigqEP4ZmHw3aIdIT7T+9TLa90Z6smwcthx+Azv4Cgo=
github.com/mattn/go-sqlite3 v1.14.42/go.mod h1:pjEuOr8IwzLJP2MfGeTb0A35jauH+C2kbHKBr7yXKVQ=
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw=
github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs=
github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU=
github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko=
github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ=
github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc=
github.com/moby/moby/api v1.54.2 h1:wiat9QAhnDQjA7wk1kh/TqHz2I1uUA7M7t9SAl/JNXg=
github.com/moby/moby/api v1.54.2/go.mod h1:+RQ6wluLwtYaTd1WnPLykIDPekkuyD/ROWQClE83pzs=
github.com/moby/moby/client v0.4.1 h1:DMQgisVoMkmMs7fp3ROSdiBnoAu8+vo3GggFl06M/wY=
github.com/moby/moby/client v0.4.1/go.mod h1:z52C9O2POPOsnxZAy//WtKcQ32P+jT/NGeXu/7nfjGQ=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
@@ -130,10 +116,8 @@ github.com/oschwald/geoip2-golang/v2 v2.1.0 h1:DjnLhNJu9WHwTrmoiQFvgmyJoczhdnm7L
github.com/oschwald/geoip2-golang/v2 v2.1.0/go.mod h1:qdVmcPgrTJ4q2eP9tHq/yldMTdp2VMr33uVdFbHBiBc=
github.com/oschwald/maxminddb-golang/v2 v2.1.1 h1:lA8FH0oOrM4u7mLvowq8IT6a3Q/qEnqRzLQn9eH5ojc=
github.com/oschwald/maxminddb-golang/v2 v2.1.1/go.mod h1:PLdx6PR+siSIoXqqy7C7r3SB3KZnhxWr1Dp6g0Hacl8=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pelletier/go-toml/v2 v2.3.0 h1:k59bC/lIZREW0/iVaQR8nDHxVq8OVlIzYCOJf421CaM=
github.com/pelletier/go-toml/v2 v2.3.0/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=
@@ -159,8 +143,9 @@ github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC4
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/objx v0.5.3 h1:jmXUvGomnU1o3W/V5h2VEradbpJDwGrzugQQvL0POH4=
github.com/stretchr/objx v0.5.3/go.mod h1:rDQraq+vQZU7Fde9LOZLr8Tax6zZvy4kuNKF+QYS+U0=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
@@ -172,59 +157,46 @@ github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go/codec v1.3.1 h1:waO7eEiFDwidsBN6agj1vJQ4AG7lh2yqXyOXqhgQuyY=
github.com/ugorji/go/codec v1.3.1/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4=
go.mongodb.org/mongo-driver/v2 v2.5.0 h1:yXUhImUjjAInNcpTcAlPHiT7bIXhshCTL3jVBkF3xaE=
go.mongodb.org/mongo-driver/v2 v2.5.0/go.mod h1:yOI9kBsufol30iFsl1slpdq1I0eHPzybRWdyYUs8K/0=
go.mongodb.org/mongo-driver/v2 v2.5.1 h1:j2U/Qp+wvueSpqitLCSZPT/+ZpVc1xzuwdHWwl7d8ro=
go.mongodb.org/mongo-driver/v2 v2.5.1/go.mod h1:yOI9kBsufol30iFsl1slpdq1I0eHPzybRWdyYUs8K/0=
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0 h1:OyrsyzuttWTSur2qN/Lm0m2a8yqyIjUVBZcxFPuXq2o=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0/go.mod h1:C2NGBr+kAB4bk3xtMXfZ94gqFDtg/GkI7e9zqGh5Beg=
go.opentelemetry.io/otel v1.42.0 h1:lSQGzTgVR3+sgJDAU/7/ZMjN9Z+vUip7leaqBKy4sho=
go.opentelemetry.io/otel v1.42.0/go.mod h1:lJNsdRMxCUIWuMlVJWzecSMuNjE7dOYyWlqOXWkdqCc=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 h1:GqRJVj7UmLjCVyVJ3ZFLdPRmhDUp2zFmQe3RHIOsw24=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0/go.mod h1:ri3aaHSmCTVYu2AWv44YMauwAQc0aqI9gHKIcSbI1pU=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 h1:aTL7F04bJHUlztTsNGJ2l+6he8c+y/b//eR0jjjemT4=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0/go.mod h1:kldtb7jDTeol0l3ewcmd8SDvx3EmIE7lyvqbasU3QC4=
go.opentelemetry.io/otel/metric v1.42.0 h1:2jXG+3oZLNXEPfNmnpxKDeZsFI5o4J+nz6xUlaFdF/4=
go.opentelemetry.io/otel/metric v1.42.0/go.mod h1:RlUN/7vTU7Ao/diDkEpQpnz3/92J9ko05BIwxYa2SSI=
go.opentelemetry.io/otel/sdk v1.42.0 h1:LyC8+jqk6UJwdrI/8VydAq/hvkFKNHZVIWuslJXYsDo=
go.opentelemetry.io/otel/sdk v1.42.0/go.mod h1:rGHCAxd9DAph0joO4W6OPwxjNTYWghRWmkHuGbayMts=
go.opentelemetry.io/otel/sdk/metric v1.42.0 h1:D/1QR46Clz6ajyZ3G8SgNlTJKBdGp84q9RKCAZ3YGuA=
go.opentelemetry.io/otel/sdk/metric v1.42.0/go.mod h1:Ua6AAlDKdZ7tdvaQKfSmnFTdHx37+J4ba8MwVCYM5hc=
go.opentelemetry.io/otel/trace v1.42.0 h1:OUCgIPt+mzOnaUTpOQcBiM/PLQ/Op7oq6g4LenLmOYY=
go.opentelemetry.io/otel/trace v1.42.0/go.mod h1:f3K9S+IFqnumBkKhRJMeaZeNk9epyhnCmQh/EysQCdc=
go.opentelemetry.io/proto/otlp v1.7.1 h1:gTOMpGDb0WTBOP8JaO72iL3auEZhVmAQg4ipjOVAtj4=
go.opentelemetry.io/proto/otlp v1.7.1/go.mod h1:b2rVh6rfI/s2pHWNlB7ILJcRALpcNDzKhACevjI+ZnE=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.68.0 h1:CqXxU8VOmDefoh0+ztfGaymYbhdB/tT3zs79QaZTNGY=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.68.0/go.mod h1:BuhAPThV8PBHBvg8ZzZ/Ok3idOdhWIodywz2xEcRbJo=
go.opentelemetry.io/otel v1.43.0 h1:mYIM03dnh5zfN7HautFE4ieIig9amkNANT+xcVxAj9I=
go.opentelemetry.io/otel v1.43.0/go.mod h1:JuG+u74mvjvcm8vj8pI5XiHy1zDeoCS2LB1spIq7Ay0=
go.opentelemetry.io/otel/metric v1.43.0 h1:d7638QeInOnuwOONPp4JAOGfbCEpYb+K6DVWvdxGzgM=
go.opentelemetry.io/otel/metric v1.43.0/go.mod h1:RDnPtIxvqlgO8GRW18W6Z/4P462ldprJtfxHxyKd2PY=
go.opentelemetry.io/otel/sdk v1.43.0 h1:pi5mE86i5rTeLXqoF/hhiBtUNcrAGHLKQdhg4h4V9Dg=
go.opentelemetry.io/otel/sdk v1.43.0/go.mod h1:P+IkVU3iWukmiit/Yf9AWvpyRDlUeBaRg6Y+C58QHzg=
go.opentelemetry.io/otel/sdk/metric v1.43.0 h1:S88dyqXjJkuBNLeMcVPRFXpRw2fuwdvfCGLEo89fDkw=
go.opentelemetry.io/otel/sdk/metric v1.43.0/go.mod h1:C/RJtwSEJ5hzTiUz5pXF1kILHStzb9zFlIEe85bhj6A=
go.opentelemetry.io/otel/trace v1.43.0 h1:BkNrHpup+4k4w+ZZ86CZoHHEkohws8AY+WTX09nk+3A=
go.opentelemetry.io/otel/trace v1.43.0/go.mod h1:/QJhyVBUUswCphDVxq+8mld+AvhXZLhe+8WVFxiFff0=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y=
go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU=
go.yaml.in/yaml/v2 v2.4.4 h1:tuyd0P+2Ont/d6e2rl3be67goVK4R6deVxCUX5vyPaQ=
go.yaml.in/yaml/v2 v2.4.4/go.mod h1:gMZqIpDtDqOfM0uNfy0SkpRhvUryYH0Z6wdMYcacYXQ=
golang.org/x/arch v0.25.0 h1:qnk6Ksugpi5Bz32947rkUgDt9/s5qvqDPl/gBKdMJLE=
golang.org/x/arch v0.25.0/go.mod h1:0X+GdSIP+kL5wPmpK7sdkEVTt2XoYP0cSjQSbZBwOi8=
golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8=
golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w=
golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo=
golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y=
golang.org/x/arch v0.26.0 h1:jZ6dpec5haP/fUv1kLCbuJy6dnRrfX6iVK08lZBFpk4=
golang.org/x/arch v0.26.0/go.mod h1:0X+GdSIP+kL5wPmpK7sdkEVTt2XoYP0cSjQSbZBwOi8=
golang.org/x/crypto v0.50.0 h1:zO47/JPrL6vsNkINmLoo/PH1gcxpls50DNogFvB5ZGI=
golang.org/x/crypto v0.50.0/go.mod h1:3muZ7vA7PBCE6xgPX7nkzzjiUq87kRItoJQM1Yo8S+Q=
golang.org/x/mod v0.34.0 h1:xIHgNUUnW6sYkcM5Jleh05DvLOtwc6RitGHbDk4akRI=
golang.org/x/mod v0.34.0/go.mod h1:ykgH52iCZe79kzLLMhyCUzhMci+nQj+0XkbXpNYtVjY=
golang.org/x/net v0.53.0 h1:d+qAbo5L0orcWAr0a9JweQpjXF19LMXJE8Ey7hwOdUA=
golang.org/x/net v0.53.0/go.mod h1:JvMuJH7rrdiCfbeHoo3fCQU24Lf5JJwT9W3sJFulfgs=
golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4=
golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.42.0 h1:omrd2nAlyT5ESRdCLYdm3+fMfNFE/+Rf4bDIQImRJeo=
golang.org/x/sys v0.42.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw=
golang.org/x/text v0.35.0 h1:JOVx6vVDFokkpaq1AEptVzLTpDe9KGpj5tR4/X+ybL8=
golang.org/x/text v0.35.0/go.mod h1:khi/HExzZJ2pGnjenulevKNX1W67CUy0AsXcNubPGCA=
golang.org/x/sys v0.43.0 h1:Rlag2XtaFTxp19wS8MXlJwTvoh8ArU6ezoyFsMyCTNI=
golang.org/x/sys v0.43.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw=
golang.org/x/text v0.36.0 h1:JfKh3XmcRPqZPKevfXVpI1wXPTqbkE5f7JA92a55Yxg=
golang.org/x/text v0.36.0/go.mod h1:NIdBknypM8iqVmPiuco0Dh6P5Jcdk8lJL0CUebqK164=
golang.org/x/time v0.15.0 h1:bbrp8t3bGUeFOx08pvsMYRTCVSMk89u4tKbNOZbp88U=
golang.org/x/time v0.15.0/go.mod h1:Y4YMaQmXwGQZoFaVFk4YpCt4FLQMYKZe9oeV/f4MSno=
golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k=
golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0=
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 h1:BIRfGDEjiHRrk0QKZe3Xv2ieMhtgRGeLcZQ0mIVn4EY=
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5/go.mod h1:j3QtIyytwqGr1JUDtYXwtMXWPKsEa5LtzIFN1Wn5WvE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 h1:eaY8u2EuxbRv7c3NiGK0/NedzVsCcV6hDuU5qPX5EGE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5/go.mod h1:M4/wBTSeyLxupu3W3tJtOgB14jILAS/XWPSSa3TAlJc=
google.golang.org/grpc v1.75.0 h1:+TW+dqTd2Biwe6KKfhE5JpiYIBWq865PhKGSXiivqt4=
google.golang.org/grpc v1.75.0/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ=
golang.org/x/tools v0.43.0 h1:12BdW9CeB3Z+J/I/wj34VMl8X+fEXBxVR90JeMX5E7s=
golang.org/x/tools v0.43.0/go.mod h1:uHkMso649BX2cZK6+RpuIPXS3ho2hZo4FVwfoy1vIk0=
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -241,10 +213,10 @@ gorm.io/gorm v1.31.1 h1:7CA8FTFz/gRfgqgpeKIBcervUn3xSyPUmr6B2WXJ7kg=
gorm.io/gorm v1.31.1/go.mod h1:XyQVbO2k6YkOis7C2437jSit3SsDK72s7n7rsSHd+Gs=
gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA=
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
modernc.org/cc/v4 v4.27.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/ccgo/v4 v4.32.0 h1:hjG66bI/kqIPX1b2yT6fr/jt+QedtP2fqojG2VrFuVw=
modernc.org/ccgo/v4 v4.32.0/go.mod h1:6F08EBCx5uQc38kMGl+0Nm0oWczoo1c7cgpzEry7Uc0=
modernc.org/cc/v4 v4.27.3 h1:uNCgn37E5U09mTv1XgskEVUJ8ADKpmFMPxzGJ0TSo+U=
modernc.org/cc/v4 v4.27.3/go.mod h1:3YjcbCqhoTTHPycJDRl2WZKKFj0nwcOIPBfEZK0Hdk8=
modernc.org/ccgo/v4 v4.32.4 h1:L5OB8rpEX4ZsXEQwGozRfJyJSFHbbNVOoQ59DU9/KuU=
modernc.org/ccgo/v4 v4.32.4/go.mod h1:lY7f+fiTDHfcv6YlRgSkxYfhs+UvOEEzj49jAn2TOx0=
modernc.org/fileutil v1.4.0 h1:j6ZzNTftVS054gi281TyLjHPp6CPHr2KCxEXjEbD6SM=
modernc.org/fileutil v1.4.0/go.mod h1:EqdKFDxiByqxLk8ozOxObDSfcVOv/54xDs/DUHdvCUU=
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
@@ -253,8 +225,8 @@ modernc.org/gc/v3 v3.1.2 h1:ZtDCnhonXSZexk/AYsegNRV1lJGgaNZJuKjJSWKyEqo=
modernc.org/gc/v3 v3.1.2/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY=
modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
modernc.org/libc v1.70.0 h1:U58NawXqXbgpZ/dcdS9kMshu08aiA6b7gusEusqzNkw=
modernc.org/libc v1.70.0/go.mod h1:OVmxFGP1CI/Z4L3E0Q3Mf1PDE0BucwMkcXjjLntvHJo=
modernc.org/libc v1.72.0 h1:IEu559v9a0XWjw0DPoVKtXpO2qt5NVLAnFaBbjq+n8c=
modernc.org/libc v1.72.0/go.mod h1:tTU8DL8A+XLVkEY3x5E/tO7s2Q/q42EtnNWda/L5QhQ=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
@@ -263,9 +235,13 @@ modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
modernc.org/sqlite v1.46.1 h1:eFJ2ShBLIEnUWlLy12raN0Z1plqmFX9Qe3rjQTKt6sU=
modernc.org/sqlite v1.46.1/go.mod h1:CzbrU2lSB1DKUusvwGz7rqEKIq+NUd8GWuBBZDs9/nA=
modernc.org/sqlite v1.49.1 h1:dYGHTKcX1sJ+EQDnUzvz4TJ5GbuvhNJa8Fg6ElGx73U=
modernc.org/sqlite v1.49.1/go.mod h1:m0w8xhwYUVY3H6pSDwc3gkJ/irZT/0YEXwBlhaxQEew=
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
pgregory.net/rapid v1.2.0 h1:keKAYRcjm+e1F0oAuU5F5+YPAWcyxNNRK2wud503Gnk=
pgregory.net/rapid v1.2.0/go.mod h1:PY5XlDGj0+V1FCq0o192FdRhpKHGTRIWBgqjDBTrq04=
software.sslmate.com/src/go-pkcs12 v0.7.1 h1:bxkUPRsvTPNRBZa4M/aSX4PyMOEbq3V8I6hbkG4F4Q8=
software.sslmate.com/src/go-pkcs12 v0.7.1/go.mod h1:Qiz0EyvDRJjjxGyUQa2cCNZn/wMyzrRJ/qcDXOQazLI=

View File

@@ -121,7 +121,6 @@ func TestAccessListHandler_List_DBError(t *testing.T) {
db, _ := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
// Don't migrate the table to cause error
gin.SetMode(gin.TestMode)
router := gin.New()
handler := NewAccessListHandler(db)
@@ -138,7 +137,6 @@ func TestAccessListHandler_Get_DBError(t *testing.T) {
db, _ := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
// Don't migrate the table to cause error
gin.SetMode(gin.TestMode)
router := gin.New()
handler := NewAccessListHandler(db)
@@ -157,7 +155,6 @@ func TestAccessListHandler_Delete_InternalError(t *testing.T) {
// Migrate AccessList but not ProxyHost to cause internal error on delete
_ = db.AutoMigrate(&models.AccessList{})
gin.SetMode(gin.TestMode)
router := gin.New()
handler := NewAccessListHandler(db)
@@ -285,7 +282,6 @@ func TestAccessListHandler_TestIP_InternalError(t *testing.T) {
db, _ := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
// Don't migrate - this causes a "no such table" error which is an internal error
gin.SetMode(gin.TestMode)
router := gin.New()
handler := NewAccessListHandler(db)

View File

@@ -21,7 +21,6 @@ func setupAccessListTestRouter(t *testing.T) (*gin.Engine, *gorm.DB) {
err = db.AutoMigrate(&models.AccessList{}, &models.ProxyHost{})
assert.NoError(t, err)
gin.SetMode(gin.TestMode)
router := gin.New()
handler := NewAccessListHandler(db)

View File

@@ -27,7 +27,6 @@ func setupImportCoverageDB(t *testing.T) *gorm.DB {
}
func TestImportHandler_Commit_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -44,7 +43,6 @@ func TestImportHandler_Commit_InvalidJSON(t *testing.T) {
}
func TestImportHandler_Commit_InvalidSessionUUID(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -67,7 +65,6 @@ func TestImportHandler_Commit_InvalidSessionUUID(t *testing.T) {
}
func TestImportHandler_Commit_SessionNotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -98,7 +95,6 @@ func setupRemoteServerCoverageDB2(t *testing.T) *gorm.DB {
}
func TestRemoteServerHandler_TestConnection_Unreachable(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupRemoteServerCoverageDB2(t)
svc := services.NewRemoteServerService(db)
h := NewRemoteServerHandler(svc, nil)
@@ -137,7 +133,6 @@ func setupSecurityCoverageDB3(t *testing.T) *gorm.DB {
}
func TestSecurityHandler_GetConfig_InternalError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -157,7 +152,6 @@ func TestSecurityHandler_GetConfig_InternalError(t *testing.T) {
}
func TestSecurityHandler_UpdateConfig_ApplyCaddyError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
// Create handler with nil caddy manager (ApplyConfig will be called but is nil)
@@ -181,7 +175,6 @@ func TestSecurityHandler_UpdateConfig_ApplyCaddyError(t *testing.T) {
}
func TestSecurityHandler_GenerateBreakGlass_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -201,7 +194,6 @@ func TestSecurityHandler_GenerateBreakGlass_Error(t *testing.T) {
}
func TestSecurityHandler_ListDecisions_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -220,7 +212,6 @@ func TestSecurityHandler_ListDecisions_Error(t *testing.T) {
}
func TestSecurityHandler_ListRuleSets_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -239,7 +230,6 @@ func TestSecurityHandler_ListRuleSets_Error(t *testing.T) {
}
func TestSecurityHandler_UpsertRuleSet_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -265,7 +255,6 @@ func TestSecurityHandler_UpsertRuleSet_Error(t *testing.T) {
}
func TestSecurityHandler_CreateDecision_LogError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -291,7 +280,6 @@ func TestSecurityHandler_CreateDecision_LogError(t *testing.T) {
}
func TestSecurityHandler_DeleteRuleSet_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -313,7 +301,6 @@ func TestSecurityHandler_DeleteRuleSet_Error(t *testing.T) {
// CrowdSec ImportConfig additional coverage tests
func TestCrowdsec_ImportConfig_EmptyUpload(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -344,7 +331,6 @@ func TestCrowdsec_ImportConfig_EmptyUpload(t *testing.T) {
// Backup Handler additional coverage tests
func TestBackupHandler_List_DBError(t *testing.T) {
gin.SetMode(gin.TestMode)
// Use a non-writable temp dir to simulate errors
tmpDir := t.TempDir()
@@ -370,7 +356,6 @@ func TestBackupHandler_List_DBError(t *testing.T) {
// ImportHandler UploadMulti coverage tests
func TestImportHandler_UploadMulti_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -387,7 +372,6 @@ func TestImportHandler_UploadMulti_InvalidJSON(t *testing.T) {
}
func TestImportHandler_UploadMulti_MissingCaddyfile(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -411,7 +395,6 @@ func TestImportHandler_UploadMulti_MissingCaddyfile(t *testing.T) {
}
func TestImportHandler_UploadMulti_EmptyContent(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -435,7 +418,6 @@ func TestImportHandler_UploadMulti_EmptyContent(t *testing.T) {
}
func TestImportHandler_UploadMulti_PathTraversal(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -481,7 +463,6 @@ func setupLogsDownloadTest(t *testing.T) (h *LogsHandler, logsDir string) {
}
func TestLogsHandler_Download_PathTraversal(t *testing.T) {
gin.SetMode(gin.TestMode)
h, _ := setupLogsDownloadTest(t)
w := httptest.NewRecorder()
@@ -496,7 +477,6 @@ func TestLogsHandler_Download_PathTraversal(t *testing.T) {
}
func TestLogsHandler_Download_NotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
h, _ := setupLogsDownloadTest(t)
w := httptest.NewRecorder()
@@ -511,7 +491,6 @@ func TestLogsHandler_Download_NotFound(t *testing.T) {
}
func TestLogsHandler_Download_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
h, logsDir := setupLogsDownloadTest(t)
// Create a log file to download
@@ -531,7 +510,6 @@ func TestLogsHandler_Download_Success(t *testing.T) {
// Import Handler Upload error tests
func TestImportHandler_Upload_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -548,7 +526,6 @@ func TestImportHandler_Upload_InvalidJSON(t *testing.T) {
}
func TestImportHandler_Upload_EmptyContent(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -571,7 +548,6 @@ func TestImportHandler_Upload_EmptyContent(t *testing.T) {
// Additional Backup Handler tests
func TestBackupHandler_List_ServiceError(t *testing.T) {
gin.SetMode(gin.TestMode)
// Create a temp dir with invalid permission for backup dir
tmpDir := t.TempDir()
@@ -608,7 +584,6 @@ func TestBackupHandler_List_ServiceError(t *testing.T) {
}
func TestBackupHandler_Delete_PathTraversal(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
dataDir := filepath.Join(tmpDir, "data")
@@ -639,7 +614,6 @@ func TestBackupHandler_Delete_PathTraversal(t *testing.T) {
}
func TestBackupHandler_Delete_InternalError2(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
dataDir := filepath.Join(tmpDir, "data")
@@ -689,7 +663,6 @@ func TestBackupHandler_Delete_InternalError2(t *testing.T) {
// Remote Server TestConnection error paths
func TestRemoteServerHandler_TestConnection_NotFound2(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupRemoteServerCoverageDB2(t)
svc := services.NewRemoteServerService(db)
h := NewRemoteServerHandler(svc, nil)
@@ -704,7 +677,6 @@ func TestRemoteServerHandler_TestConnection_NotFound2(t *testing.T) {
}
func TestRemoteServerHandler_TestConnectionCustom_Unreachable2(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupRemoteServerCoverageDB2(t)
svc := services.NewRemoteServerService(db)
h := NewRemoteServerHandler(svc, nil)
@@ -735,7 +707,6 @@ func setupAuthCoverageDB(t *testing.T) *gorm.DB {
}
func TestAuthHandler_Register_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuthCoverageDB(t)
cfg := config.Config{JWTSecret: "test-secret"}
@@ -755,7 +726,6 @@ func TestAuthHandler_Register_InvalidJSON(t *testing.T) {
// Health handler coverage
func TestHealthHandler_Basic(t *testing.T) {
gin.SetMode(gin.TestMode)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
@@ -771,7 +741,6 @@ func TestHealthHandler_Basic(t *testing.T) {
// Backup Create error coverage
func TestBackupHandler_Create_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
// Use a path where database file doesn't exist
tmpDir := t.TempDir()
@@ -811,7 +780,6 @@ func setupSettingsCoverageDB(t *testing.T) *gorm.DB {
}
func TestSettingsHandler_GetSettings_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSettingsCoverageDB(t)
h := NewSettingsHandler(db)
@@ -830,7 +798,6 @@ func TestSettingsHandler_GetSettings_Error(t *testing.T) {
}
func TestSettingsHandler_UpdateSetting_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSettingsCoverageDB(t)
h := NewSettingsHandler(db)
@@ -849,7 +816,6 @@ func TestSettingsHandler_UpdateSetting_InvalidJSON(t *testing.T) {
// Additional remote server TestConnection tests
func TestRemoteServerHandler_TestConnection_Reachable(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupRemoteServerCoverageDB2(t)
svc := services.NewRemoteServerService(db)
h := NewRemoteServerHandler(svc, nil)
@@ -873,7 +839,6 @@ func TestRemoteServerHandler_TestConnection_Reachable(t *testing.T) {
}
func TestRemoteServerHandler_TestConnection_EmptyHost(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupRemoteServerCoverageDB2(t)
svc := services.NewRemoteServerService(db)
h := NewRemoteServerHandler(svc, nil)
@@ -900,7 +865,6 @@ func TestRemoteServerHandler_TestConnection_EmptyHost(t *testing.T) {
// Additional UploadMulti test with valid Caddyfile content
func TestImportHandler_UploadMulti_ValidCaddyfile(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -925,7 +889,6 @@ func TestImportHandler_UploadMulti_ValidCaddyfile(t *testing.T) {
}
func TestImportHandler_UploadMulti_SubdirFile(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")

View File

@@ -30,7 +30,6 @@ func setupAuditLogTestDB(t *testing.T) *gorm.DB {
}
func TestAuditLogHandler_List(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -130,7 +129,6 @@ func TestAuditLogHandler_List(t *testing.T) {
}
func TestAuditLogHandler_Get(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -198,7 +196,6 @@ func TestAuditLogHandler_Get(t *testing.T) {
}
func TestAuditLogHandler_ListByProvider(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -286,7 +283,6 @@ func TestAuditLogHandler_ListByProvider(t *testing.T) {
}
func TestAuditLogHandler_ListWithDateFilters(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -371,7 +367,6 @@ func TestAuditLogHandler_ListWithDateFilters(t *testing.T) {
// TestAuditLogHandler_ServiceErrors tests error handling when service layer fails
func TestAuditLogHandler_ServiceErrors(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -422,7 +417,6 @@ func TestAuditLogHandler_ServiceErrors(t *testing.T) {
// TestAuditLogHandler_List_PaginationBoundaryEdgeCases tests pagination boundary edge cases
func TestAuditLogHandler_List_PaginationBoundaryEdgeCases(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -513,7 +507,6 @@ func TestAuditLogHandler_List_PaginationBoundaryEdgeCases(t *testing.T) {
// TestAuditLogHandler_ListByProvider_PaginationBoundaryEdgeCases tests pagination boundary edge cases for provider list
func TestAuditLogHandler_ListByProvider_PaginationBoundaryEdgeCases(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -583,7 +576,6 @@ func TestAuditLogHandler_ListByProvider_PaginationBoundaryEdgeCases(t *testing.T
// TestAuditLogHandler_List_InvalidDateFormats tests handling of invalid date formats
func TestAuditLogHandler_List_InvalidDateFormats(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -624,7 +616,6 @@ func TestAuditLogHandler_List_InvalidDateFormats(t *testing.T) {
// TestAuditLogHandler_Get_InternalError tests Get when service returns internal error
func TestAuditLogHandler_Get_InternalError(t *testing.T) {
gin.SetMode(gin.TestMode)
// Create a fresh DB and immediately close it to simulate internal error
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})

View File

@@ -126,19 +126,16 @@ func isLocalRequest(c *gin.Context) bool {
}
// setSecureCookie sets an auth cookie with security best practices
// - HttpOnly: prevents JavaScript access (XSS protection)
// - Secure: true for HTTPS; false for local/private network HTTP requests
// - SameSite: Lax for any local/private-network request (regardless of scheme),
// Strict otherwise (public HTTPS only)
// - HttpOnly: prevents JavaScript access (XSS protection)
// - Secure: always true (all major browsers honour Secure on localhost HTTP;
// HTTP-on-private-IP without TLS is an unsupported deployment)
// - SameSite: Lax for any local/private-network request (regardless of scheme),
// Strict otherwise (public HTTPS only)
func setSecureCookie(c *gin.Context, name, value string, maxAge int) {
scheme := requestScheme(c)
secure := true
sameSite := http.SameSiteStrictMode
if scheme != "https" {
sameSite = http.SameSiteLaxMode
if isLocalRequest(c) {
secure = false
}
}
if isLocalRequest(c) {
@@ -149,14 +146,13 @@ func setSecureCookie(c *gin.Context, name, value string, maxAge int) {
domain := ""
c.SetSameSite(sameSite)
// secure is intentionally false for local/private network HTTP requests; always true for external or HTTPS requests.
c.SetCookie( // codeql[go/cookie-secure-not-set]
c.SetCookie(
name, // name
value, // value
maxAge, // maxAge in seconds
"/", // path
domain, // domain (empty = current host)
secure, // secure
true, // secure
true, // httpOnly (no JS access)
)
}

View File

@@ -6,7 +6,6 @@ import (
"encoding/json"
"net/http"
"net/http/httptest"
"os"
"testing"
"github.com/Wikid82/charon/backend/internal/api/middleware"
@@ -45,7 +44,6 @@ func TestAuthHandler_Login(t *testing.T) {
_ = user.SetPassword("password123")
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/login", handler.Login)
@@ -65,9 +63,6 @@ func TestAuthHandler_Login(t *testing.T) {
}
func TestSetSecureCookie_HTTPS_Strict(t *testing.T) {
gin.SetMode(gin.TestMode)
_ = os.Setenv("CHARON_ENV", "production")
defer func() { _ = os.Unsetenv("CHARON_ENV") }()
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "https://example.com/login", http.NoBody)
@@ -83,7 +78,6 @@ func TestSetSecureCookie_HTTPS_Strict(t *testing.T) {
func TestSetSecureCookie_HTTP_Lax(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://192.0.2.10/login", http.NoBody)
@@ -100,7 +94,6 @@ func TestSetSecureCookie_HTTP_Lax(t *testing.T) {
func TestSetSecureCookie_HTTP_Loopback_Insecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://127.0.0.1:8080/login", http.NoBody)
@@ -112,15 +105,12 @@ func TestSetSecureCookie_HTTP_Loopback_Insecure(t *testing.T) {
cookies := recorder.Result().Cookies()
require.Len(t, cookies, 1)
cookie := cookies[0]
assert.False(t, cookie.Secure)
assert.True(t, cookie.Secure)
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
}
func TestSetSecureCookie_ForwardedHTTPS_LocalhostForcesInsecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
_ = os.Setenv("CHARON_ENV", "production")
defer func() { _ = os.Unsetenv("CHARON_ENV") }()
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
@@ -139,9 +129,6 @@ func TestSetSecureCookie_ForwardedHTTPS_LocalhostForcesInsecure(t *testing.T) {
func TestSetSecureCookie_ForwardedHTTPS_LoopbackForcesInsecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
_ = os.Setenv("CHARON_ENV", "production")
defer func() { _ = os.Unsetenv("CHARON_ENV") }()
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
@@ -160,9 +147,6 @@ func TestSetSecureCookie_ForwardedHTTPS_LoopbackForcesInsecure(t *testing.T) {
func TestSetSecureCookie_ForwardedHostLocalhostForcesInsecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
_ = os.Setenv("CHARON_ENV", "production")
defer func() { _ = os.Unsetenv("CHARON_ENV") }()
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
@@ -182,9 +166,6 @@ func TestSetSecureCookie_ForwardedHostLocalhostForcesInsecure(t *testing.T) {
func TestSetSecureCookie_OriginLoopbackForcesInsecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
_ = os.Setenv("CHARON_ENV", "production")
defer func() { _ = os.Unsetenv("CHARON_ENV") }()
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
@@ -204,7 +185,6 @@ func TestSetSecureCookie_OriginLoopbackForcesInsecure(t *testing.T) {
func TestSetSecureCookie_HTTP_PrivateIP_Insecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://192.168.1.50:8080/login", http.NoBody)
@@ -216,13 +196,12 @@ func TestSetSecureCookie_HTTP_PrivateIP_Insecure(t *testing.T) {
cookies := recorder.Result().Cookies()
require.Len(t, cookies, 1)
cookie := cookies[0]
assert.False(t, cookie.Secure)
assert.True(t, cookie.Secure)
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
}
func TestSetSecureCookie_HTTP_10Network_Insecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://10.0.0.5:8080/login", http.NoBody)
@@ -234,13 +213,12 @@ func TestSetSecureCookie_HTTP_10Network_Insecure(t *testing.T) {
cookies := recorder.Result().Cookies()
require.Len(t, cookies, 1)
cookie := cookies[0]
assert.False(t, cookie.Secure)
assert.True(t, cookie.Secure)
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
}
func TestSetSecureCookie_HTTP_172Network_Insecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://172.16.0.1:8080/login", http.NoBody)
@@ -252,13 +230,12 @@ func TestSetSecureCookie_HTTP_172Network_Insecure(t *testing.T) {
cookies := recorder.Result().Cookies()
require.Len(t, cookies, 1)
cookie := cookies[0]
assert.False(t, cookie.Secure)
assert.True(t, cookie.Secure)
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
}
func TestSetSecureCookie_HTTPS_PrivateIP_Secure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "https://192.168.1.50:8080/login", http.NoBody)
@@ -276,7 +253,6 @@ func TestSetSecureCookie_HTTPS_PrivateIP_Secure(t *testing.T) {
func TestSetSecureCookie_HTTP_IPv6ULA_Insecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://[fd12::1]:8080/login", http.NoBody)
@@ -288,13 +264,12 @@ func TestSetSecureCookie_HTTP_IPv6ULA_Insecure(t *testing.T) {
cookies := recorder.Result().Cookies()
require.Len(t, cookies, 1)
cookie := cookies[0]
assert.False(t, cookie.Secure)
assert.True(t, cookie.Secure)
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
}
func TestSetSecureCookie_HTTP_PublicIP_Secure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://203.0.113.5:8080/login", http.NoBody)
@@ -322,7 +297,6 @@ func TestIsProduction(t *testing.T) {
}
func TestRequestScheme(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Run("forwarded proto first value wins", func(t *testing.T) {
recorder := httptest.NewRecorder()
@@ -393,7 +367,6 @@ func TestHostHelpers(t *testing.T) {
}
func TestIsLocalRequest(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Run("forwarded host list includes localhost", func(t *testing.T) {
recorder := httptest.NewRecorder()
@@ -428,7 +401,6 @@ func TestIsLocalRequest(t *testing.T) {
}
func TestClearSecureCookie(t *testing.T) {
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
ctx.Request = httptest.NewRequest("POST", "http://example.com/logout", http.NoBody)
@@ -439,12 +411,12 @@ func TestClearSecureCookie(t *testing.T) {
require.Len(t, cookies, 1)
assert.Equal(t, "auth_token", cookies[0].Name)
assert.Equal(t, -1, cookies[0].MaxAge)
assert.True(t, cookies[0].Secure)
}
func TestAuthHandler_Login_Errors(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/login", handler.Login)
@@ -472,7 +444,6 @@ func TestAuthHandler_Register(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/register", handler.Register)
@@ -496,7 +467,6 @@ func TestAuthHandler_Register_Duplicate(t *testing.T) {
handler, db := setupAuthHandler(t)
db.Create(&models.User{UUID: uuid.NewString(), Email: "dup@example.com", Name: "Dup"})
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/register", handler.Register)
@@ -518,7 +488,6 @@ func TestAuthHandler_Logout(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/logout", handler.Logout)
@@ -547,7 +516,6 @@ func TestAuthHandler_Me(t *testing.T) {
}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
// Simulate middleware
r.Use(func(c *gin.Context) {
@@ -573,7 +541,6 @@ func TestAuthHandler_Me(t *testing.T) {
func TestAuthHandler_Me_NotFound(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", uint(999)) // Non-existent ID
@@ -601,7 +568,6 @@ func TestAuthHandler_ChangePassword(t *testing.T) {
_ = user.SetPassword("oldpassword")
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
// Simulate middleware
r.Use(func(c *gin.Context) {
@@ -636,7 +602,6 @@ func TestAuthHandler_ChangePassword_WrongOld(t *testing.T) {
_ = user.SetPassword("correct")
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -660,7 +625,6 @@ func TestAuthHandler_ChangePassword_WrongOld(t *testing.T) {
func TestAuthHandler_ChangePassword_Errors(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/change-password", handler.ChangePassword)
@@ -707,7 +671,6 @@ func TestNewAuthHandlerWithDB(t *testing.T) {
func TestAuthHandler_Verify_NoCookie(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -722,7 +685,6 @@ func TestAuthHandler_Verify_NoCookie(t *testing.T) {
func TestAuthHandler_Verify_InvalidToken(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -752,7 +714,6 @@ func TestAuthHandler_Verify_ValidToken(t *testing.T) {
// Generate token
token, _ := handler.authService.GenerateToken(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -782,7 +743,6 @@ func TestAuthHandler_Verify_BearerToken(t *testing.T) {
token, _ := handler.authService.GenerateToken(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -812,7 +772,6 @@ func TestAuthHandler_Verify_DisabledUser(t *testing.T) {
token, _ := handler.authService.GenerateToken(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -852,7 +811,6 @@ func TestAuthHandler_Verify_ForwardAuthDenied(t *testing.T) {
token, _ := handler.authService.GenerateToken(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -868,7 +826,6 @@ func TestAuthHandler_Verify_ForwardAuthDenied(t *testing.T) {
func TestAuthHandler_VerifyStatus_NotAuthenticated(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/status", handler.VerifyStatus)
@@ -885,7 +842,6 @@ func TestAuthHandler_VerifyStatus_NotAuthenticated(t *testing.T) {
func TestAuthHandler_VerifyStatus_InvalidToken(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/status", handler.VerifyStatus)
@@ -916,7 +872,6 @@ func TestAuthHandler_VerifyStatus_Authenticated(t *testing.T) {
token, _ := handler.authService.GenerateToken(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/status", handler.VerifyStatus)
@@ -950,7 +905,6 @@ func TestAuthHandler_VerifyStatus_DisabledUser(t *testing.T) {
token, _ := handler.authService.GenerateToken(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/status", handler.VerifyStatus)
@@ -968,7 +922,6 @@ func TestAuthHandler_VerifyStatus_DisabledUser(t *testing.T) {
func TestAuthHandler_GetAccessibleHosts_Unauthorized(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/hosts", handler.GetAccessibleHosts)
@@ -999,7 +952,6 @@ func TestAuthHandler_GetAccessibleHosts_AllowAll(t *testing.T) {
}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1036,7 +988,6 @@ func TestAuthHandler_GetAccessibleHosts_DenyAll(t *testing.T) {
}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1076,7 +1027,6 @@ func TestAuthHandler_GetAccessibleHosts_PermittedHosts(t *testing.T) {
}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1099,7 +1049,6 @@ func TestAuthHandler_GetAccessibleHosts_UserNotFound(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", uint(99999))
@@ -1117,7 +1066,6 @@ func TestAuthHandler_GetAccessibleHosts_UserNotFound(t *testing.T) {
func TestAuthHandler_CheckHostAccess_Unauthorized(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/hosts/:hostId/access", handler.CheckHostAccess)
@@ -1135,7 +1083,6 @@ func TestAuthHandler_CheckHostAccess_InvalidHostID(t *testing.T) {
user := &models.User{UUID: uuid.NewString(), Email: "check@example.com", Enabled: true}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1165,7 +1112,6 @@ func TestAuthHandler_CheckHostAccess_Allowed(t *testing.T) {
}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1198,7 +1144,6 @@ func TestAuthHandler_CheckHostAccess_Denied(t *testing.T) {
}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1275,7 +1220,6 @@ func TestAuthHandler_Me_RequiresUserContext(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/me", handler.Me)
@@ -1359,7 +1303,6 @@ func TestAuthHandler_Refresh(t *testing.T) {
require.NoError(t, user.SetPassword("password123"))
require.NoError(t, db.Create(user).Error)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/refresh", func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1380,7 +1323,6 @@ func TestAuthHandler_Refresh_Unauthorized(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/refresh", handler.Refresh)
@@ -1395,7 +1337,6 @@ func TestAuthHandler_Register_BadRequest(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/register", handler.Register)
@@ -1411,7 +1352,6 @@ func TestAuthHandler_Logout_InvalidateSessionsFailure(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", uint(999999))
@@ -1455,7 +1395,6 @@ func TestAuthHandler_Verify_UsesOriginalHostFallback(t *testing.T) {
token, err := handler.authService.GenerateToken(user)
require.NoError(t, err)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -1473,7 +1412,6 @@ func TestAuthHandler_GetAccessibleHosts_DatabaseUnavailable(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", uint(1))
@@ -1493,7 +1431,6 @@ func TestAuthHandler_CheckHostAccess_DatabaseUnavailable(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", uint(1))
@@ -1513,7 +1450,6 @@ func TestAuthHandler_CheckHostAccess_UserNotFound(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", uint(999999))

View File

@@ -16,7 +16,6 @@ import (
)
func TestBackupHandlerSanitizesFilename(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
// prepare a fake "database"
dbPath := filepath.Join(tmpDir, "db.sqlite")

View File

@@ -21,7 +21,6 @@ import (
)
func init() {
gin.SetMode(gin.TestMode)
}
// TestCerberusLogsHandler_NewHandler verifies handler creation.

View File

@@ -2,14 +2,18 @@ package handlers
import (
"fmt"
"io"
"net/http"
"strconv"
"sync"
"time"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"gorm.io/gorm"
"github.com/Wikid82/charon/backend/internal/logger"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/Wikid82/charon/backend/internal/services"
"github.com/Wikid82/charon/backend/internal/util"
)
@@ -28,9 +32,10 @@ type CertificateHandler struct {
service *services.CertificateService
backupService BackupServiceInterface
notificationService *services.NotificationService
db *gorm.DB
// Rate limiting for notifications
notificationMu sync.Mutex
lastNotificationTime map[uint]time.Time
lastNotificationTime map[string]time.Time
}
func NewCertificateHandler(service *services.CertificateService, backupService BackupServiceInterface, ns *services.NotificationService) *CertificateHandler {
@@ -38,10 +43,18 @@ func NewCertificateHandler(service *services.CertificateService, backupService B
service: service,
backupService: backupService,
notificationService: ns,
lastNotificationTime: make(map[uint]time.Time),
lastNotificationTime: make(map[string]time.Time),
}
}
// SetDB sets the database connection for user lookups (export re-auth).
// Optional wiring: Export only needs it when include_key is requested; while
// h.db is nil, private-key exports are rejected with 403 (see Export).
func (h *CertificateHandler) SetDB(db *gorm.DB) {
	h.db = db
}
// maxFileSize is 1MB for certificate file uploads.
const maxFileSize = 1 << 20
func (h *CertificateHandler) List(c *gin.Context) {
certs, err := h.service.ListCertificates()
if err != nil {
@@ -53,34 +66,41 @@ func (h *CertificateHandler) List(c *gin.Context) {
c.JSON(http.StatusOK, certs)
}
type UploadCertificateRequest struct {
Name string `form:"name" binding:"required"`
Certificate string `form:"certificate"` // PEM content
PrivateKey string `form:"private_key"` // PEM content
// Get returns the detail record for a single certificate, looked up by the
// uuid path parameter. Responds 400 when the parameter is empty, 404 when no
// certificate matches, and 500 on any other service failure.
func (h *CertificateHandler) Get(c *gin.Context) {
	id := c.Param("uuid")
	if id == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "uuid is required"})
		return
	}
	detail, err := h.service.GetCertificate(id)
	switch {
	case err == nil:
		c.JSON(http.StatusOK, detail)
	case err == services.ErrCertNotFound:
		c.JSON(http.StatusNotFound, gin.H{"error": "certificate not found"})
	default:
		logger.Log().WithError(err).Error("failed to get certificate")
		c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to get certificate"})
	}
}
func (h *CertificateHandler) Upload(c *gin.Context) {
// Handle multipart form
name := c.PostForm("name")
if name == "" {
c.JSON(http.StatusBadRequest, gin.H{"error": "name is required"})
return
}
// Read files
// Read certificate file
certFile, err := c.FormFile("certificate_file")
if err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": "certificate_file is required"})
return
}
keyFile, err := c.FormFile("key_file")
if err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": "key_file is required"})
return
}
// Open and read content
certSrc, err := certFile.Open()
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to open cert file"})
@@ -92,35 +112,75 @@ func (h *CertificateHandler) Upload(c *gin.Context) {
}
}()
keySrc, err := keyFile.Open()
certBytes, err := io.ReadAll(io.LimitReader(certSrc, maxFileSize))
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to open key file"})
c.JSON(http.StatusBadRequest, gin.H{"error": "failed to read certificate file"})
return
}
defer func() {
if errClose := keySrc.Close(); errClose != nil {
logger.Log().WithError(errClose).Warn("failed to close key file")
certPEM := string(certBytes)
// Read private key file (optional — format detection is content-based in the service)
var keyPEM string
keyFile, err := c.FormFile("key_file")
if err == nil {
keySrc, errOpen := keyFile.Open()
if errOpen != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to open key file"})
return
}
}()
defer func() {
if errClose := keySrc.Close(); errClose != nil {
logger.Log().WithError(errClose).Warn("failed to close key file")
}
}()
// Read to string
// Limit size to avoid DoS (e.g. 1MB)
certBytes := make([]byte, 1024*1024)
n, _ := certSrc.Read(certBytes)
certPEM := string(certBytes[:n])
keyBytes, errRead := io.ReadAll(io.LimitReader(keySrc, maxFileSize))
if errRead != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": "failed to read key file"})
return
}
keyPEM = string(keyBytes)
}
keyBytes := make([]byte, 1024*1024)
n, _ = keySrc.Read(keyBytes)
keyPEM := string(keyBytes[:n])
// Read chain file (optional)
var chainPEM string
chainFile, err := c.FormFile("chain_file")
if err == nil {
chainSrc, errOpen := chainFile.Open()
if errOpen != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to open chain file"})
return
}
defer func() {
if errClose := chainSrc.Close(); errClose != nil {
logger.Log().WithError(errClose).Warn("failed to close chain file")
}
}()
cert, err := h.service.UploadCertificate(name, certPEM, keyPEM)
chainBytes, errRead := io.ReadAll(io.LimitReader(chainSrc, maxFileSize))
if errRead != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": "failed to read chain file"})
return
}
chainPEM = string(chainBytes)
}
// Require key_file for non-PFX formats (PFX embeds the private key)
if keyPEM == "" {
format := services.DetectFormat(certBytes)
if format != services.FormatPFX {
c.JSON(http.StatusBadRequest, gin.H{"error": "key_file is required for PEM/DER certificate uploads"})
return
}
}
cert, err := h.service.UploadCertificate(name, certPEM, keyPEM, chainPEM)
if err != nil {
logger.Log().WithError(err).Error("failed to upload certificate")
c.JSON(http.StatusBadRequest, gin.H{"error": "failed to upload certificate"})
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
// Send Notification
if h.notificationService != nil {
h.notificationService.SendExternal(c.Request.Context(),
"cert",
@@ -137,24 +197,255 @@ func (h *CertificateHandler) Upload(c *gin.Context) {
c.JSON(http.StatusCreated, cert)
}
// updateCertificateRequest is the JSON body accepted by Update.
type updateCertificateRequest struct {
	Name string `json:"name" binding:"required"`
}

// Update renames the certificate identified by the uuid path parameter.
// Responds 400 on a missing uuid or body name, 404 when the certificate does
// not exist, and 500 on other service failures.
func (h *CertificateHandler) Update(c *gin.Context) {
	id := c.Param("uuid")
	if id == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "uuid is required"})
		return
	}
	var body updateCertificateRequest
	if bindErr := c.ShouldBindJSON(&body); bindErr != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "name is required"})
		return
	}
	info, err := h.service.UpdateCertificate(id, body.Name)
	switch {
	case err == nil:
		c.JSON(http.StatusOK, info)
	case err == services.ErrCertNotFound:
		c.JSON(http.StatusNotFound, gin.H{"error": "certificate not found"})
	default:
		logger.Log().WithError(err).Error("failed to update certificate")
		c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to update certificate"})
	}
}
// Validate runs server-side validation on an uploaded certificate bundle
// without persisting anything. Multipart fields:
//   - certificate_file: required
//   - key_file:         optional
//   - chain_file:       optional
//
// Responds 200 with the service's validation result, or 400 with the
// validation errors. The original open/read/close handling was triplicated
// verbatim for the three fields; it is factored into readCertFormFile below.
// The "failed to open cert file" message is normalized to "certificate" for
// consistency with the read/close messages of the same field.
func (h *CertificateHandler) Validate(c *gin.Context) {
	certPEM, ok := readCertFormFile(c, "certificate_file", "certificate", true)
	if !ok {
		return
	}
	keyPEM, ok := readCertFormFile(c, "key_file", "key", false)
	if !ok {
		return
	}
	chainPEM, ok := readCertFormFile(c, "chain_file", "chain", false)
	if !ok {
		return
	}
	result, err := h.service.ValidateCertificate(certPEM, keyPEM, chainPEM)
	if err != nil {
		logger.Log().WithError(err).Error("failed to validate certificate")
		c.JSON(http.StatusBadRequest, gin.H{
			"error":  "validation failed",
			"errors": []string{err.Error()},
		})
		return
	}
	c.JSON(http.StatusOK, result)
}

// readCertFormFile fetches multipart field `field` and returns its content,
// capped at maxFileSize to bound memory use. The boolean reports whether the
// caller may continue: on any failure (or a missing required field) an error
// response has already been written and it returns false. A missing optional
// field yields ("", true). `label` is the human-readable field name used in
// error and log messages ("certificate", "key", "chain").
func readCertFormFile(c *gin.Context, field, label string, required bool) (string, bool) {
	fh, err := c.FormFile(field)
	if err != nil {
		if required {
			c.JSON(http.StatusBadRequest, gin.H{"error": field + " is required"})
			return "", false
		}
		return "", true
	}
	src, err := fh.Open()
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to open " + label + " file"})
		return "", false
	}
	// Close promptly when this helper returns (the original deferred closes
	// to the end of the whole handler).
	defer func() {
		if errClose := src.Close(); errClose != nil {
			logger.Log().WithError(errClose).Warn("failed to close " + label + " file")
		}
	}()
	data, err := io.ReadAll(io.LimitReader(src, maxFileSize))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "failed to read " + label + " file"})
		return "", false
	}
	return string(data), true
}
// exportCertificateRequest is the JSON body accepted by Export.
type exportCertificateRequest struct {
	Format      string `json:"format" binding:"required"` // target export format, interpreted by the service
	IncludeKey  bool   `json:"include_key"`               // bundle the private key; triggers password re-auth
	PFXPassword string `json:"pfx_password"`              // passphrase applied to a PFX export
	Password    string `json:"password"`                  // account password for re-auth when include_key is set
}

// Export streams the certificate identified by the uuid path parameter in the
// requested format as a file download.
//
// When the caller asks for the private key (include_key), the handler
// re-authenticates before exporting: the request must carry the account
// password, the "user" context value must resolve to a user row via h.db, and
// the password must match. Every failure in that chain responds 403.
//
// NOTE(review): this assumes the auth middleware stores c.Get("user") as a
// map[string]any containing an "id" key, and that SetDB was called during
// wiring — confirm against the router setup.
func (h *CertificateHandler) Export(c *gin.Context) {
	certUUID := c.Param("uuid")
	if certUUID == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "uuid is required"})
		return
	}
	var req exportCertificateRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "format is required"})
		return
	}
	// Re-authenticate when requesting private key
	if req.IncludeKey {
		if req.Password == "" {
			c.JSON(http.StatusForbidden, gin.H{"error": "password required to export private key"})
			return
		}
		userVal, exists := c.Get("user")
		if !exists || h.db == nil {
			c.JSON(http.StatusForbidden, gin.H{"error": "authentication required"})
			return
		}
		userMap, ok := userVal.(map[string]any)
		if !ok {
			c.JSON(http.StatusForbidden, gin.H{"error": "invalid session"})
			return
		}
		userID, ok := userMap["id"]
		if !ok {
			c.JSON(http.StatusForbidden, gin.H{"error": "invalid session"})
			return
		}
		var user models.User
		if err := h.db.First(&user, userID).Error; err != nil {
			c.JSON(http.StatusForbidden, gin.H{"error": "user not found"})
			return
		}
		if !user.CheckPassword(req.Password) {
			c.JSON(http.StatusForbidden, gin.H{"error": "incorrect password"})
			return
		}
	}
	data, filename, err := h.service.ExportCertificate(certUUID, req.Format, req.IncludeKey, req.PFXPassword)
	if err != nil {
		if err == services.ErrCertNotFound {
			c.JSON(http.StatusNotFound, gin.H{"error": "certificate not found"})
			return
		}
		// Sanitize before logging: the error text may echo request-influenced input.
		logger.Log().WithError(fmt.Errorf("%s", util.SanitizeForLog(err.Error()))).Error("failed to export certificate")
		c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to export certificate"})
		return
	}
	// %q quotes/escapes the filename, preventing header injection via the name.
	c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=%q", filename))
	c.Data(http.StatusOK, "application/octet-stream", data)
}
func (h *CertificateHandler) Delete(c *gin.Context) {
idStr := c.Param("id")
id, err := strconv.ParseUint(idStr, 10, 32)
if err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid id"})
idStr := c.Param("uuid")
// Support both numeric ID (legacy) and UUID
if numID, err := strconv.ParseUint(idStr, 10, 32); err == nil && numID > 0 {
inUse, err := h.service.IsCertificateInUse(uint(numID))
if err != nil {
logger.Log().WithError(err).WithField("certificate_id", numID).Error("failed to check certificate usage")
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to check certificate usage"})
return
}
if inUse {
c.JSON(http.StatusConflict, gin.H{"error": "certificate is in use by one or more proxy hosts"})
return
}
if h.backupService != nil {
if availableSpace, err := h.backupService.GetAvailableSpace(); err != nil {
logger.Log().WithError(err).Warn("unable to check disk space, proceeding with backup")
} else if availableSpace < 100*1024*1024 {
logger.Log().WithField("available_bytes", availableSpace).Warn("low disk space, skipping backup")
c.JSON(http.StatusInsufficientStorage, gin.H{"error": "insufficient disk space for backup"})
return
}
if _, err := h.backupService.CreateBackup(); err != nil {
logger.Log().WithError(err).Error("failed to create backup before deletion")
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to create backup before deletion"})
return
}
}
if err := h.service.DeleteCertificateByID(uint(numID)); err != nil {
if err == services.ErrCertInUse {
c.JSON(http.StatusConflict, gin.H{"error": "certificate is in use by one or more proxy hosts"})
return
}
logger.Log().WithError(err).WithField("certificate_id", numID).Error("failed to delete certificate")
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to delete certificate"})
return
}
h.sendDeleteNotification(c, fmt.Sprintf("%d", numID))
c.JSON(http.StatusOK, gin.H{"message": "certificate deleted"})
return
}
// Validate ID range
if id == 0 {
// UUID path - parse to validate format and produce a canonical, safe string
parsedUUID, parseErr := uuid.Parse(idStr)
if parseErr != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid id"})
return
}
certUUID := parsedUUID.String()
// Check if certificate is in use before proceeding
inUse, err := h.service.IsCertificateInUse(uint(id))
inUse, err := h.service.IsCertificateInUseByUUID(certUUID)
if err != nil {
logger.Log().WithError(err).WithField("certificate_id", id).Error("failed to check certificate usage")
if err == services.ErrCertNotFound {
c.JSON(http.StatusNotFound, gin.H{"error": "certificate not found"})
return
}
logger.Log().WithError(err).WithField("certificate_uuid", util.SanitizeForLog(certUUID)).Error("failed to check certificate usage")
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to check certificate usage"})
return
}
@@ -163,13 +454,10 @@ func (h *CertificateHandler) Delete(c *gin.Context) {
return
}
// Create backup before deletion
if h.backupService != nil {
// Check disk space before backup (require at least 100MB free)
if availableSpace, err := h.backupService.GetAvailableSpace(); err != nil {
logger.Log().WithError(err).Warn("unable to check disk space, proceeding with backup")
} else if availableSpace < 100*1024*1024 {
logger.Log().WithField("available_bytes", availableSpace).Warn("low disk space, skipping backup")
c.JSON(http.StatusInsufficientStorage, gin.H{"error": "insufficient disk space for backup"})
return
}
@@ -181,38 +469,62 @@ func (h *CertificateHandler) Delete(c *gin.Context) {
}
}
// Proceed with deletion
if err := h.service.DeleteCertificate(uint(id)); err != nil {
if err := h.service.DeleteCertificate(certUUID); err != nil {
if err == services.ErrCertInUse {
c.JSON(http.StatusConflict, gin.H{"error": "certificate is in use by one or more proxy hosts"})
return
}
logger.Log().WithError(err).WithField("certificate_id", id).Error("failed to delete certificate")
if err == services.ErrCertNotFound {
c.JSON(http.StatusNotFound, gin.H{"error": "certificate not found"})
return
}
logger.Log().WithError(err).WithField("certificate_uuid", util.SanitizeForLog(certUUID)).Error("failed to delete certificate")
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to delete certificate"})
return
}
// Send Notification with rate limiting (1 per cert per 10 seconds)
if h.notificationService != nil {
h.notificationMu.Lock()
lastTime, exists := h.lastNotificationTime[uint(id)]
if !exists || time.Since(lastTime) > 10*time.Second {
h.lastNotificationTime[uint(id)] = time.Now()
h.notificationMu.Unlock()
h.notificationService.SendExternal(c.Request.Context(),
"cert",
"Certificate Deleted",
fmt.Sprintf("Certificate ID %d deleted", id),
map[string]any{
"ID": id,
"Action": "deleted",
},
)
} else {
h.notificationMu.Unlock()
logger.Log().WithField("certificate_id", id).Debug("notification rate limited")
}
}
h.sendDeleteNotification(c, certUUID)
c.JSON(http.StatusOK, gin.H{"message": "certificate deleted"})
}
// sendDeleteNotification emits an external "Certificate Deleted" notification,
// rate limited to at most one per certificate reference every 10 seconds.
// certRef may be a UUID or a numeric-ID string; callers pass validated data,
// but it is re-sanitized here as defense-in-depth (breaks CodeQL taint from
// user input) before being logged or embedded in the message.
func (h *CertificateHandler) sendDeleteNotification(c *gin.Context, certRef string) {
	if h.notificationService == nil {
		return
	}

	// Re-validate to produce a safe, canonical value for logs and messages.
	safeRef := sanitizeCertRef(certRef)

	// Rate-limit check: drop the notification if one was sent for this
	// reference within the last 10 seconds. The raw certRef keys the map so
	// the limiter tracks exactly what callers pass.
	h.notificationMu.Lock()
	last, seen := h.lastNotificationTime[certRef]
	if seen && time.Since(last) < 10*time.Second {
		h.notificationMu.Unlock()
		logger.Log().WithField("certificate_ref", safeRef).Debug("notification rate limited")
		return
	}
	h.lastNotificationTime[certRef] = time.Now()
	h.notificationMu.Unlock()

	h.notificationService.SendExternal(c.Request.Context(),
		"cert",
		"Certificate Deleted",
		fmt.Sprintf("Certificate %s deleted", safeRef),
		map[string]any{
			"Ref":    safeRef,
			"Action": "deleted",
		},
	)
}
// sanitizeCertRef re-validates a certificate reference (UUID or numeric ID)
// and returns a canonical, log-safe string representation. Input that is
// neither a valid UUID nor an unsigned integer maps to a fixed placeholder,
// so tainted strings can never reach logs or notification messages.
func sanitizeCertRef(ref string) string {
	if parsed, err := uuid.Parse(ref); err == nil {
		return parsed.String()
	}
	n, err := strconv.ParseUint(ref, 10, 64)
	if err != nil {
		return "[invalid-ref]"
	}
	return strconv.FormatUint(n, 10)
}

View File

@@ -1,12 +1,18 @@
package handlers
import (
"bytes"
"encoding/json"
"mime/multipart"
"net/http"
"net/http/httptest"
"strings"
"testing"
"time"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/Wikid82/charon/backend/internal/services"
@@ -16,10 +22,9 @@ func TestCertificateHandler_List_DBError(t *testing.T) {
db := OpenTestDB(t)
// Don't migrate to cause error
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.GET("/api/certificates", h.List)
@@ -33,12 +38,11 @@ func TestCertificateHandler_List_DBError(t *testing.T) {
func TestCertificateHandler_Delete_InvalidID(t *testing.T) {
db := OpenTestDBWithMigrations(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/invalid", http.NoBody)
w := httptest.NewRecorder()
@@ -50,12 +54,11 @@ func TestCertificateHandler_Delete_InvalidID(t *testing.T) {
func TestCertificateHandler_Delete_NotFound(t *testing.T) {
db := OpenTestDBWithMigrations(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/9999", http.NoBody)
w := httptest.NewRecorder()
@@ -71,14 +74,13 @@ func TestCertificateHandler_Delete_NoBackupService(t *testing.T) {
cert := models.SSLCertificate{UUID: "test-cert-no-backup", Name: "no-backup-cert", Provider: "custom", Domains: "nobackup.example.com"}
db.Create(&cert)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
// No backup service
h := NewCertificateHandler(svc, nil, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(cert.ID), http.NoBody)
w := httptest.NewRecorder()
@@ -97,12 +99,11 @@ func TestCertificateHandler_Delete_CheckUsageDBError(t *testing.T) {
cert := models.SSLCertificate{UUID: "test-cert-db-err", Name: "db-error-cert", Provider: "custom", Domains: "dberr.example.com"}
db.Create(&cert)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(cert.ID), http.NoBody)
w := httptest.NewRecorder()
@@ -118,10 +119,9 @@ func TestCertificateHandler_List_WithCertificates(t *testing.T) {
db.Create(&models.SSLCertificate{UUID: "cert-1", Name: "Cert 1", Provider: "custom", Domains: "one.example.com"})
db.Create(&models.SSLCertificate{UUID: "cert-2", Name: "Cert 2", Provider: "custom", Domains: "two.example.com"})
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.GET("/api/certificates", h.List)
@@ -139,12 +139,11 @@ func TestCertificateHandler_Delete_ZeroID(t *testing.T) {
// DELETE /api/certificates/0 should return 400 Bad Request
db := OpenTestDBWithMigrations(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/0", http.NoBody)
w := httptest.NewRecorder()
@@ -173,11 +172,10 @@ func TestCertificateHandler_DBSetupOrdering(t *testing.T) {
t.Fatalf("expected proxy_hosts table to exist before service initialization")
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.GET("/api/certificates", h.List)
@@ -187,3 +185,395 @@ func TestCertificateHandler_DBSetupOrdering(t *testing.T) {
assert.Equal(t, http.StatusOK, w.Code)
}
// --- Get handler tests ---

// Verifies GET /api/certificates/:uuid returns 200 with the stored
// certificate's UUID and name in the JSON body.
func TestCertificateHandler_Get_Success(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	expiry := time.Now().Add(30 * 24 * time.Hour)
	db.Create(&models.SSLCertificate{UUID: "get-uuid-1", Name: "Get Test", Provider: "custom", Domains: "get.example.com", ExpiresAt: &expiry})
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.GET("/api/certificates/:uuid", h.Get)
	req := httptest.NewRequest(http.MethodGet, "/api/certificates/get-uuid-1", http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusOK, w.Code)
	assert.Contains(t, w.Body.String(), "get-uuid-1")
	assert.Contains(t, w.Body.String(), "Get Test")
}

// Verifies an unknown UUID yields 404 Not Found.
func TestCertificateHandler_Get_NotFound(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.GET("/api/certificates/:uuid", h.Get)
	req := httptest.NewRequest(http.MethodGet, "/api/certificates/nonexistent-uuid", http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusNotFound, w.Code)
}

// Verifies a request with no UUID path segment is rejected; the exact code
// (400 vs 404) depends on how gin resolves the empty parameter.
func TestCertificateHandler_Get_EmptyUUID(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	// Route with empty uuid param won't match, test the handler directly with blank uuid
	r.GET("/api/certificates/", h.Get)
	req := httptest.NewRequest(http.MethodGet, "/api/certificates/", http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	// Empty uuid should return 400 or 404 depending on router handling
	assert.True(t, w.Code == http.StatusBadRequest || w.Code == http.StatusNotFound)
}
// --- SetDB test ---

// Verifies SetDB wires the handler's db field, which starts nil when the
// handler is constructed without one.
func TestCertificateHandler_SetDB(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	assert.Nil(t, h.db)
	h.SetDB(db)
	assert.NotNil(t, h.db)
}
// --- Update handler tests ---

// Verifies PUT /api/certificates/:uuid renames an existing certificate and
// echoes the new name in the response.
func TestCertificateHandler_Update_Success(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	expiry := time.Now().Add(30 * 24 * time.Hour)
	db.Create(&models.SSLCertificate{UUID: "upd-uuid-1", Name: "Old Name", Provider: "custom", Domains: "update.example.com", ExpiresAt: &expiry})
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.PUT("/api/certificates/:uuid", h.Update)
	body, _ := json.Marshal(map[string]string{"name": "New Name"})
	req := httptest.NewRequest(http.MethodPut, "/api/certificates/upd-uuid-1", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusOK, w.Code)
	assert.Contains(t, w.Body.String(), "New Name")
}

// Verifies updating an unknown UUID yields 404.
func TestCertificateHandler_Update_NotFound(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.PUT("/api/certificates/:uuid", h.Update)
	body, _ := json.Marshal(map[string]string{"name": "New Name"})
	req := httptest.NewRequest(http.MethodPut, "/api/certificates/nonexistent-uuid", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusNotFound, w.Code)
}

// Verifies malformed JSON in the request body yields 400.
func TestCertificateHandler_Update_BadJSON(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.PUT("/api/certificates/:uuid", h.Update)
	req := httptest.NewRequest(http.MethodPut, "/api/certificates/some-uuid", strings.NewReader("{invalid"))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusBadRequest, w.Code)
}

// Verifies a JSON body without the required "name" field yields 400.
func TestCertificateHandler_Update_MissingName(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.PUT("/api/certificates/:uuid", h.Update)
	body, _ := json.Marshal(map[string]string{})
	req := httptest.NewRequest(http.MethodPut, "/api/certificates/some-uuid", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusBadRequest, w.Code)
}
// --- Validate handler tests ---

// Verifies POST /api/certificates/validate accepts a matching self-signed
// cert/key pair uploaded as multipart form files and reports it valid.
func TestCertificateHandler_Validate_Success(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.POST("/api/certificates/validate", h.Validate)
	certPEM, keyPEM, err := generateSelfSignedCertPEM()
	require.NoError(t, err)
	var body bytes.Buffer
	writer := multipart.NewWriter(&body)
	part, _ := writer.CreateFormFile("certificate_file", "cert.pem")
	_, _ = part.Write([]byte(certPEM))
	part2, _ := writer.CreateFormFile("key_file", "key.pem")
	_, _ = part2.Write([]byte(keyPEM))
	_ = writer.Close()
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/validate", &body)
	req.Header.Set("Content-Type", writer.FormDataContentType())
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusOK, w.Code)
	assert.Contains(t, w.Body.String(), "valid")
}

// Verifies a multipart request without a certificate file yields 400.
func TestCertificateHandler_Validate_NoCertFile(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.POST("/api/certificates/validate", h.Validate)
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/validate", strings.NewReader(""))
	req.Header.Set("Content-Type", "multipart/form-data")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusBadRequest, w.Code)
}

// Verifies validation succeeds when only the certificate file is supplied
// (the key is optional).
func TestCertificateHandler_Validate_CertOnly(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.POST("/api/certificates/validate", h.Validate)
	certPEM, _, err := generateSelfSignedCertPEM()
	require.NoError(t, err)
	var body bytes.Buffer
	writer := multipart.NewWriter(&body)
	part, _ := writer.CreateFormFile("certificate_file", "cert.pem")
	_, _ = part.Write([]byte(certPEM))
	_ = writer.Close()
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/validate", &body)
	req.Header.Set("Content-Type", writer.FormDataContentType())
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusOK, w.Code)
}
// --- Export handler tests ---

// Verifies a request whose :uuid segment is empty does not reach a successful
// export; gin may 404, redirect, or the handler may reject with 400.
func TestCertificateHandler_Export_EmptyUUID(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.POST("/api/certificates/:uuid/export", h.Export)
	body, _ := json.Marshal(map[string]any{"format": "pem"})
	// Use a route that provides :uuid param as empty would not match normal routing
	req := httptest.NewRequest(http.MethodPost, "/api/certificates//export", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	// Router won't match empty uuid, so 404 or redirect
	assert.True(t, w.Code == http.StatusNotFound || w.Code == http.StatusMovedPermanently || w.Code == http.StatusBadRequest)
}

// Verifies malformed JSON in the export request body yields 400.
func TestCertificateHandler_Export_BadJSON(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.POST("/api/certificates/:uuid/export", h.Export)
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/some-uuid/export", strings.NewReader("{bad"))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusBadRequest, w.Code)
}

// Verifies exporting an unknown UUID yields 404.
func TestCertificateHandler_Export_NotFound(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.POST("/api/certificates/:uuid/export", h.Export)
	body, _ := json.Marshal(map[string]any{"format": "pem"})
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/nonexistent-uuid/export", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusNotFound, w.Code)
}

// Verifies a PEM export succeeds and sets a Content-Disposition filename
// derived from the certificate's name.
func TestCertificateHandler_Export_PEMSuccess(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	certPEM, _, err := generateSelfSignedCertPEM()
	require.NoError(t, err)
	cert := models.SSLCertificate{UUID: "export-uuid-1", Name: "Export Test", Provider: "custom", Domains: "export.example.com", Certificate: certPEM}
	db.Create(&cert)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	tmpDir := t.TempDir()
	svc := services.NewCertificateService(tmpDir, db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.POST("/api/certificates/:uuid/export", h.Export)
	body, _ := json.Marshal(map[string]any{"format": "pem"})
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/export-uuid-1/export", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusOK, w.Code)
	assert.Contains(t, w.Header().Get("Content-Disposition"), "Export Test.pem")
}

// Verifies requesting the private key without a password is forbidden.
func TestCertificateHandler_Export_IncludeKeyNoPassword(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	cert := models.SSLCertificate{UUID: "export-uuid-2", Name: "Key Test", Provider: "custom", Domains: "key.example.com"}
	db.Create(&cert)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.POST("/api/certificates/:uuid/export", h.Export)
	body, _ := json.Marshal(map[string]any{"format": "pem", "include_key": true})
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/export-uuid-2/export", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusForbidden, w.Code)
	assert.Contains(t, w.Body.String(), "password required")
}

// Verifies key export is forbidden when the handler has no DB wired for the
// password re-authentication check (SetDB never called).
func TestCertificateHandler_Export_IncludeKeyNoDBSet(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	cert := models.SSLCertificate{UUID: "export-uuid-3", Name: "No DB Test", Provider: "custom", Domains: "nodb.example.com"}
	db.Create(&cert)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	// h.db is nil - not set via SetDB
	r.POST("/api/certificates/:uuid/export", h.Export)
	body, _ := json.Marshal(map[string]any{"format": "pem", "include_key": true, "password": "test123"})
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/export-uuid-3/export", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusForbidden, w.Code)
	assert.Contains(t, w.Body.String(), "authentication required")
}
// --- Delete via UUID path tests ---

// Verifies DELETE with a well-formed but unknown UUID yields 404.
func TestCertificateHandler_Delete_UUIDPath_NotFound(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.DELETE("/api/certificates/:uuid", h.Delete)
	// Valid UUID format but does not exist
	req := httptest.NewRequest(http.MethodDelete, "/api/certificates/00000000-0000-0000-0000-000000000001", http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusNotFound, w.Code)
}

// Verifies DELETE by UUID is refused with 409 while a proxy host still
// references the certificate.
func TestCertificateHandler_Delete_UUIDPath_InUse(t *testing.T) {
	db := OpenTestDBWithMigrations(t)
	cert := models.SSLCertificate{UUID: "11111111-1111-1111-1111-111111111111", Name: "InUse UUID", Provider: "custom", Domains: "uuid-inuse.example.com"}
	db.Create(&cert)
	ph := models.ProxyHost{UUID: "ph-uuid-del", Name: "Proxy", DomainNames: "uuid-inuse.example.com", ForwardHost: "localhost", ForwardPort: 8080, CertificateID: &cert.ID}
	db.Create(&ph)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r.DELETE("/api/certificates/:uuid", h.Delete)
	req := httptest.NewRequest(http.MethodDelete, "/api/certificates/11111111-1111-1111-1111-111111111111", http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusConflict, w.Code)
}

// --- sanitizeCertRef tests ---

// Covers the three sanitizeCertRef branches: canonical UUID pass-through,
// numeric IDs (including zero), and the placeholder for invalid input.
func TestSanitizeCertRef(t *testing.T) {
	assert.Equal(t, "00000000-0000-0000-0000-000000000001", sanitizeCertRef("00000000-0000-0000-0000-000000000001"))
	assert.Equal(t, "123", sanitizeCertRef("123"))
	assert.Equal(t, "[invalid-ref]", sanitizeCertRef("not-valid"))
	assert.Equal(t, "0", sanitizeCertRef("0"))
}

View File

@@ -0,0 +1,707 @@
package handlers
import (
"bytes"
"encoding/json"
"fmt"
"mime/multipart"
"net/http"
"net/http/httptest"
"testing"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/Wikid82/charon/backend/internal/services"
)
// --- Delete UUID path with backup service ---

// Verifies the happy path: backup service reports ample space, backup
// succeeds, and the UUID delete returns 200.
func TestDelete_UUID_WithBackup_Success(t *testing.T) {
	tmpDir := t.TempDir()
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	certUUID := uuid.New().String()
	db.Create(&models.SSLCertificate{UUID: certUUID, Name: "backup-uuid", Provider: "custom", Domains: "backup.test"})
	svc := services.NewCertificateService(tmpDir, db, nil)
	mock := &mockBackupService{
		createFunc:         func() (string, error) { return "/tmp/backup.tar.gz", nil },
		availableSpaceFunc: func() (int64, error) { return 1024 * 1024 * 1024, nil },
	}
	h := NewCertificateHandler(svc, mock, nil)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	r.DELETE("/api/certificates/:uuid", h.Delete)
	req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+certUUID, http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusOK, w.Code)
}

// Verifies deleting a random, non-existent UUID yields 404.
func TestDelete_UUID_NotFound(t *testing.T) {
	tmpDir := t.TempDir()
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	svc := services.NewCertificateService(tmpDir, db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	r.DELETE("/api/certificates/:uuid", h.Delete)
	nonExistentUUID := uuid.New().String()
	req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+nonExistentUUID, http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusNotFound, w.Code)
}

// Verifies a UUID delete is refused with 409 while a proxy host references
// the certificate.
func TestDelete_UUID_InUse(t *testing.T) {
	tmpDir := t.TempDir()
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	certUUID := uuid.New().String()
	cert := models.SSLCertificate{UUID: certUUID, Name: "inuse-uuid", Provider: "custom", Domains: "inuse.test"}
	db.Create(&cert)
	db.Create(&models.ProxyHost{UUID: "ph-uuid-inuse", Name: "ph", DomainNames: "inuse.test", ForwardHost: "localhost", ForwardPort: 8080, CertificateID: &cert.ID})
	svc := services.NewCertificateService(tmpDir, db, nil)
	h := NewCertificateHandler(svc, nil, nil)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	r.DELETE("/api/certificates/:uuid", h.Delete)
	req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+certUUID, http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusConflict, w.Code)
}

// Verifies the 100MB free-space guard: too little space aborts with 507.
func TestDelete_UUID_BackupLowSpace(t *testing.T) {
	tmpDir := t.TempDir()
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	certUUID := uuid.New().String()
	db.Create(&models.SSLCertificate{UUID: certUUID, Name: "low-space", Provider: "custom", Domains: "lowspace.test"})
	svc := services.NewCertificateService(tmpDir, db, nil)
	mock := &mockBackupService{
		availableSpaceFunc: func() (int64, error) { return 1024, nil }, // 1KB - too low
	}
	h := NewCertificateHandler(svc, mock, nil)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	r.DELETE("/api/certificates/:uuid", h.Delete)
	req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+certUUID, http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusInsufficientStorage, w.Code)
}

// Verifies a failed space *check* is non-fatal: the handler proceeds with
// the backup anyway and the delete succeeds.
func TestDelete_UUID_BackupSpaceCheckError(t *testing.T) {
	tmpDir := t.TempDir()
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	certUUID := uuid.New().String()
	db.Create(&models.SSLCertificate{UUID: certUUID, Name: "space-err", Provider: "custom", Domains: "spaceerr.test"})
	svc := services.NewCertificateService(tmpDir, db, nil)
	mock := &mockBackupService{
		availableSpaceFunc: func() (int64, error) { return 0, fmt.Errorf("disk error") },
		createFunc:         func() (string, error) { return "/tmp/backup.tar.gz", nil },
	}
	h := NewCertificateHandler(svc, mock, nil)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	r.DELETE("/api/certificates/:uuid", h.Delete)
	req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+certUUID, http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	// Space check error → proceeds with backup → succeeds
	assert.Equal(t, http.StatusOK, w.Code)
}

// Verifies a failed backup *creation* aborts the delete with 500.
func TestDelete_UUID_BackupCreateError(t *testing.T) {
	tmpDir := t.TempDir()
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	certUUID := uuid.New().String()
	db.Create(&models.SSLCertificate{UUID: certUUID, Name: "backup-fail", Provider: "custom", Domains: "backupfail.test"})
	svc := services.NewCertificateService(tmpDir, db, nil)
	mock := &mockBackupService{
		availableSpaceFunc: func() (int64, error) { return 1024 * 1024 * 1024, nil },
		createFunc:         func() (string, error) { return "", fmt.Errorf("backup creation failed") },
	}
	h := NewCertificateHandler(svc, mock, nil)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	r.DELETE("/api/certificates/:uuid", h.Delete)
	req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+certUUID, http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
}
// --- Delete UUID with notification service ---

// Verifies the delete still succeeds when a real notification service is
// attached (notification failures must not affect the HTTP result).
func TestDelete_UUID_WithNotification(t *testing.T) {
	tmpDir := t.TempDir()
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.Setting{}, &models.Notification{}, &models.NotificationProvider{}))
	certUUID := uuid.New().String()
	db.Create(&models.SSLCertificate{UUID: certUUID, Name: "notify-cert", Provider: "custom", Domains: "notify.test"})
	svc := services.NewCertificateService(tmpDir, db, nil)
	notifSvc := services.NewNotificationService(db, nil)
	h := NewCertificateHandler(svc, nil, notifSvc)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	r.DELETE("/api/certificates/:uuid", h.Delete)
	req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+certUUID, http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusOK, w.Code)
}
// --- Validate handler ---
func TestValidate_Success(t *testing.T) {
tmpDir := t.TempDir()
db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
require.NoError(t, err)
require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
svc := services.NewCertificateService(tmpDir, db, nil)
h := NewCertificateHandler(svc, nil, nil)
certPEM, _, err := generateSelfSignedCertPEM()
require.NoError(t, err)
body := &bytes.Buffer{}
writer := multipart.NewWriter(body)
part, err := writer.CreateFormFile("certificate_file", "cert.pem")
require.NoError(t, err)
_, err = part.Write([]byte(certPEM))
require.NoError(t, err)
require.NoError(t, writer.Close())
r := gin.New()
r.Use(mockAuthMiddleware())
r.POST("/api/certificates/validate", h.Validate)
req := httptest.NewRequest(http.MethodPost, "/api/certificates/validate", body)
req.Header.Set("Content-Type", writer.FormDataContentType())
w := httptest.NewRecorder()
r.ServeHTTP(w, req)
assert.Equal(t, http.StatusOK, w.Code)
}
func TestValidate_InvalidCert(t *testing.T) {
tmpDir := t.TempDir()
db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
require.NoError(t, err)
require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
svc := services.NewCertificateService(tmpDir, db, nil)
h := NewCertificateHandler(svc, nil, nil)
body := &bytes.Buffer{}
writer := multipart.NewWriter(body)
part, err := writer.CreateFormFile("certificate_file", "cert.pem")
require.NoError(t, err)
_, err = part.Write([]byte("not a certificate"))
require.NoError(t, err)
require.NoError(t, writer.Close())
r := gin.New()
r.Use(mockAuthMiddleware())
r.POST("/api/certificates/validate", h.Validate)
req := httptest.NewRequest(http.MethodPost, "/api/certificates/validate", body)
req.Header.Set("Content-Type", writer.FormDataContentType())
w := httptest.NewRecorder()
r.ServeHTTP(w, req)
assert.Equal(t, http.StatusOK, w.Code)
assert.Contains(t, w.Body.String(), "unrecognized certificate format")
}
// TestValidate_NoCertFile sends a request that claims to be multipart but
// carries no body at all; the handler must answer 400 Bad Request.
func TestValidate_NoCertFile(t *testing.T) {
	dir := t.TempDir()
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	handler := NewCertificateHandler(services.NewCertificateService(dir, db, nil), nil, nil)

	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.POST("/api/certificates/validate", handler.Validate)

	// Multipart content type with an empty body: the form parse must fail.
	request := httptest.NewRequest(http.MethodPost, "/api/certificates/validate", http.NoBody)
	request.Header.Set("Content-Type", "multipart/form-data; boundary=boundary")
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusBadRequest, recorder.Code)
}
// TestValidate_WithKeyAndChain uploads a matching certificate/key pair plus a
// chain file (the cert itself, since it is self-signed) and expects 200.
func TestValidate_WithKeyAndChain(t *testing.T) {
	dir := t.TempDir()
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	handler := NewCertificateHandler(services.NewCertificateService(dir, db, nil), nil, nil)

	certPEM, keyPEM, err := generateSelfSignedCertPEM()
	require.NoError(t, err)

	var payload bytes.Buffer
	form := multipart.NewWriter(&payload)

	certField, err := form.CreateFormFile("certificate_file", "cert.pem")
	require.NoError(t, err)
	_, err = certField.Write([]byte(certPEM))
	require.NoError(t, err)

	keyField, err := form.CreateFormFile("key_file", "key.pem")
	require.NoError(t, err)
	_, err = keyField.Write([]byte(keyPEM))
	require.NoError(t, err)

	// Self-signed: the certificate doubles as its own chain.
	chainField, err := form.CreateFormFile("chain_file", "chain.pem")
	require.NoError(t, err)
	_, err = chainField.Write([]byte(certPEM))
	require.NoError(t, err)

	require.NoError(t, form.Close())

	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.POST("/api/certificates/validate", handler.Validate)

	request := httptest.NewRequest(http.MethodPost, "/api/certificates/validate", &payload)
	request.Header.Set("Content-Type", form.FormDataContentType())
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusOK, recorder.Code)
}
// --- Get handler DB error (non-NotFound) ---
// TestGet_DBError exercises the Get handler's non-NotFound database error
// branch: the schema is never migrated, so any lookup fails with
// "no such table" and must surface as 500, not 404.
func TestGet_DBError(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	// AutoMigrate is intentionally skipped here.
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)

	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.GET("/api/certificates/:uuid", handler.Get)

	request := httptest.NewRequest(http.MethodGet, "/api/certificates/"+uuid.New().String(), http.NoBody)
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	// A missing table is an internal failure, not a missing record.
	assert.Equal(t, http.StatusInternalServerError, recorder.Code)
}
// --- Export handler: re-auth and service error paths ---
// TestExport_IncludeKey_MissingPassword: asking for the private key in an
// export without supplying the re-authentication password must be refused
// with 403 Forbidden.
func TestExport_IncludeKey_MissingPassword(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.User{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)
	handler.SetDB(db)

	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.POST("/api/certificates/:uuid/export", handler.Export)

	// include_key is set but no password field is present.
	payload := bytes.NewBufferString(`{"format":"pem","include_key":true}`)
	request := httptest.NewRequest(http.MethodPost, "/api/certificates/"+uuid.New().String()+"/export", payload)
	request.Header.Set("Content-Type", "application/json")
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusForbidden, recorder.Code)
}
// TestExport_IncludeKey_NoUserContext: with no auth middleware installed the
// gin context has no "user" entry, so a key export (even with a password)
// must be rejected with 403.
func TestExport_IncludeKey_NoUserContext(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.User{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)
	handler.SetDB(db)

	// Deliberately no middleware: the "user" context key is never set.
	router := gin.New()
	router.POST("/api/certificates/:uuid/export", handler.Export)

	payload := bytes.NewBufferString(`{"format":"pem","include_key":true,"password":"somepass"}`)
	request := httptest.NewRequest(http.MethodPost, "/api/certificates/"+uuid.New().String()+"/export", payload)
	request.Header.Set("Content-Type", "application/json")
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusForbidden, recorder.Code)
}
// TestExport_IncludeKey_InvalidClaimsType stores a plain string under the
// "user" context key; the handler's claims type assertion fails and the
// export is refused with 403.
func TestExport_IncludeKey_InvalidClaimsType(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.User{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)
	handler.SetDB(db)

	router := gin.New()
	router.Use(func(c *gin.Context) {
		// Wrong claims shape on purpose.
		c.Set("user", "not-a-map")
		c.Next()
	})
	router.POST("/api/certificates/:uuid/export", handler.Export)

	payload := bytes.NewBufferString(`{"format":"pem","include_key":true,"password":"somepass"}`)
	request := httptest.NewRequest(http.MethodPost, "/api/certificates/"+uuid.New().String()+"/export", payload)
	request.Header.Set("Content-Type", "application/json")
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusForbidden, recorder.Code)
}
// TestExport_IncludeKey_UserIDNotInClaims installs a claims map that lacks
// the "id" entry; the handler cannot identify the caller and must answer 403.
func TestExport_IncludeKey_UserIDNotInClaims(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.User{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)
	handler.SetDB(db)

	router := gin.New()
	router.Use(func(c *gin.Context) {
		// Claims map present, but no "id" key.
		c.Set("user", map[string]any{})
		c.Next()
	})
	router.POST("/api/certificates/:uuid/export", handler.Export)

	payload := bytes.NewBufferString(`{"format":"pem","include_key":true,"password":"somepass"}`)
	request := httptest.NewRequest(http.MethodPost, "/api/certificates/"+uuid.New().String()+"/export", payload)
	request.Header.Set("Content-Type", "application/json")
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusForbidden, recorder.Code)
}
// TestExport_IncludeKey_UserNotFoundInDB presents claims for a user ID that
// does not exist in the users table; the password re-check cannot succeed and
// the export is refused with 403.
func TestExport_IncludeKey_UserNotFoundInDB(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.User{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)
	handler.SetDB(db)

	router := gin.New()
	router.Use(func(c *gin.Context) {
		// ID 9999 was never created; JSON numbers arrive as float64.
		c.Set("user", map[string]any{"id": float64(9999)})
		c.Next()
	})
	router.POST("/api/certificates/:uuid/export", handler.Export)

	payload := bytes.NewBufferString(`{"format":"pem","include_key":true,"password":"somepass"}`)
	request := httptest.NewRequest(http.MethodPost, "/api/certificates/"+uuid.New().String()+"/export", payload)
	request.Header.Set("Content-Type", "application/json")
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusForbidden, recorder.Code)
}
// TestExport_IncludeKey_WrongPassword creates a real user and then attempts a
// key export with an incorrect re-auth password, expecting 403.
func TestExport_IncludeKey_WrongPassword(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.User{}))

	account := &models.User{UUID: uuid.New().String(), Email: "export@example.com", Name: "Export User"}
	require.NoError(t, account.SetPassword("correctpass"))
	require.NoError(t, db.Create(account).Error)

	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)
	handler.SetDB(db)

	router := gin.New()
	router.Use(func(c *gin.Context) {
		c.Set("user", map[string]any{"id": float64(account.ID)})
		c.Next()
	})
	router.POST("/api/certificates/:uuid/export", handler.Export)

	// Password does not match the one set above.
	payload := bytes.NewBufferString(`{"format":"pem","include_key":true,"password":"wrongpass"}`)
	request := httptest.NewRequest(http.MethodPost, "/api/certificates/"+uuid.New().String()+"/export", payload)
	request.Header.Set("Content-Type", "application/json")
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusForbidden, recorder.Code)
}
// TestExport_CertNotFound exports a random, nonexistent certificate UUID and
// expects 404.
func TestExport_CertNotFound(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)

	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.POST("/api/certificates/:uuid/export", handler.Export)

	payload := bytes.NewBufferString(`{"format":"pem"}`)
	request := httptest.NewRequest(http.MethodPost, "/api/certificates/"+uuid.New().String()+"/export", payload)
	request.Header.Set("Content-Type", "application/json")
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusNotFound, recorder.Code)
}
// TestExport_ServiceError stores a real certificate row and then requests an
// export format the service does not support, driving the 500 branch.
func TestExport_ServiceError(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))

	certUUID := uuid.New().String()
	record := models.SSLCertificate{UUID: certUUID, Name: "test", Domains: "test.example.com", Provider: "custom"}
	require.NoError(t, db.Create(&record).Error)

	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)

	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.POST("/api/certificates/:uuid/export", handler.Export)

	// A format the service cannot produce.
	payload := bytes.NewBufferString(`{"format":"unsupported_xyz"}`)
	request := httptest.NewRequest(http.MethodPost, "/api/certificates/"+certUUID+"/export", payload)
	request.Header.Set("Content-Type", "application/json")
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusInternalServerError, recorder.Code)
}
// --- Delete numeric ID paths ---
// TestDelete_NumericID_UsageCheckError deletes by numeric ID against a schema
// that lacks the proxy_hosts table, so the in-use check fails and the handler
// answers 500.
func TestDelete_NumericID_UsageCheckError(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	// Only the certificate table exists: IsCertificateInUse hits a DB error.
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)

	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.DELETE("/api/certificates/:uuid", handler.Delete)

	request := httptest.NewRequest(http.MethodDelete, "/api/certificates/1", http.NoBody)
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusInternalServerError, recorder.Code)
}
// TestDelete_NumericID_LowDiskSpace: the backup service reports almost no
// free space, so deletion by numeric ID is refused with 507.
func TestDelete_NumericID_LowDiskSpace(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))

	record := models.SSLCertificate{UUID: uuid.New().String(), Name: "low-space", Domains: "lowspace.example.com", Provider: "custom"}
	require.NoError(t, db.Create(&record).Error)

	// 1 KiB free — far below the handler's 100 MB threshold.
	backup := &mockBackupService{
		availableSpaceFunc: func() (int64, error) { return 1024, nil },
		createFunc:         func() (string, error) { return "", nil },
	}
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), backup, nil)

	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.DELETE("/api/certificates/:uuid", handler.Delete)

	request := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/certificates/%d", record.ID), http.NoBody)
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusInsufficientStorage, recorder.Code)
}
// TestDelete_NumericID_BackupError: disk space is ample but the pre-delete
// backup fails, so the delete-by-numeric-ID path answers 500.
func TestDelete_NumericID_BackupError(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))

	record := models.SSLCertificate{UUID: uuid.New().String(), Name: "backup-err", Domains: "backuperr.example.com", Provider: "custom"}
	require.NoError(t, db.Create(&record).Error)

	backup := &mockBackupService{
		// 1 GiB free: the space check passes.
		availableSpaceFunc: func() (int64, error) { return 1 << 30, nil },
		// The backup itself fails.
		createFunc: func() (string, error) { return "", fmt.Errorf("backup create failed") },
	}
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), backup, nil)

	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.DELETE("/api/certificates/:uuid", handler.Delete)

	request := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/certificates/%d", record.ID), http.NoBody)
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusInternalServerError, recorder.Code)
}
// TestDelete_NumericID_DeleteError migrates only proxy_hosts so that the
// certificate delete itself fails ("no such table"), producing a 500.
func TestDelete_NumericID_DeleteError(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	// No SSLCertificate table: DeleteCertificateByID must error out.
	require.NoError(t, db.AutoMigrate(&models.ProxyHost{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)

	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.DELETE("/api/certificates/:uuid", handler.Delete)

	request := httptest.NewRequest(http.MethodDelete, "/api/certificates/42", http.NoBody)
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusInternalServerError, recorder.Code)
}
// --- Delete UUID: internal usage-check error ---
// TestDelete_UUID_UsageCheckInternalError deletes by UUID while proxy_hosts
// is missing, so the in-use lookup fails internally and yields 500.
func TestDelete_UUID_UsageCheckInternalError(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	// Only certificates are migrated: IsCertificateInUse hits a DB error.
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}))

	certUUID := uuid.New().String()
	record := models.SSLCertificate{UUID: certUUID, Name: "uuid-err", Domains: "uuiderr.example.com", Provider: "custom"}
	require.NoError(t, db.Create(&record).Error)

	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)

	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.DELETE("/api/certificates/:uuid", handler.Delete)

	request := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+certUUID, http.NoBody)
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusInternalServerError, recorder.Code)
}
// --- sendDeleteNotification: rate limit ---
// TestSendDeleteNotification_RateLimit invokes sendDeleteNotification twice
// with the same certificate reference: the first call records a timestamp,
// the second takes the rate-limit branch. This is a coverage-only test; it
// has nothing observable to assert.
func TestSendDeleteNotification_RateLimit(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))

	handler := NewCertificateHandler(
		services.NewCertificateService(t.TempDir(), db, nil),
		nil,
		services.NewNotificationService(db, nil),
	)

	recorder := httptest.NewRecorder()
	ctx, _ := gin.CreateTestContext(recorder)
	ctx.Request = httptest.NewRequest(http.MethodDelete, "/", http.NoBody)

	ref := uuid.New().String()
	handler.sendDeleteNotification(ctx, ref) // sets the rate-limit timestamp
	handler.sendDeleteNotification(ctx, ref) // suppressed by the rate limiter
}
// --- Update: empty UUID param (lines 207-209) ---
func TestUpdate_EmptyUUID(t *testing.T) {
svc := services.NewCertificateService(t.TempDir(), nil, nil)
h := NewCertificateHandler(svc, nil, nil)
w := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(w)
ctx.Request = httptest.NewRequest(http.MethodPut, "/api/certificates/", bytes.NewBufferString(`{"name":"test"}`))
ctx.Request.Header.Set("Content-Type", "application/json")
// No Params set — c.Param("uuid") returns ""
h.Update(ctx)
assert.Equal(t, http.StatusBadRequest, w.Code)
}
// --- Update: DB error (non-ErrCertNotFound) → lines 223-225 ---
// TestUpdate_DBError drives the Update handler's generic DB-error branch
// (anything other than ErrCertNotFound): the schema is never migrated, so the
// lookup fails with "no such table" and the handler answers 500.
func TestUpdate_DBError(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	// AutoMigrate intentionally omitted: ssl_certificates does not exist.
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)

	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.PUT("/api/certificates/:uuid", handler.Update)

	payload, _ := json.Marshal(map[string]string{"name": "new-name"})
	request := httptest.NewRequest(http.MethodPut, "/api/certificates/"+uuid.New().String(), bytes.NewReader(payload))
	request.Header.Set("Content-Type", "application/json")
	recorder := httptest.NewRecorder()
	router.ServeHTTP(recorder, request)

	assert.Equal(t, http.StatusInternalServerError, recorder.Code)
}

View File

@@ -25,15 +25,14 @@ func TestCertificateHandler_Delete_RequiresAuth(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
// Add a middleware that rejects all unauthenticated requests
r.Use(func(c *gin.Context) {
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
})
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/1", http.NoBody)
w := httptest.NewRecorder()
@@ -55,13 +54,12 @@ func TestCertificateHandler_List_RequiresAuth(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
// Add a middleware that rejects all unauthenticated requests
r.Use(func(c *gin.Context) {
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
})
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.GET("/api/certificates", h.List)
@@ -85,13 +83,12 @@ func TestCertificateHandler_Upload_RequiresAuth(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
// Add a middleware that rejects all unauthenticated requests
r.Use(func(c *gin.Context) {
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
})
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.POST("/api/certificates", h.Upload)
@@ -126,10 +123,9 @@ func TestCertificateHandler_Delete_DiskSpaceCheck(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
// Mock backup service that reports low disk space
mockBackup := &mockBackupService{
@@ -139,7 +135,7 @@ func TestCertificateHandler_Delete_DiskSpaceCheck(t *testing.T) {
}
h := NewCertificateHandler(svc, mockBackup, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/certificates/%d", cert.ID), http.NoBody)
w := httptest.NewRecorder()
@@ -179,10 +175,9 @@ func TestCertificateHandler_Delete_NotificationRateLimiting(t *testing.T) {
t.Fatalf("failed to create cert2: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
mockBackup := &mockBackupService{
createFunc: func() (string, error) {
@@ -191,7 +186,7 @@ func TestCertificateHandler_Delete_NotificationRateLimiting(t *testing.T) {
}
h := NewCertificateHandler(svc, mockBackup, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
// Delete first cert
req1 := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/certificates/%d", cert1.ID), http.NoBody)

View File

@@ -36,13 +36,12 @@ func mockAuthMiddleware() gin.HandlerFunc {
func setupCertTestRouter(t *testing.T, db *gorm.DB) *gin.Engine {
t.Helper()
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
return r
}
@@ -110,10 +109,9 @@ func TestDeleteCertificate_CreatesBackup(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
// Mock BackupService
backupCalled := false
@@ -125,7 +123,7 @@ func TestDeleteCertificate_CreatesBackup(t *testing.T) {
}
h := NewCertificateHandler(svc, mockBackupService, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(cert.ID), http.NoBody)
w := httptest.NewRecorder()
@@ -164,10 +162,9 @@ func TestDeleteCertificate_BackupFailure(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
// Mock BackupService that fails
mockBackupService := &mockBackupService{
@@ -177,7 +174,7 @@ func TestDeleteCertificate_BackupFailure(t *testing.T) {
}
h := NewCertificateHandler(svc, mockBackupService, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(cert.ID), http.NoBody)
w := httptest.NewRecorder()
@@ -218,10 +215,9 @@ func TestDeleteCertificate_InUse_NoBackup(t *testing.T) {
t.Fatalf("failed to create proxy host: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
// Mock BackupService
backupCalled := false
@@ -233,7 +229,7 @@ func TestDeleteCertificate_InUse_NoBackup(t *testing.T) {
}
h := NewCertificateHandler(svc, mockBackupService, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(cert.ID), http.NoBody)
w := httptest.NewRecorder()
@@ -296,11 +292,10 @@ func TestCertificateHandler_List(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.GET("/api/certificates", h.List)
@@ -324,10 +319,9 @@ func TestCertificateHandler_Upload_MissingName(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.POST("/api/certificates", h.Upload)
@@ -352,10 +346,9 @@ func TestCertificateHandler_Upload_MissingCertFile(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.POST("/api/certificates", h.Upload)
@@ -383,10 +376,9 @@ func TestCertificateHandler_Upload_MissingKeyFile(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.POST("/api/certificates", h.Upload)
@@ -410,13 +402,17 @@ func TestCertificateHandler_Upload_MissingKeyFile_MultipartWithCert(t *testing.T
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.POST("/api/certificates", h.Upload)
certPEM, _, genErr := generateSelfSignedCertPEM()
if genErr != nil {
t.Fatalf("failed to generate self-signed cert: %v", genErr)
}
var body bytes.Buffer
writer := multipart.NewWriter(&body)
_ = writer.WriteField("name", "testcert")
@@ -424,7 +420,7 @@ func TestCertificateHandler_Upload_MissingKeyFile_MultipartWithCert(t *testing.T
if createErr != nil {
t.Fatalf("failed to create form file: %v", createErr)
}
_, _ = part.Write([]byte("-----BEGIN CERTIFICATE-----\nMIIB\n-----END CERTIFICATE-----"))
_, _ = part.Write([]byte(certPEM))
_ = writer.Close()
req := httptest.NewRequest(http.MethodPost, "/api/certificates", &body)
@@ -435,7 +431,7 @@ func TestCertificateHandler_Upload_MissingKeyFile_MultipartWithCert(t *testing.T
if w.Code != http.StatusBadRequest {
t.Fatalf("expected 400 Bad Request, got %d, body=%s", w.Code, w.Body.String())
}
if !strings.Contains(w.Body.String(), "key_file") {
if !strings.Contains(w.Body.String(), "key_file is required") {
t.Fatalf("expected error message about key_file, got: %s", w.Body.String())
}
}
@@ -450,14 +446,13 @@ func TestCertificateHandler_Upload_Success(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
// Create a mock CertificateService that returns a created certificate
// Create a temporary services.CertificateService with a temp dir and DB
tmpDir := t.TempDir()
svc := services.NewCertificateService(tmpDir, db)
svc := services.NewCertificateService(tmpDir, db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.POST("/api/certificates", h.Upload)
@@ -525,12 +520,11 @@ func TestCertificateHandler_Upload_WithNotificationService(t *testing.T) {
require.NoError(t, err)
require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.Setting{}, &models.NotificationProvider{}))
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
tmpDir := t.TempDir()
svc := services.NewCertificateService(tmpDir, db)
svc := services.NewCertificateService(tmpDir, db, nil)
ns := services.NewNotificationService(db, nil)
h := NewCertificateHandler(svc, nil, ns)
r.POST("/api/certificates", h.Upload)
@@ -564,12 +558,11 @@ func TestDeleteCertificate_InvalidID(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/invalid", http.NoBody)
w := httptest.NewRecorder()
@@ -590,12 +583,11 @@ func TestDeleteCertificate_ZeroID(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/0", http.NoBody)
w := httptest.NewRecorder()
@@ -622,10 +614,9 @@ func TestDeleteCertificate_LowDiskSpace(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
// Mock BackupService with low disk space
mockBackupService := &mockBackupService{
@@ -635,7 +626,7 @@ func TestDeleteCertificate_LowDiskSpace(t *testing.T) {
}
h := NewCertificateHandler(svc, mockBackupService, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(cert.ID), http.NoBody)
w := httptest.NewRecorder()
@@ -671,10 +662,9 @@ func TestDeleteCertificate_DiskSpaceCheckError(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
// Mock BackupService with space check error but backup succeeds
mockBackupService := &mockBackupService{
@@ -687,7 +677,7 @@ func TestDeleteCertificate_DiskSpaceCheckError(t *testing.T) {
}
h := NewCertificateHandler(svc, mockBackupService, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(cert.ID), http.NoBody)
w := httptest.NewRecorder()
@@ -699,6 +689,122 @@ func TestDeleteCertificate_DiskSpaceCheckError(t *testing.T) {
}
}
// Test that an expired Let's Encrypt certificate not in use can be deleted.
// The backend has no provider-based restrictions; deletion policy is frontend-only.
func TestDeleteCertificate_ExpiredLetsEncrypt_NotInUse(t *testing.T) {
dbPath := t.TempDir() + "/cert_expired_le.db"
db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?_journal_mode=WAL&_busy_timeout=5000&_foreign_keys=1", dbPath)), &gorm.Config{})
if err != nil {
t.Fatalf("failed to open db: %v", err)
}
sqlDB, err := db.DB()
if err != nil {
t.Fatalf("failed to access sql db: %v", err)
}
sqlDB.SetMaxOpenConns(1)
sqlDB.SetMaxIdleConns(1)
if err = db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}); err != nil {
t.Fatalf("failed to migrate: %v", err)
}
expired := time.Now().Add(-24 * time.Hour)
cert := models.SSLCertificate{
UUID: "expired-le-cert",
Name: "expired-le",
Provider: "letsencrypt",
Domains: "expired.example.com",
ExpiresAt: &expired,
}
if err = db.Create(&cert).Error; err != nil {
t.Fatalf("failed to create cert: %v", err)
}
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db, nil)
mockBS := &mockBackupService{
createFunc: func() (string, error) {
return "backup-expired-le.tar.gz", nil
},
}
h := NewCertificateHandler(svc, mockBS, nil)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(cert.ID), http.NoBody)
w := httptest.NewRecorder()
r.ServeHTTP(w, req)
if w.Code != http.StatusOK {
t.Fatalf("expected 200 OK, got %d, body=%s", w.Code, w.Body.String())
}
var found models.SSLCertificate
if err = db.First(&found, cert.ID).Error; err == nil {
t.Fatal("expected expired LE certificate to be deleted")
}
}
// Test that a valid (non-expired) Let's Encrypt certificate not in use can be deleted.
// Confirms the backend imposes no provider-based restrictions on deletion.
func TestDeleteCertificate_ValidLetsEncrypt_NotInUse(t *testing.T) {
	dbPath := t.TempDir() + "/cert_valid_le.db"
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?_journal_mode=WAL&_busy_timeout=5000&_foreign_keys=1", dbPath)), &gorm.Config{})
	if err != nil {
		t.Fatalf("failed to open db: %v", err)
	}
	conn, err := db.DB()
	if err != nil {
		t.Fatalf("failed to access sql db: %v", err)
	}
	// Pin the pool to one connection to avoid SQLite lock contention under WAL.
	conn.SetMaxOpenConns(1)
	conn.SetMaxIdleConns(1)
	if err = db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}); err != nil {
		t.Fatalf("failed to migrate: %v", err)
	}
	expiresAt := time.Now().Add(30 * 24 * time.Hour)
	record := models.SSLCertificate{
		UUID:      "valid-le-cert",
		Name:      "valid-le",
		Provider:  "letsencrypt",
		Domains:   "valid.example.com",
		ExpiresAt: &expiresAt,
	}
	if err = db.Create(&record).Error; err != nil {
		t.Fatalf("failed to create cert: %v", err)
	}
	backup := &mockBackupService{
		createFunc: func() (string, error) {
			return "backup-valid-le.tar.gz", nil
		},
	}
	handler := NewCertificateHandler(services.NewCertificateService("/tmp", db, nil), backup, nil)
	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.DELETE("/api/certificates/:uuid", handler.Delete)
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(record.ID), http.NoBody))
	if rec.Code != http.StatusOK {
		t.Fatalf("expected 200 OK, got %d, body=%s", rec.Code, rec.Body.String())
	}
	var remaining models.SSLCertificate
	if err = db.First(&remaining, record.ID).Error; err == nil {
		t.Fatal("expected valid LE certificate to be deleted")
	}
}
// Test Delete when IsCertificateInUse fails
func TestDeleteCertificate_UsageCheckError(t *testing.T) {
db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
@@ -717,12 +823,11 @@ func TestDeleteCertificate_UsageCheckError(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
h := NewCertificateHandler(svc, nil, nil)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(cert.ID), http.NoBody)
w := httptest.NewRecorder()
@@ -755,10 +860,9 @@ func TestDeleteCertificate_NotificationRateLimit(t *testing.T) {
t.Fatalf("failed to create cert2: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
svc := services.NewCertificateService("/tmp", db, nil)
ns := services.NewNotificationService(db, nil)
mockBackupService := &mockBackupService{
@@ -768,7 +872,7 @@ func TestDeleteCertificate_NotificationRateLimit(t *testing.T) {
}
h := NewCertificateHandler(svc, mockBackupService, ns)
r.DELETE("/api/certificates/:id", h.Delete)
r.DELETE("/api/certificates/:uuid", h.Delete)
// Delete first certificate
req := httptest.NewRequest(http.MethodDelete, "/api/certificates/"+toStr(cert1.ID), http.NoBody)

View File

@@ -0,0 +1,382 @@
package handlers
import (
"bytes"
"encoding/base64"
"encoding/json"
"fmt"
"mime/multipart"
"net/http"
"net/http/httptest"
"testing"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"github.com/Wikid82/charon/backend/internal/crypto"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/Wikid82/charon/backend/internal/services"
)
// --- Upload: with chain file (covers chain_file multipart branch) ---
func TestCertificateHandler_Upload_WithChainFile(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)
	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.POST("/api/certificates", handler.Upload)
	certPEM, keyPEM, err := generateSelfSignedCertPEM()
	require.NoError(t, err)
	var payload bytes.Buffer
	mw := multipart.NewWriter(&payload)
	_ = mw.WriteField("name", "chain-cert")
	// Attach cert, key, and chain; the chain reuses the leaf PEM, which is
	// sufficient to exercise the chain_file branch.
	for _, f := range []struct{ field, name, data string }{
		{"certificate_file", "cert.pem", certPEM},
		{"key_file", "key.pem", keyPEM},
		{"chain_file", "chain.pem", certPEM},
	} {
		fw, _ := mw.CreateFormFile(f.field, f.name)
		_, _ = fw.Write([]byte(f.data))
	}
	_ = mw.Close()
	req := httptest.NewRequest(http.MethodPost, "/api/certificates", &payload)
	req.Header.Set("Content-Type", mw.FormDataContentType())
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, req)
	assert.Equal(t, http.StatusCreated, rec.Code, "body: %s", rec.Body.String())
}
// --- Upload: invalid cert data ---
func TestCertificateHandler_Upload_InvalidCertData(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)
	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.POST("/api/certificates", handler.Upload)
	// Non-PEM bytes in both files should be rejected with 400.
	var payload bytes.Buffer
	mw := multipart.NewWriter(&payload)
	_ = mw.WriteField("name", "bad-cert")
	certPart, _ := mw.CreateFormFile("certificate_file", "cert.pem")
	_, _ = certPart.Write([]byte("not-a-cert"))
	keyPart, _ := mw.CreateFormFile("key_file", "key.pem")
	_, _ = keyPart.Write([]byte("not-a-key"))
	_ = mw.Close()
	req := httptest.NewRequest(http.MethodPost, "/api/certificates", &payload)
	req.Header.Set("Content-Type", mw.FormDataContentType())
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, req)
	assert.Equal(t, http.StatusBadRequest, rec.Code)
}
// --- Export re-authentication flow ---
// setupExportRouter builds a bare gin engine and a certificate handler wired
// to the given database, with certificate storage in a per-test temp dir.
func setupExportRouter(t *testing.T, db *gorm.DB) (*gin.Engine, *CertificateHandler) {
	t.Helper()
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)
	handler.SetDB(db)
	return gin.New(), handler
}
// newTestEncSvc returns an EncryptionService backed by a deterministic
// 32-byte key (0,1,2,...,31) so tests are reproducible.
func newTestEncSvc(t *testing.T) *crypto.EncryptionService {
	t.Helper()
	raw := make([]byte, 32)
	for i := 0; i < len(raw); i++ {
		raw[i] = byte(i)
	}
	encSvc, err := crypto.NewEncryptionService(base64.StdEncoding.EncodeToString(raw))
	require.NoError(t, err)
	return encSvc
}
// Export with include_key and the correct account password should succeed
// and stream the bundle as an attachment.
func TestCertificateHandler_Export_IncludeKeySuccess(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.User{}))
	owner := models.User{UUID: "export-user-1", Email: "export@test.com", Name: "Exporter"}
	require.NoError(t, owner.SetPassword("correctpassword"))
	require.NoError(t, db.Create(&owner).Error)
	certSvc := services.NewCertificateService(t.TempDir(), db, newTestEncSvc(t))
	handler := NewCertificateHandler(certSvc, nil, nil)
	handler.SetDB(db)
	certPEM, keyPEM, err := generateSelfSignedCertPEM()
	require.NoError(t, err)
	info, err := certSvc.UploadCertificate("export-cert", certPEM, keyPEM, "")
	require.NoError(t, err)
	router := gin.New()
	router.Use(func(c *gin.Context) {
		c.Set("user", map[string]any{"id": owner.ID})
		c.Next()
	})
	router.POST("/api/certificates/:uuid/export", handler.Export)
	body, _ := json.Marshal(map[string]any{
		"format":      "pem",
		"include_key": true,
		"password":    "correctpassword",
	})
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/"+info.UUID+"/export", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, req)
	assert.Equal(t, http.StatusOK, rec.Code, "body: %s", rec.Body.String())
	assert.Contains(t, rec.Header().Get("Content-Disposition"), "export-cert.pem")
}
// A wrong account password must be rejected with 403 before any certificate
// lookup, so a nonexistent UUID is fine here.
func TestCertificateHandler_Export_IncludeKeyWrongPassword(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.User{}))
	router, handler := setupExportRouter(t, db)
	owner := models.User{UUID: "wrong-pw-user", Email: "wrong@test.com", Name: "Wrong"}
	require.NoError(t, owner.SetPassword("rightpass"))
	require.NoError(t, db.Create(&owner).Error)
	router.Use(func(c *gin.Context) {
		c.Set("user", map[string]any{"id": owner.ID})
		c.Next()
	})
	router.POST("/api/certificates/:uuid/export", handler.Export)
	body, _ := json.Marshal(map[string]any{
		"format":      "pem",
		"include_key": true,
		"password":    "wrongpass",
	})
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/fake-uuid/export", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, req)
	assert.Equal(t, http.StatusForbidden, rec.Code)
	assert.Contains(t, rec.Body.String(), "incorrect password")
}
// Export without any "user" value in the gin context is refused outright.
func TestCertificateHandler_Export_NoUserInContext(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.User{}))
	router, handler := setupExportRouter(t, db)
	router.POST("/api/certificates/:uuid/export", handler.Export)
	body, _ := json.Marshal(map[string]any{
		"format":      "pem",
		"include_key": true,
		"password":    "anything",
	})
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/fake-uuid/export", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, req)
	assert.Equal(t, http.StatusForbidden, rec.Code)
	assert.Contains(t, rec.Body.String(), "authentication required")
}
// A "user" context value that is not the expected map shape yields 403.
func TestCertificateHandler_Export_InvalidSession(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.User{}))
	router, handler := setupExportRouter(t, db)
	router.Use(func(c *gin.Context) {
		c.Set("user", "not-a-map")
		c.Next()
	})
	router.POST("/api/certificates/:uuid/export", handler.Export)
	body, _ := json.Marshal(map[string]any{
		"format":      "pem",
		"include_key": true,
		"password":    "anything",
	})
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/fake-uuid/export", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, req)
	assert.Equal(t, http.StatusForbidden, rec.Code)
	assert.Contains(t, rec.Body.String(), "invalid session")
}
// A session map lacking the "id" key is treated as an invalid session.
func TestCertificateHandler_Export_MissingUserID(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.User{}))
	router, handler := setupExportRouter(t, db)
	router.Use(func(c *gin.Context) {
		c.Set("user", map[string]any{"name": "test"})
		c.Next()
	})
	router.POST("/api/certificates/:uuid/export", handler.Export)
	body, _ := json.Marshal(map[string]any{
		"format":      "pem",
		"include_key": true,
		"password":    "anything",
	})
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/fake-uuid/export", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, req)
	assert.Equal(t, http.StatusForbidden, rec.Code)
	assert.Contains(t, rec.Body.String(), "invalid session")
}
// A session referencing a user ID with no matching DB row yields 403.
func TestCertificateHandler_Export_UserNotFound(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.User{}))
	router, handler := setupExportRouter(t, db)
	router.Use(func(c *gin.Context) {
		c.Set("user", map[string]any{"id": uint(9999)})
		c.Next()
	})
	router.POST("/api/certificates/:uuid/export", handler.Export)
	body, _ := json.Marshal(map[string]any{
		"format":      "pem",
		"include_key": true,
		"password":    "anything",
	})
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/fake-uuid/export", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, req)
	assert.Equal(t, http.StatusForbidden, rec.Code)
	assert.Contains(t, rec.Body.String(), "user not found")
}
// --- Validate handler with key and chain ---
func TestCertificateHandler_Validate_WithKeyAndChain(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)
	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.POST("/api/certificates/validate", handler.Validate)
	certPEM, keyPEM, err := generateSelfSignedCertPEM()
	require.NoError(t, err)
	var payload bytes.Buffer
	mw := multipart.NewWriter(&payload)
	// Leaf PEM doubles as the chain file; enough to cover the chain branch.
	for _, f := range []struct{ field, name, data string }{
		{"certificate_file", "cert.pem", certPEM},
		{"key_file", "key.pem", keyPEM},
		{"chain_file", "chain.pem", certPEM},
	} {
		fw, _ := mw.CreateFormFile(f.field, f.name)
		_, _ = fw.Write([]byte(f.data))
	}
	_ = mw.Close()
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/validate", &payload)
	req.Header.Set("Content-Type", mw.FormDataContentType())
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, req)
	assert.Equal(t, http.StatusOK, rec.Code, "body: %s", rec.Body.String())
}
// Invalid certificate bytes still return 200; failures are reported in the
// response body's "errors" array rather than via status code.
func TestCertificateHandler_Validate_InvalidCert(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)
	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.POST("/api/certificates/validate", handler.Validate)
	var payload bytes.Buffer
	mw := multipart.NewWriter(&payload)
	certPart, _ := mw.CreateFormFile("certificate_file", "cert.pem")
	_, _ = certPart.Write([]byte("not-a-cert"))
	_ = mw.Close()
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/validate", &payload)
	req.Header.Set("Content-Type", mw.FormDataContentType())
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, req)
	assert.Equal(t, http.StatusOK, rec.Code)
	var parsed map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &parsed))
	errList, ok := parsed["errors"].([]any)
	assert.True(t, ok)
	assert.Greater(t, len(errList), 0, "expected validation errors in response")
}
// Omitting certificate_file entirely is a 400 with an explanatory message.
func TestCertificateHandler_Validate_MissingCertFile(t *testing.T) {
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
	handler := NewCertificateHandler(services.NewCertificateService(t.TempDir(), db, nil), nil, nil)
	router := gin.New()
	router.Use(mockAuthMiddleware())
	router.POST("/api/certificates/validate", handler.Validate)
	var payload bytes.Buffer
	mw := multipart.NewWriter(&payload)
	_ = mw.WriteField("name", "test")
	_ = mw.Close()
	req := httptest.NewRequest(http.MethodPost, "/api/certificates/validate", &payload)
	req.Header.Set("Content-Type", mw.FormDataContentType())
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, req)
	assert.Equal(t, http.StatusBadRequest, rec.Code)
	assert.Contains(t, rec.Body.String(), "certificate_file is required")
}

View File

@@ -129,7 +129,6 @@ func Test_mapCrowdsecStatus(t *testing.T) {
// Test actorFromContext helper function
func Test_actorFromContext(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Run("with userID in context", func(t *testing.T) {
c, _ := gin.CreateTestContext(httptest.NewRecorder())
@@ -157,7 +156,6 @@ func Test_actorFromContext(t *testing.T) {
// Test hubEndpoints helper function
func Test_hubEndpoints(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Run("nil Hub returns nil", func(t *testing.T) {
h := &CrowdsecHandler{Hub: nil}
@@ -193,7 +191,6 @@ func TestRealCommandExecutor_Execute(t *testing.T) {
// Test isCerberusEnabled helper
func Test_isCerberusEnabled(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.Setting{}))
@@ -243,7 +240,6 @@ func Test_isCerberusEnabled(t *testing.T) {
// Test isConsoleEnrollmentEnabled helper
func Test_isConsoleEnrollmentEnabled(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.Setting{}))
@@ -293,7 +289,6 @@ func Test_isConsoleEnrollmentEnabled(t *testing.T) {
// Test CrowdsecHandler.ExportConfig
func TestCrowdsecHandler_ExportConfig(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
@@ -320,7 +315,6 @@ func TestCrowdsecHandler_ExportConfig(t *testing.T) {
// Test CrowdsecHandler.CheckLAPIHealth
func TestCrowdsecHandler_CheckLAPIHealth(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
@@ -340,7 +334,6 @@ func TestCrowdsecHandler_CheckLAPIHealth(t *testing.T) {
// Test CrowdsecHandler Console endpoints
func TestCrowdsecHandler_ConsoleStatus(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}, &models.CrowdsecConsoleEnrollment{}))
@@ -362,7 +355,6 @@ func TestCrowdsecHandler_ConsoleStatus(t *testing.T) {
}
func TestCrowdsecHandler_ConsoleEnroll_Disabled(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -385,7 +377,6 @@ func TestCrowdsecHandler_ConsoleEnroll_Disabled(t *testing.T) {
}
func TestCrowdsecHandler_DeleteConsoleEnrollment(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -405,7 +396,6 @@ func TestCrowdsecHandler_DeleteConsoleEnrollment(t *testing.T) {
// Test CrowdsecHandler.BanIP and UnbanIP
func TestCrowdsecHandler_BanIP(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -434,7 +424,6 @@ func TestCrowdsecHandler_BanIP(t *testing.T) {
}
func TestCrowdsecHandler_UnbanIP(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -460,7 +449,6 @@ func TestCrowdsecHandler_UnbanIP(t *testing.T) {
// Test CrowdsecHandler.UpdateAcquisitionConfig
func TestCrowdsecHandler_UpdateAcquisitionConfig(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -540,7 +528,6 @@ func Test_safeFloat64ToUint(t *testing.T) {
// Test CrowdsecHandler_DiagnosticsConnectivity
func TestCrowdsecHandler_DiagnosticsConnectivity(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}, &models.CrowdsecConsoleEnrollment{}))
@@ -569,7 +556,6 @@ func TestCrowdsecHandler_DiagnosticsConnectivity(t *testing.T) {
// Test CrowdsecHandler_DiagnosticsConfig
func TestCrowdsecHandler_DiagnosticsConfig(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -595,7 +581,6 @@ func TestCrowdsecHandler_DiagnosticsConfig(t *testing.T) {
// Test CrowdsecHandler_ConsoleHeartbeat
func TestCrowdsecHandler_ConsoleHeartbeat(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}, &models.CrowdsecConsoleEnrollment{}))
@@ -623,7 +608,6 @@ func TestCrowdsecHandler_ConsoleHeartbeat(t *testing.T) {
// Test CrowdsecHandler_ConsoleHeartbeat_Disabled
func TestCrowdsecHandler_ConsoleHeartbeat_Disabled(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))

View File

@@ -33,7 +33,6 @@ func createValidSQLiteDB(t *testing.T, dbPath string) error {
// Use a real BackupService, but point it at tmpDir for isolation
func TestBackupHandlerQuick(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
// Create a valid SQLite database for backup operations
dbPath := filepath.Join(tmpDir, "db.sqlite")

View File

@@ -31,7 +31,6 @@ func setupCredentialHandlerTest(t *testing.T) (*gin.Engine, *gorm.DB, *models.DN
_ = os.Unsetenv("CHARON_ENCRYPTION_KEY")
})
gin.SetMode(gin.TestMode)
router := gin.New()
// Use test name for unique database with WAL mode to avoid locking issues

View File

@@ -251,7 +251,6 @@ func TestConfigArchiveValidator_RequiredFiles(t *testing.T) {
// TestImportConfig_Validation tests the enhanced ImportConfig handler with validation.
func TestImportConfig_Validation(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -320,7 +319,6 @@ func TestImportConfig_Validation(t *testing.T) {
// TestImportConfig_Rollback tests backup restoration on validation failure.
func TestImportConfig_Rollback(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()

View File

@@ -16,7 +16,6 @@ import (
// TestListPresetsShowsCachedStatus verifies the /presets endpoint marks cached presets.
func TestListPresetsShowsCachedStatus(t *testing.T) {
gin.SetMode(gin.TestMode)
cacheDir := t.TempDir()
dataDir := t.TempDir()

View File

@@ -16,7 +16,6 @@ import (
// ============================================
func TestUpdateAcquisitionConfigMissingContent(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -33,7 +32,6 @@ func TestUpdateAcquisitionConfigMissingContent(t *testing.T) {
}
func TestUpdateAcquisitionConfigInvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -49,7 +47,6 @@ func TestUpdateAcquisitionConfigInvalidJSON(t *testing.T) {
}
func TestGetLAPIDecisionsWithIPFilter(t *testing.T) {
gin.SetMode(gin.TestMode)
mockExec := &mockCommandExecutor{output: []byte(`[]`), err: nil}
h := &CrowdsecHandler{
CmdExec: mockExec,
@@ -68,7 +65,6 @@ func TestGetLAPIDecisionsWithIPFilter(t *testing.T) {
}
func TestGetLAPIDecisionsWithScopeFilter(t *testing.T) {
gin.SetMode(gin.TestMode)
mockExec := &mockCommandExecutor{output: []byte(`[]`), err: nil}
h := &CrowdsecHandler{
CmdExec: mockExec,
@@ -86,7 +82,6 @@ func TestGetLAPIDecisionsWithScopeFilter(t *testing.T) {
}
func TestGetLAPIDecisionsWithTypeFilter(t *testing.T) {
gin.SetMode(gin.TestMode)
mockExec := &mockCommandExecutor{output: []byte(`[]`), err: nil}
h := &CrowdsecHandler{
CmdExec: mockExec,
@@ -104,7 +99,6 @@ func TestGetLAPIDecisionsWithTypeFilter(t *testing.T) {
}
func TestGetLAPIDecisionsWithMultipleFilters(t *testing.T) {
gin.SetMode(gin.TestMode)
mockExec := &mockCommandExecutor{output: []byte(`[]`), err: nil}
h := &CrowdsecHandler{
CmdExec: mockExec,

View File

@@ -32,7 +32,6 @@ func (m *MockCommandExecutor) ExecuteWithEnv(ctx context.Context, name string, a
// TestConsoleEnrollMissingKey covers the "enrollment_key required" branch
func TestConsoleEnrollMissingKey(t *testing.T) {
gin.SetMode(gin.TestMode)
mockExec := new(MockCommandExecutor)
@@ -59,7 +58,6 @@ func TestConsoleEnrollMissingKey(t *testing.T) {
// TestGetCachedPreset_ValidationAndMiss covers path param validation empty check (if any) and cache miss
func TestGetCachedPreset_ValidationAndMiss(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
cache, _ := crowdsec.NewHubCache(tmpDir, time.Hour)
@@ -86,7 +84,6 @@ func TestGetCachedPreset_ValidationAndMiss(t *testing.T) {
}
func TestGetCachedPreset_SlugRequired(t *testing.T) {
gin.SetMode(gin.TestMode)
h := &CrowdsecHandler{}
t.Setenv("FEATURE_CERBERUS_ENABLED", "1")

View File

@@ -22,7 +22,6 @@ import (
// TestUpdateAcquisitionConfigSuccess tests successful config update
func TestUpdateAcquisitionConfigSuccess(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
// Create fake acquis.yaml path in tmp
@@ -50,7 +49,6 @@ func TestUpdateAcquisitionConfigSuccess(t *testing.T) {
// TestRegisterBouncerScriptPathError tests script not found
func TestRegisterBouncerScriptPathError(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -92,7 +90,6 @@ func (f *fakeExecWithOutput) Status(ctx context.Context, configDir string) (runn
// TestGetLAPIDecisionsRequestError tests request creation error
func TestGetLAPIDecisionsEmptyResponse(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -109,7 +106,6 @@ func TestGetLAPIDecisionsEmptyResponse(t *testing.T) {
// TestGetLAPIDecisionsWithFilters tests query parameter handling
func TestGetLAPIDecisionsIPQueryParam(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -124,7 +120,6 @@ func TestGetLAPIDecisionsIPQueryParam(t *testing.T) {
// TestGetLAPIDecisionsScopeParam tests scope parameter
func TestGetLAPIDecisionsScopeParam(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -139,7 +134,6 @@ func TestGetLAPIDecisionsScopeParam(t *testing.T) {
// TestGetLAPIDecisionsTypeParam tests type parameter
func TestGetLAPIDecisionsTypeParam(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -154,7 +148,6 @@ func TestGetLAPIDecisionsTypeParam(t *testing.T) {
// TestGetLAPIDecisionsCombinedParams tests multiple query params
func TestGetLAPIDecisionsCombinedParams(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -169,7 +162,6 @@ func TestGetLAPIDecisionsCombinedParams(t *testing.T) {
// TestCheckLAPIHealthTimeout tests health check
func TestCheckLAPIHealthRequest(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -223,7 +215,6 @@ func TestGetLAPIKeyAlternative(t *testing.T) {
// TestStatusContextTimeout tests context handling
func TestStatusRequest(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -238,7 +229,6 @@ func TestStatusRequest(t *testing.T) {
// TestRegisterBouncerExecutionSuccess tests successful registration
func TestRegisterBouncerFlow(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
// Create fake script
@@ -267,7 +257,6 @@ func TestRegisterBouncerFlow(t *testing.T) {
// TestRegisterBouncerWithError tests execution error
func TestRegisterBouncerExecutionFailure(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
// Create fake script
@@ -294,7 +283,6 @@ func TestRegisterBouncerExecutionFailure(t *testing.T) {
// TestGetAcquisitionConfigFileError tests file read error
func TestGetAcquisitionConfigNotPresent(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")

View File

@@ -0,0 +1,632 @@
package handlers
import (
"context"
"encoding/csv"
"encoding/json"
"fmt"
"math"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/Wikid82/charon/backend/internal/logger"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/Wikid82/charon/backend/internal/network"
"github.com/gin-gonic/gin"
)
// Cache TTL constants for dashboard endpoints.
const (
	dashSummaryTTL   = 30 * time.Second // summary cards: refreshed twice a minute
	dashTimelineTTL  = 60 * time.Second // timeline buckets
	dashTopIPsTTL    = 60 * time.Second // top offending IPs
	dashScenariosTTL = 60 * time.Second // scenario breakdown
	dashAlertsTTL    = 30 * time.Second // recent alerts list
	// exportMaxRows caps export size; presumably enforced by the export
	// handlers (use site not visible in this chunk) — confirm.
	exportMaxRows = 100_000
)
// parseTimeRange converts a range string to a start time (now - span, UTC).
// The empty string defaults to 24h; unknown values yield an error with the
// zero time.
func parseTimeRange(rangeStr string) (time.Time, error) {
	spans := map[string]time.Duration{
		"1h":  time.Hour,
		"6h":  6 * time.Hour,
		"24h": 24 * time.Hour,
		"":    24 * time.Hour, // default window
		"7d":  7 * 24 * time.Hour,
		"30d": 30 * 24 * time.Hour,
	}
	span, ok := spans[rangeStr]
	if !ok {
		return time.Time{}, fmt.Errorf("invalid range: %s (valid: 1h, 6h, 24h, 7d, 30d)", rangeStr)
	}
	return time.Now().UTC().Add(-span), nil
}
// normalizeRange returns the canonical range string, mapping the empty
// string to the default "24h".
func normalizeRange(r string) string {
	if r != "" {
		return r
	}
	return "24h"
}
// intervalForRange selects the default time-bucket interval for a given
// range string; unknown (or empty) ranges fall back to hourly buckets.
func intervalForRange(rangeStr string) string {
	byRange := map[string]string{
		"1h":  "5m",
		"6h":  "15m",
		"24h": "1h",
		"":    "1h",
		"7d":  "6h",
		"30d": "1d",
	}
	if interval, ok := byRange[rangeStr]; ok {
		return interval
	}
	return "1h"
}
// intervalToStrftime maps an interval string to the SQLite strftime
// expression used for time bucketing; unknown intervals fall back to
// hourly bucketing.
func intervalToStrftime(interval string) string {
	const hourly = "strftime('%Y-%m-%dT%H:00:00Z', created_at)"
	exprs := map[string]string{
		"5m":  "strftime('%Y-%m-%dT%H:', created_at) || printf('%02d:00Z', (CAST(strftime('%M', created_at) AS INTEGER) / 5) * 5)",
		"15m": "strftime('%Y-%m-%dT%H:', created_at) || printf('%02d:00Z', (CAST(strftime('%M', created_at) AS INTEGER) / 15) * 15)",
		"1h":  hourly,
		"6h":  "strftime('%Y-%m-%dT', created_at) || printf('%02d:00:00Z', (CAST(strftime('%H', created_at) AS INTEGER) / 6) * 6)",
		"1d":  "strftime('%Y-%m-%dT00:00:00Z', created_at)",
	}
	if expr, ok := exprs[interval]; ok {
		return expr
	}
	return hourly
}
// validInterval reports whether interval is one of the supported
// time-bucket sizes.
func validInterval(interval string) bool {
	for _, known := range []string{"5m", "15m", "1h", "6h", "1d"} {
		if interval == known {
			return true
		}
	}
	return false
}
// sanitizeCSVField neutralizes spreadsheet formula injection (CWE-1236) by
// prefixing a single quote when the field's first byte is a formula-trigger
// character; all other fields pass through unchanged.
func sanitizeCSVField(field string) string {
	if field == "" {
		return field
	}
	if strings.ContainsRune("=+-@\t\r", rune(field[0])) {
		return "'" + field
	}
	return field
}
// DashboardSummary returns aggregate counts for the dashboard summary cards.
//
// The "range" query parameter selects the window (1h/6h/24h/7d/30d; empty
// defaults to 24h). The assembled payload is cached per-range for
// dashSummaryTTL; a cache hit returns the stored payload as-is.
func (h *CrowdsecHandler) DashboardSummary(c *gin.Context) {
	rangeStr := normalizeRange(c.Query("range"))
	cacheKey := "dashboard:summary:" + rangeStr
	// Serve from cache when a fresh entry exists for this range.
	if cached, ok := h.dashCache.Get(cacheKey); ok {
		c.JSON(http.StatusOK, cached)
		return
	}
	since, err := parseTimeRange(rangeStr)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	// Historical metrics from SQLite.
	// NOTE(review): errors from these Count/Scan calls are ignored; the
	// counts silently stay zero on query failure — confirm this best-effort
	// behavior is intended.
	var totalDecisions int64
	h.DB.Model(&models.SecurityDecision{}).
		Where("source = ? AND created_at >= ?", "crowdsec", since).
		Count(&totalDecisions)
	var uniqueIPs int64
	h.DB.Model(&models.SecurityDecision{}).
		Where("source = ? AND created_at >= ?", "crowdsec", since).
		Distinct("ip").Count(&uniqueIPs)
	// Most frequent non-empty scenario in the window.
	var topScenario struct {
		Scenario string
		Cnt      int64
	}
	h.DB.Model(&models.SecurityDecision{}).
		Select("scenario, COUNT(*) as cnt").
		Where("source = ? AND created_at >= ? AND scenario != ''", "crowdsec", since).
		Group("scenario").
		Order("cnt DESC").
		Limit(1).
		Scan(&topScenario)
	// Trend calculation: compare current period vs previous equal-length period
	duration := time.Since(since)
	previousSince := since.Add(-duration)
	var previousCount int64
	h.DB.Model(&models.SecurityDecision{}).
		Where("source = ? AND created_at >= ? AND created_at < ?", "crowdsec", previousSince, since).
		Count(&previousCount)
	// Trend: percentage change vs. the previous equal-length period.
	// Formula: round((current - previous) / previous * 100, 1)
	// Special cases: no previous data → 0; no current data → -100%.
	var trend float64
	if previousCount == 0 {
		trend = 0.0
	} else if totalDecisions == 0 && previousCount > 0 {
		trend = -100.0
	} else {
		// *1000/10 rounds the percentage to one decimal place.
		trend = math.Round(float64(totalDecisions-previousCount)/float64(previousCount)*1000) / 10
	}
	// Active decisions from LAPI (real-time); -1 signals LAPI unreachable.
	activeDecisions := h.fetchActiveDecisionCount(c.Request.Context())
	result := gin.H{
		"total_decisions":  totalDecisions,
		"active_decisions": activeDecisions,
		"unique_ips":       uniqueIPs,
		"top_scenario":     topScenario.Scenario,
		"decisions_trend":  trend,
		"range":            rangeStr,
		"cached":           false, // reflects generation time; cached replies keep this value
		"generated_at":     time.Now().UTC().Format(time.RFC3339),
	}
	h.dashCache.Set(cacheKey, result, dashSummaryTTL)
	c.JSON(http.StatusOK, result)
}
// fetchActiveDecisionCount queries LAPI for active decisions count.
// Returns -1 when LAPI is unreachable, when the URL is blocked by the
// internal-host allowlist, or when the response is non-200 / undecodable.
func (h *CrowdsecHandler) fetchActiveDecisionCount(ctx context.Context) int64 {
	// Default to the local LAPI; the stored security config may override it.
	lapiURL := "http://127.0.0.1:8085"
	if h.Security != nil {
		cfg, err := h.Security.Get()
		if err == nil && cfg != nil && cfg.CrowdSecAPIURL != "" {
			lapiURL = cfg.CrowdSecAPIURL
		}
	}
	// SSRF guard: validate the configured URL before issuing any request.
	baseURL, err := h.resolveLAPIURLValidator(lapiURL)
	if err != nil {
		return -1
	}
	endpoint := baseURL.ResolveReference(&url.URL{Path: "/v1/decisions"})
	reqURL := endpoint.String()
	apiKey := getLAPIKey()
	// Bound the request so a hung LAPI cannot stall the dashboard.
	reqCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	req, err := http.NewRequestWithContext(reqCtx, http.MethodGet, reqURL, http.NoBody)
	if err != nil {
		return -1
	}
	if apiKey != "" {
		req.Header.Set("X-Api-Key", apiKey)
	}
	req.Header.Set("Accept", "application/json")
	client := network.NewInternalServiceHTTPClient(10 * time.Second)
	resp, err := client.Do(req)
	if err != nil {
		return -1
	}
	defer func() { _ = resp.Body.Close() }()
	if resp.StatusCode != http.StatusOK {
		return -1
	}
	// LAPI returns a JSON array of decisions; the count is its length.
	var decisions []interface{}
	if decErr := json.NewDecoder(resp.Body).Decode(&decisions); decErr != nil {
		return -1
	}
	return int64(len(decisions))
}
// DashboardTimeline returns time-bucketed decision counts for the timeline chart.
//
// Query params:
//   - range: reporting window (normalized by normalizeRange).
//   - interval: bucket size (5m, 15m, 1h, 6h, 1d); defaults per-range via
//     intervalForRange, and any value outside the allowlist yields 400.
//
// Responses are cached per (range, interval) pair for dashTimelineTTL.
func (h *CrowdsecHandler) DashboardTimeline(c *gin.Context) {
	rangeStr := normalizeRange(c.Query("range"))
	interval := c.Query("interval")
	if interval == "" {
		interval = intervalForRange(rangeStr)
	}
	if !validInterval(interval) {
		c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("invalid interval: %s (valid: 5m, 15m, 1h, 6h, 1d)", interval)})
		return
	}
	cacheKey := fmt.Sprintf("dashboard:timeline:%s:%s", rangeStr, interval)
	if cached, ok := h.dashCache.Get(cacheKey); ok {
		c.JSON(http.StatusOK, cached)
		return
	}
	since, err := parseTimeRange(rangeStr)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	// bucketExpr is interpolated into the SELECT below. This is safe from SQL
	// injection because interval was validated against the fixed allowlist
	// above, so intervalToStrftime only ever emits predefined expressions.
	bucketExpr := intervalToStrftime(interval)
	type bucketRow struct {
		Bucket   string
		Bans     int64
		Captchas int64
	}
	var rows []bucketRow
	// One row per bucket: "block" actions counted as bans, "challenge" as captchas.
	h.DB.Model(&models.SecurityDecision{}).
		Select(fmt.Sprintf("(%s) as bucket, SUM(CASE WHEN action = 'block' THEN 1 ELSE 0 END) as bans, SUM(CASE WHEN action = 'challenge' THEN 1 ELSE 0 END) as captchas", bucketExpr)).
		Where("source = ? AND created_at >= ?", "crowdsec", since).
		Group("bucket").
		Order("bucket ASC").
		Scan(&rows)
	// Reshape rows into the JSON structure the timeline chart consumes.
	buckets := make([]gin.H, 0, len(rows))
	for _, r := range rows {
		buckets = append(buckets, gin.H{
			"timestamp": r.Bucket,
			"bans":      r.Bans,
			"captchas":  r.Captchas,
		})
	}
	result := gin.H{
		"buckets":  buckets,
		"range":    rangeStr,
		"interval": interval,
		"cached":   false,
	}
	h.dashCache.Set(cacheKey, result, dashTimelineTTL)
	c.JSON(http.StatusOK, result)
}
// DashboardTopIPs returns top attacking IPs ranked by decision count.
//
// Query params:
//   - range: reporting window (normalized by normalizeRange).
//   - limit: number of IPs to return; invalid or <1 falls back to 10,
//     values above 50 are clamped to 50.
//
// Responses are cached per (range, limit) pair for dashTopIPsTTL.
func (h *CrowdsecHandler) DashboardTopIPs(c *gin.Context) {
	rangeStr := normalizeRange(c.Query("range"))
	limitStr := c.DefaultQuery("limit", "10")
	limit, err := strconv.Atoi(limitStr)
	if err != nil || limit < 1 {
		limit = 10
	}
	if limit > 50 {
		limit = 50
	}
	cacheKey := fmt.Sprintf("dashboard:top-ips:%s:%d", rangeStr, limit)
	if cached, ok := h.dashCache.Get(cacheKey); ok {
		c.JSON(http.StatusOK, cached)
		return
	}
	since, err := parseTimeRange(rangeStr)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	type ipRow struct {
		IP       string
		Count    int64
		LastSeen time.Time
		Country  string
	}
	var rows []ipRow
	// Per-IP aggregation: MAX(created_at) is the newest sighting;
	// MAX(country) picks one country per group (lexicographically last).
	h.DB.Model(&models.SecurityDecision{}).
		Select("ip, COUNT(*) as count, MAX(created_at) as last_seen, MAX(country) as country").
		Where("source = ? AND created_at >= ?", "crowdsec", since).
		Group("ip").
		Order("count DESC").
		Limit(limit).
		Scan(&rows)
	ips := make([]gin.H, 0, len(rows))
	for _, r := range rows {
		ips = append(ips, gin.H{
			"ip":        r.IP,
			"count":     r.Count,
			"last_seen": r.LastSeen.UTC().Format(time.RFC3339),
			"country":   r.Country,
		})
	}
	result := gin.H{
		"ips":    ips,
		"range":  rangeStr,
		"cached": false,
	}
	h.dashCache.Set(cacheKey, result, dashTopIPsTTL)
	c.JSON(http.StatusOK, result)
}
// DashboardScenarios returns scenario breakdown with counts and percentages.
//
// Only non-empty scenarios are considered and at most the top 50 by count
// are returned. Percentages are computed against the total of the returned
// rows and rounded to one decimal place. Responses are cached per-range
// for dashScenariosTTL.
func (h *CrowdsecHandler) DashboardScenarios(c *gin.Context) {
	rangeStr := normalizeRange(c.Query("range"))
	cacheKey := "dashboard:scenarios:" + rangeStr
	if cached, ok := h.dashCache.Get(cacheKey); ok {
		c.JSON(http.StatusOK, cached)
		return
	}
	since, err := parseTimeRange(rangeStr)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	type scenarioRow struct {
		Name  string
		Count int64
	}
	var rows []scenarioRow
	h.DB.Model(&models.SecurityDecision{}).
		Select("scenario as name, COUNT(*) as count").
		Where("source = ? AND created_at >= ? AND scenario != ''", "crowdsec", since).
		Group("scenario").
		Order("count DESC").
		Limit(50).
		Scan(&rows)
	// Total over the returned rows only (the denominator for percentages).
	var total int64
	for _, r := range rows {
		total += r.Count
	}
	scenarios := make([]gin.H, 0, len(rows))
	for _, r := range rows {
		pct := 0.0
		if total > 0 {
			// *1000 then /10 rounds the percentage to one decimal place.
			pct = math.Round(float64(r.Count)/float64(total)*1000) / 10
		}
		scenarios = append(scenarios, gin.H{
			"name":       r.Name,
			"count":      r.Count,
			"percentage": pct,
		})
	}
	result := gin.H{
		"scenarios": scenarios,
		"total":     total,
		"range":     rangeStr,
		"cached":    false,
	}
	h.dashCache.Set(cacheKey, result, dashScenariosTTL)
	c.JSON(http.StatusOK, result)
}
// ListAlerts wraps the CrowdSec LAPI /v1/alerts endpoint.
//
// Query params:
//   - range: reporting window, converted to a "since" timestamp.
//   - scenario: optional scenario filter (whitespace-trimmed).
//   - limit: page size; invalid or <1 falls back to 50, clamped to 200.
//   - offset: pagination offset; invalid or negative falls back to 0.
//
// Responses are cached per (range, scenario, limit, offset) for
// dashAlertsTTL. The "source" field reports whether the data came from
// LAPI or the cscli fallback.
func (h *CrowdsecHandler) ListAlerts(c *gin.Context) {
	rangeStr := normalizeRange(c.Query("range"))
	scenario := strings.TrimSpace(c.Query("scenario"))
	limitStr := c.DefaultQuery("limit", "50")
	offsetStr := c.DefaultQuery("offset", "0")
	limit, err := strconv.Atoi(limitStr)
	if err != nil || limit < 1 {
		limit = 50
	}
	if limit > 200 {
		limit = 200
	}
	offset, err := strconv.Atoi(offsetStr)
	if err != nil || offset < 0 {
		offset = 0
	}
	cacheKey := fmt.Sprintf("dashboard:alerts:%s:%s:%d:%d", rangeStr, scenario, limit, offset)
	if cached, ok := h.dashCache.Get(cacheKey); ok {
		c.JSON(http.StatusOK, cached)
		return
	}
	since, tErr := parseTimeRange(rangeStr)
	if tErr != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": tErr.Error()})
		return
	}
	alerts, total, source := h.fetchLAPIAlerts(c.Request.Context(), since, scenario, limit, offset)
	result := gin.H{
		"alerts": alerts,
		"total":  total,
		"source": source,
		"cached": false,
	}
	h.dashCache.Set(cacheKey, result, dashAlertsTTL)
	c.JSON(http.StatusOK, result)
}
// fetchLAPIAlerts attempts to get alerts from LAPI, falling back to cscli
// on any validation, transport, status, or decode failure.
//
// Pagination: LAPI's /v1/alerts has no offset parameter, so we request
// limit+offset rows and slice locally. (Previously only `limit` rows were
// requested while the offset was applied afterwards, so every page beyond
// the first was truncated or empty and `total` could never exceed `limit`.)
//
// Returns the requested page, the pre-slice total (capped at limit+offset),
// and the data source ("lapi" or "cscli").
func (h *CrowdsecHandler) fetchLAPIAlerts(ctx context.Context, since time.Time, scenario string, limit, offset int) (alerts []interface{}, total int, source string) {
	// Resolve the LAPI base URL: local default, optionally overridden by
	// the stored security configuration.
	lapiURL := "http://127.0.0.1:8085"
	if h.Security != nil {
		cfg, err := h.Security.Get()
		if err == nil && cfg != nil && cfg.CrowdSecAPIURL != "" {
			lapiURL = cfg.CrowdSecAPIURL
		}
	}
	// SSRF guard: only allowlisted internal hosts are accepted.
	baseURL, err := h.resolveLAPIURLValidator(lapiURL)
	if err != nil {
		return h.fetchAlertsCscli(ctx, scenario, limit)
	}
	q := url.Values{}
	q.Set("since", since.Format(time.RFC3339))
	if scenario != "" {
		q.Set("scenario", scenario)
	}
	// Request enough rows to cover the offset plus one full page, since the
	// offset is applied client-side below.
	q.Set("limit", strconv.Itoa(limit+offset))
	endpoint := baseURL.ResolveReference(&url.URL{Path: "/v1/alerts"})
	endpoint.RawQuery = q.Encode()
	reqURL := endpoint.String()
	apiKey := getLAPIKey()
	reqCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	req, reqErr := http.NewRequestWithContext(reqCtx, http.MethodGet, reqURL, http.NoBody)
	if reqErr != nil {
		return h.fetchAlertsCscli(ctx, scenario, limit)
	}
	if apiKey != "" {
		req.Header.Set("X-Api-Key", apiKey)
	}
	req.Header.Set("Accept", "application/json")
	client := network.NewInternalServiceHTTPClient(10 * time.Second)
	resp, doErr := client.Do(req)
	if doErr != nil {
		return h.fetchAlertsCscli(ctx, scenario, limit)
	}
	defer func() { _ = resp.Body.Close() }()
	if resp.StatusCode != http.StatusOK {
		return h.fetchAlertsCscli(ctx, scenario, limit)
	}
	var rawAlerts []interface{}
	if decErr := json.NewDecoder(resp.Body).Decode(&rawAlerts); decErr != nil {
		return h.fetchAlertsCscli(ctx, scenario, limit)
	}
	// Capture full count before slicing for correct pagination semantics
	fullTotal := len(rawAlerts)
	// Apply offset for pagination
	if offset > 0 && offset < len(rawAlerts) {
		rawAlerts = rawAlerts[offset:]
	} else if offset >= len(rawAlerts) {
		rawAlerts = nil
	}
	if limit < len(rawAlerts) {
		rawAlerts = rawAlerts[:limit]
	}
	return rawAlerts, fullTotal, "lapi"
}
// fetchAlertsCscli falls back to using cscli to list alerts.
// It returns an empty (non-nil) slice and zero total on any execution or
// decode failure, and always reports "cscli" as the source.
func (h *CrowdsecHandler) fetchAlertsCscli(ctx context.Context, scenario string, limit int) (alerts []interface{}, total int, source string) {
	source = "cscli"
	args := []string{"alerts", "list", "-o", "json"}
	if scenario != "" {
		args = append(args, "-s", scenario)
	}
	args = append(args, "-l", strconv.Itoa(limit))
	output, execErr := h.CmdExec.Execute(ctx, "cscli", args...)
	if execErr != nil {
		logger.Log().WithError(execErr).Warn("Failed to list alerts via cscli")
		return []interface{}{}, 0, source
	}
	if unmarshalErr := json.Unmarshal(output, &alerts); unmarshalErr != nil {
		return []interface{}{}, 0, source
	}
	return alerts, len(alerts), source
}
// ExportDecisions exports decisions as downloadable CSV or JSON.
//
// Query params:
//   - format: "csv" (default) or "json"; anything else yields 400.
//   - range: reporting window (normalized by normalizeRange).
//   - source: crowdsec | waf | ratelimit | manual | all (default "all").
//
// Output is capped at exportMaxRows rows, newest first. Free-text CSV
// fields are passed through sanitizeCSVField to neutralize spreadsheet
// formula injection (CWE-1236).
func (h *CrowdsecHandler) ExportDecisions(c *gin.Context) {
	format := strings.ToLower(c.DefaultQuery("format", "csv"))
	rangeStr := normalizeRange(c.Query("range"))
	source := strings.ToLower(c.DefaultQuery("source", "all"))
	if format != "csv" && format != "json" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid format: must be csv or json"})
		return
	}
	validSources := map[string]bool{"crowdsec": true, "waf": true, "ratelimit": true, "manual": true, "all": true}
	if !validSources[source] {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid source: must be crowdsec, waf, ratelimit, manual, or all"})
		return
	}
	since, err := parseTimeRange(rangeStr)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	var decisions []models.SecurityDecision
	q := h.DB.Where("created_at >= ?", since)
	if source != "all" {
		q = q.Where("source = ?", source)
	}
	q.Order("created_at DESC").Limit(exportMaxRows).Find(&decisions)
	// Timestamped filename so repeated exports do not collide.
	ts := time.Now().UTC().Format("20060102-150405")
	switch format {
	case "csv":
		filename := fmt.Sprintf("crowdsec-decisions-%s.csv", ts)
		c.Header("Content-Type", "text/csv; charset=utf-8")
		c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=%s", filename))
		w := csv.NewWriter(c.Writer)
		// Write errors from individual rows are surfaced once via w.Error()
		// after Flush, so per-Write errors are intentionally discarded here.
		_ = w.Write([]string{"uuid", "ip", "action", "source", "scenario", "rule_id", "host", "country", "created_at", "expires_at"})
		for _, d := range decisions {
			_ = w.Write([]string{
				d.UUID,
				sanitizeCSVField(d.IP),
				d.Action,
				d.Source,
				sanitizeCSVField(d.Scenario),
				sanitizeCSVField(d.RuleID),
				sanitizeCSVField(d.Host),
				sanitizeCSVField(d.Country),
				d.CreatedAt.UTC().Format(time.RFC3339),
				// ExpiresAt is nullable; emit "" for decisions with no expiry.
				func() string {
					if d.ExpiresAt != nil {
						return d.ExpiresAt.UTC().Format(time.RFC3339)
					}
					return ""
				}(),
			})
		}
		w.Flush()
		if err := w.Error(); err != nil {
			logger.Log().WithError(err).Warn("CSV export write error")
		}
	case "json":
		filename := fmt.Sprintf("crowdsec-decisions-%s.json", ts)
		c.Header("Content-Type", "application/json")
		c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=%s", filename))
		c.JSON(http.StatusOK, decisions)
	}
}

View File

@@ -0,0 +1,70 @@
package handlers
import (
"strings"
"sync"
"time"
)
type cacheEntry struct {
data interface{}
expiresAt time.Time
}
type dashboardCache struct {
mu sync.RWMutex
entries map[string]*cacheEntry
}
func newDashboardCache() *dashboardCache {
return &dashboardCache{
entries: make(map[string]*cacheEntry),
}
}
func (c *dashboardCache) Get(key string) (interface{}, bool) {
c.mu.RLock()
entry, ok := c.entries[key]
if !ok {
c.mu.RUnlock()
return nil, false
}
if time.Now().Before(entry.expiresAt) {
data := entry.data
c.mu.RUnlock()
return data, true
}
c.mu.RUnlock()
c.mu.Lock()
defer c.mu.Unlock()
entry, ok = c.entries[key]
if ok && time.Now().After(entry.expiresAt) {
delete(c.entries, key)
}
return nil, false
}
func (c *dashboardCache) Set(key string, data interface{}, ttl time.Duration) {
c.mu.Lock()
defer c.mu.Unlock()
c.entries[key] = &cacheEntry{
data: data,
expiresAt: time.Now().Add(ttl),
}
}
func (c *dashboardCache) Invalidate(prefixes ...string) {
c.mu.Lock()
defer c.mu.Unlock()
for key := range c.entries {
for _, prefix := range prefixes {
if strings.HasPrefix(key, prefix) {
delete(c.entries, key)
break
}
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -28,7 +28,6 @@ func (m *mockCommandExecutor) Execute(ctx context.Context, name string, args ...
}
func TestListDecisions_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -67,7 +66,6 @@ func TestListDecisions_Success(t *testing.T) {
}
func TestListDecisions_EmptyList(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -98,7 +96,6 @@ func TestListDecisions_EmptyList(t *testing.T) {
}
func TestListDecisions_CscliError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -130,7 +127,6 @@ func TestListDecisions_CscliError(t *testing.T) {
}
func TestListDecisions_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -154,7 +150,6 @@ func TestListDecisions_InvalidJSON(t *testing.T) {
}
func TestBanIP_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -205,7 +200,6 @@ func TestBanIP_Success(t *testing.T) {
}
func TestBanIP_DefaultDuration(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -245,7 +239,6 @@ func TestBanIP_DefaultDuration(t *testing.T) {
}
func TestBanIP_MissingIP(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -268,7 +261,6 @@ func TestBanIP_MissingIP(t *testing.T) {
}
func TestBanIP_EmptyIP(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -293,7 +285,6 @@ func TestBanIP_EmptyIP(t *testing.T) {
}
func TestBanIP_CscliError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -323,7 +314,6 @@ func TestBanIP_CscliError(t *testing.T) {
}
func TestUnbanIP_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -357,7 +347,6 @@ func TestUnbanIP_Success(t *testing.T) {
}
func TestUnbanIP_CscliError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -381,7 +370,6 @@ func TestUnbanIP_CscliError(t *testing.T) {
}
func TestListDecisions_MultipleDecisions(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -430,7 +418,6 @@ func TestListDecisions_MultipleDecisions(t *testing.T) {
}
func TestBanIP_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()

View File

@@ -63,9 +63,16 @@ type CrowdsecHandler struct {
Hub *crowdsec.HubService
Console *crowdsec.ConsoleEnrollmentService
Security *services.SecurityService
WhitelistSvc *services.CrowdSecWhitelistService
CaddyManager *caddy.Manager // For config reload after bouncer registration
LAPIMaxWait time.Duration // For testing; 0 means 60s default
LAPIPollInterval time.Duration // For testing; 0 means 500ms default
dashCache *dashboardCache
// validateLAPIURL validates and parses a LAPI base URL.
// This field allows tests to inject a permissive validator for mock servers
// without mutating package-level state (which causes data races).
validateLAPIURL func(string) (*url.URL, error)
// registrationMutex protects concurrent bouncer registration attempts
registrationMutex sync.Mutex
@@ -84,6 +91,14 @@ const (
bouncerName = "caddy-bouncer"
)
// resolveLAPIURLValidator returns the handler's validator or the default.
func (h *CrowdsecHandler) resolveLAPIURLValidator(raw string) (*url.URL, error) {
if h.validateLAPIURL != nil {
return h.validateLAPIURL(raw)
}
return validateCrowdsecLAPIBaseURLDefault(raw)
}
func (h *CrowdsecHandler) bouncerKeyPath() string {
if h != nil && strings.TrimSpace(h.DataDir) != "" {
return filepath.Join(h.DataDir, "bouncer_key")
@@ -369,16 +384,22 @@ func NewCrowdsecHandler(db *gorm.DB, executor CrowdsecExecutor, binPath, dataDir
securitySvc = services.NewSecurityService(db)
consoleSvc = crowdsec.NewConsoleEnrollmentService(db, &crowdsec.SecureCommandExecutor{}, dataDir, consoleSecret)
}
return &CrowdsecHandler{
DB: db,
Executor: executor,
CmdExec: &RealCommandExecutor{},
BinPath: binPath,
DataDir: dataDir,
Hub: hubSvc,
Console: consoleSvc,
Security: securitySvc,
h := &CrowdsecHandler{
DB: db,
Executor: executor,
CmdExec: &RealCommandExecutor{},
BinPath: binPath,
DataDir: dataDir,
Hub: hubSvc,
Console: consoleSvc,
Security: securitySvc,
dashCache: newDashboardCache(),
validateLAPIURL: validateCrowdsecLAPIBaseURLDefault,
}
if db != nil {
h.WhitelistSvc = services.NewCrowdSecWhitelistService(db, dataDir)
}
return h
}
// isCerberusEnabled returns true when Cerberus is enabled via DB or env flag.
@@ -1442,18 +1463,10 @@ const (
defaultCrowdsecLAPIPort = 8085
)
// validateCrowdsecLAPIBaseURLFunc is a variable holding the LAPI URL validation function.
// This indirection allows tests to inject a permissive validator for mock servers.
var validateCrowdsecLAPIBaseURLFunc = validateCrowdsecLAPIBaseURLDefault
func validateCrowdsecLAPIBaseURLDefault(raw string) (*url.URL, error) {
return security.ValidateInternalServiceBaseURL(raw, defaultCrowdsecLAPIPort, security.InternalServiceHostAllowlist())
}
func validateCrowdsecLAPIBaseURL(raw string) (*url.URL, error) {
return validateCrowdsecLAPIBaseURLFunc(raw)
}
// GetLAPIDecisions queries CrowdSec LAPI directly for current decisions.
// This is an alternative to ListDecisions which uses cscli.
// Query params:
@@ -1471,7 +1484,7 @@ func (h *CrowdsecHandler) GetLAPIDecisions(c *gin.Context) {
}
}
baseURL, err := validateCrowdsecLAPIBaseURL(lapiURL)
baseURL, err := h.resolveLAPIURLValidator(lapiURL)
if err != nil {
logger.Log().WithError(err).WithField("lapi_url", lapiURL).Warn("Blocked CrowdSec LAPI URL by internal allowlist policy")
// Fallback to cscli-based method.
@@ -2142,7 +2155,7 @@ func (h *CrowdsecHandler) CheckLAPIHealth(c *gin.Context) {
ctx, cancel := context.WithTimeout(c.Request.Context(), 5*time.Second)
defer cancel()
baseURL, err := validateCrowdsecLAPIBaseURL(lapiURL)
baseURL, err := h.resolveLAPIURLValidator(lapiURL)
if err != nil {
c.JSON(http.StatusOK, gin.H{"healthy": false, "error": "invalid LAPI URL (blocked by SSRF policy)", "lapi_url": lapiURL})
return
@@ -2287,6 +2300,21 @@ func (h *CrowdsecHandler) BanIP(c *gin.Context) {
}
c.JSON(http.StatusOK, gin.H{"status": "banned", "ip": ip, "duration": duration})
// Log to security_decisions for dashboard aggregation
if h.Security != nil {
parsedDur, _ := time.ParseDuration(duration)
expiry := time.Now().Add(parsedDur)
_ = h.Security.LogDecision(&models.SecurityDecision{
IP: ip,
Action: "block",
Source: "crowdsec",
RuleID: reason,
Scenario: "manual",
ExpiresAt: &expiry,
})
}
h.dashCache.Invalidate("dashboard")
}
// UnbanIP removes a ban for an IP address
@@ -2313,6 +2341,7 @@ func (h *CrowdsecHandler) UnbanIP(c *gin.Context) {
}
c.JSON(http.StatusOK, gin.H{"status": "unbanned", "ip": ip})
h.dashCache.Invalidate("dashboard")
}
// RegisterBouncer registers a new bouncer or returns existing bouncer status.
@@ -2676,6 +2705,75 @@ func fileExists(path string) bool {
return err == nil
}
// ListWhitelists returns all CrowdSec IP/CIDR whitelist entries.
func (h *CrowdsecHandler) ListWhitelists(c *gin.Context) {
entries, err := h.WhitelistSvc.List(c.Request.Context())
if err != nil {
logger.Log().WithError(err).Error("failed to list whitelist entries")
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to list whitelist entries"})
return
}
c.JSON(http.StatusOK, gin.H{"whitelist": entries})
}
// AddWhitelist adds a new IP or CIDR to the CrowdSec whitelist.
func (h *CrowdsecHandler) AddWhitelist(c *gin.Context) {
var req struct {
IPOrCIDR string `json:"ip_or_cidr" binding:"required"`
Reason string `json:"reason"`
}
if err := c.ShouldBindJSON(&req); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": "ip_or_cidr is required"})
return
}
entry, err := h.WhitelistSvc.Add(c.Request.Context(), req.IPOrCIDR, req.Reason)
if err != nil {
switch {
case errors.Is(err, services.ErrInvalidIPOrCIDR):
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid IP address or CIDR notation"})
case errors.Is(err, services.ErrDuplicateEntry):
c.JSON(http.StatusConflict, gin.H{"error": "entry already exists in whitelist"})
default:
logger.Log().WithError(err).Error("failed to add whitelist entry")
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to add whitelist entry"})
}
return
}
if _, execErr := h.CmdExec.Execute(c.Request.Context(), "cscli", "hub", "reload"); execErr != nil {
logger.Log().WithError(execErr).Warn("cscli hub reload failed after whitelist add (non-fatal)")
}
c.JSON(http.StatusCreated, entry)
}
// DeleteWhitelist removes a whitelist entry by UUID.
func (h *CrowdsecHandler) DeleteWhitelist(c *gin.Context) {
id := c.Param("uuid")
if id == "" {
c.JSON(http.StatusBadRequest, gin.H{"error": "uuid is required"})
return
}
if err := h.WhitelistSvc.Delete(c.Request.Context(), id); err != nil {
switch {
case errors.Is(err, services.ErrWhitelistNotFound):
c.JSON(http.StatusNotFound, gin.H{"error": "whitelist entry not found"})
default:
logger.Log().WithError(err).Error("failed to delete whitelist entry")
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to delete whitelist entry"})
}
return
}
if _, execErr := h.CmdExec.Execute(c.Request.Context(), "cscli", "hub", "reload"); execErr != nil {
logger.Log().WithError(execErr).Warn("cscli hub reload failed after whitelist delete (non-fatal)")
}
c.Status(http.StatusNoContent)
}
// RegisterRoutes registers crowdsec admin routes under protected group
func (h *CrowdsecHandler) RegisterRoutes(rg *gin.RouterGroup) {
rg.POST("/admin/crowdsec/start", h.Start)
@@ -2711,4 +2809,15 @@ func (h *CrowdsecHandler) RegisterRoutes(rg *gin.RouterGroup) {
// Acquisition configuration endpoints
rg.GET("/admin/crowdsec/acquisition", h.GetAcquisitionConfig)
rg.PUT("/admin/crowdsec/acquisition", h.UpdateAcquisitionConfig)
// Dashboard aggregation endpoints (PR-1)
rg.GET("/admin/crowdsec/dashboard/summary", h.DashboardSummary)
rg.GET("/admin/crowdsec/dashboard/timeline", h.DashboardTimeline)
rg.GET("/admin/crowdsec/dashboard/top-ips", h.DashboardTopIPs)
rg.GET("/admin/crowdsec/dashboard/scenarios", h.DashboardScenarios)
rg.GET("/admin/crowdsec/alerts", h.ListAlerts)
rg.GET("/admin/crowdsec/decisions/export", h.ExportDecisions)
// Whitelist management endpoints (Issue #939)
rg.GET("/admin/crowdsec/whitelist", h.ListWhitelists)
rg.POST("/admin/crowdsec/whitelist", h.AddWhitelist)
rg.DELETE("/admin/crowdsec/whitelist/:uuid", h.DeleteWhitelist)
}

View File

@@ -106,7 +106,6 @@ func TestMapCrowdsecStatus(t *testing.T) {
// TestIsConsoleEnrollmentEnabled tests the isConsoleEnrollmentEnabled helper
func TestIsConsoleEnrollmentEnabled(t *testing.T) {
gin.SetMode(gin.TestMode)
tests := []struct {
name string
@@ -191,7 +190,6 @@ func TestActorFromContext(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gin.SetMode(gin.TestMode)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
tt.setupCtx(c)
@@ -204,7 +202,6 @@ func TestActorFromContext(t *testing.T) {
// TestHubEndpoints tests the hubEndpoints helper
func TestHubEndpoints(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -233,7 +230,6 @@ func TestHubEndpoints(t *testing.T) {
// TestGetCachedPreset tests the GetCachedPreset handler
func TestGetCachedPreset(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -264,7 +260,6 @@ func TestGetCachedPreset(t *testing.T) {
// TestGetCachedPreset_NotFound tests GetCachedPreset with non-existent preset
func TestGetCachedPreset_NotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -293,7 +288,6 @@ func TestGetCachedPreset_NotFound(t *testing.T) {
// TestGetLAPIDecisions tests the GetLAPIDecisions handler
func TestGetLAPIDecisions(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -313,7 +307,6 @@ func TestGetLAPIDecisions(t *testing.T) {
// TestCheckLAPIHealth tests the CheckLAPIHealth handler
func TestCheckLAPIHealth(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -332,7 +325,6 @@ func TestCheckLAPIHealth(t *testing.T) {
// TestListDecisions tests the ListDecisions handler
func TestListDecisions(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -351,7 +343,6 @@ func TestListDecisions(t *testing.T) {
// TestBanIP tests the BanIP handler
func TestBanIP(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -373,7 +364,6 @@ func TestBanIP(t *testing.T) {
// TestUnbanIP tests the UnbanIP handler
func TestUnbanIP(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -395,7 +385,6 @@ func TestUnbanIP(t *testing.T) {
// TestGetAcquisitionConfig tests the GetAcquisitionConfig handler
func TestGetAcquisitionConfig(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
acquisPath := filepath.Join(tmpDir, "acquis.yaml")
@@ -417,7 +406,6 @@ func TestGetAcquisitionConfig(t *testing.T) {
// TestUpdateAcquisitionConfig tests the UpdateAcquisitionConfig handler
func TestUpdateAcquisitionConfig(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
acquisPath := filepath.Join(tmpDir, "acquis.yaml")

View File

@@ -29,7 +29,6 @@ func (f *errorExec) Status(ctx context.Context, configDir string) (running bool,
}
func TestCrowdsec_Start_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -48,7 +47,6 @@ func TestCrowdsec_Start_Error(t *testing.T) {
}
func TestCrowdsec_Stop_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -67,7 +65,6 @@ func TestCrowdsec_Stop_Error(t *testing.T) {
}
func TestCrowdsec_Status_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -87,7 +84,6 @@ func TestCrowdsec_Status_Error(t *testing.T) {
// ReadFile tests
func TestCrowdsec_ReadFile_MissingPath(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -106,7 +102,6 @@ func TestCrowdsec_ReadFile_MissingPath(t *testing.T) {
}
func TestCrowdsec_ReadFile_PathTraversal(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -126,7 +121,6 @@ func TestCrowdsec_ReadFile_PathTraversal(t *testing.T) {
}
func TestCrowdsec_ReadFile_NotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -146,7 +140,6 @@ func TestCrowdsec_ReadFile_NotFound(t *testing.T) {
// WriteFile tests
func TestCrowdsec_WriteFile_InvalidPayload(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -166,7 +159,6 @@ func TestCrowdsec_WriteFile_InvalidPayload(t *testing.T) {
}
func TestCrowdsec_WriteFile_MissingPath(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -189,7 +181,6 @@ func TestCrowdsec_WriteFile_MissingPath(t *testing.T) {
}
func TestCrowdsec_WriteFile_PathTraversal(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -214,7 +205,6 @@ func TestCrowdsec_WriteFile_PathTraversal(t *testing.T) {
// ExportConfig tests
func TestCrowdsec_ExportConfig_NotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
// Use a non-existent directory
nonExistentDir := "/tmp/crowdsec-nonexistent-dir-12345"
@@ -238,7 +228,6 @@ func TestCrowdsec_ExportConfig_NotFound(t *testing.T) {
// ListFiles tests
func TestCrowdsec_ListFiles_EmptyDir(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -263,7 +252,6 @@ func TestCrowdsec_ListFiles_EmptyDir(t *testing.T) {
}
func TestCrowdsec_ListFiles_NonExistent(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
nonExistentDir := "/tmp/crowdsec-nonexistent-dir-67890"
_ = os.RemoveAll(nonExistentDir)
@@ -289,7 +277,6 @@ func TestCrowdsec_ListFiles_NonExistent(t *testing.T) {
// ImportConfig error cases
func TestCrowdsec_ImportConfig_NoFile(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -310,7 +297,6 @@ func TestCrowdsec_ImportConfig_NoFile(t *testing.T) {
// Additional ReadFile test with nested path that exists
func TestCrowdsec_ReadFile_NestedPath(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -336,7 +322,6 @@ func TestCrowdsec_ReadFile_NestedPath(t *testing.T) {
// Test WriteFile when backup fails (simulate by making dir unwritable)
func TestCrowdsec_WriteFile_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -364,7 +349,6 @@ func TestCrowdsec_WriteFile_Success(t *testing.T) {
}
func TestCrowdsec_ListPresets_Disabled(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
t.Setenv("FEATURE_CERBERUS_ENABLED", "false")
tmpDir := t.TempDir()
@@ -383,7 +367,6 @@ func TestCrowdsec_ListPresets_Disabled(t *testing.T) {
}
func TestCrowdsec_ListPresets_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -406,7 +389,6 @@ func TestCrowdsec_ListPresets_Success(t *testing.T) {
}
func TestCrowdsec_PullPreset_Validation(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -431,7 +413,6 @@ func TestCrowdsec_PullPreset_Validation(t *testing.T) {
}
func TestCrowdsec_ApplyPreset_Validation(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()

File diff suppressed because it is too large Load Diff

View File

@@ -12,7 +12,6 @@ import (
)
func TestGetLAPIDecisions_FallbackToCscli(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
// Create handler with mock executor
@@ -40,7 +39,6 @@ func TestGetLAPIDecisions_FallbackToCscli(t *testing.T) {
}
func TestGetLAPIDecisions_EmptyResponse(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
// Create handler with mock executor that returns empty array
@@ -67,7 +65,6 @@ func TestGetLAPIDecisions_EmptyResponse(t *testing.T) {
}
func TestCheckLAPIHealth_Handler(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
handler := &CrowdsecHandler{

View File

@@ -46,7 +46,6 @@ func makePresetTar(t *testing.T, files map[string]string) []byte {
}
func TestListPresetsIncludesCacheAndIndex(t *testing.T) {
gin.SetMode(gin.TestMode)
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
_, err = cache.Store(context.Background(), "crowdsecurity/demo", "etag1", "hub", "preview", []byte("archive"))
@@ -92,7 +91,6 @@ func TestListPresetsIncludesCacheAndIndex(t *testing.T) {
}
func TestPullPresetHandlerSuccess(t *testing.T) {
gin.SetMode(gin.TestMode)
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
dataDir := filepath.Join(t.TempDir(), "crowdsec")
@@ -132,7 +130,6 @@ func TestPullPresetHandlerSuccess(t *testing.T) {
}
func TestApplyPresetHandlerAudits(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.CrowdsecPresetEvent{}))
@@ -186,7 +183,6 @@ func TestApplyPresetHandlerAudits(t *testing.T) {
}
func TestPullPresetHandlerHubError(t *testing.T) {
gin.SetMode(gin.TestMode)
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
@@ -213,7 +209,6 @@ func TestPullPresetHandlerHubError(t *testing.T) {
}
func TestPullPresetHandlerTimeout(t *testing.T) {
gin.SetMode(gin.TestMode)
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
@@ -241,7 +236,6 @@ func TestPullPresetHandlerTimeout(t *testing.T) {
}
func TestGetCachedPresetNotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
@@ -260,7 +254,6 @@ func TestGetCachedPresetNotFound(t *testing.T) {
}
func TestGetCachedPresetServiceUnavailable(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
h.Hub = &crowdsec.HubService{}
@@ -277,7 +270,6 @@ func TestGetCachedPresetServiceUnavailable(t *testing.T) {
}
func TestApplyPresetHandlerBackupFailure(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.CrowdsecPresetEvent{}))
@@ -325,7 +317,6 @@ func TestApplyPresetHandlerBackupFailure(t *testing.T) {
}
func TestListPresetsMergesCuratedAndHub(t *testing.T) {
gin.SetMode(gin.TestMode)
hub := crowdsec.NewHubService(nil, nil, t.TempDir())
hub.HubBaseURL = "http://hub.example"
@@ -375,7 +366,6 @@ func TestListPresetsMergesCuratedAndHub(t *testing.T) {
}
func TestGetCachedPresetSuccess(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("FEATURE_CERBERUS_ENABLED", "true")
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
@@ -403,7 +393,6 @@ func TestGetCachedPresetSuccess(t *testing.T) {
}
func TestGetCachedPresetSlugRequired(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("FEATURE_CERBERUS_ENABLED", "true")
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
@@ -424,7 +413,6 @@ func TestGetCachedPresetSlugRequired(t *testing.T) {
}
func TestGetCachedPresetPreviewError(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("FEATURE_CERBERUS_ENABLED", "true")
cacheDir := t.TempDir()
cache, err := crowdsec.NewHubCache(cacheDir, time.Hour)
@@ -451,7 +439,6 @@ func TestGetCachedPresetPreviewError(t *testing.T) {
}
func TestPullCuratedPresetSkipsHub(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("FEATURE_CERBERUS_ENABLED", "true")
// Setup handler with a hub service that would fail if called
@@ -489,7 +476,6 @@ func TestPullCuratedPresetSkipsHub(t *testing.T) {
}
func TestApplyCuratedPresetSkipsHub(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("FEATURE_CERBERUS_ENABLED", "true")
db := OpenTestDB(t)

Some files were not shown because too many files have changed in this diff Show More